Diffstat (limited to 'activestorage')
-rw-r--r--  activestorage/.babelrc | 3
-rw-r--r--  activestorage/.gitignore | 12
-rw-r--r--  activestorage/CHANGELOG.md | 98
-rw-r--r--  activestorage/MIT-LICENSE | 2
-rw-r--r--  activestorage/README.md | 20
-rw-r--r--  activestorage/Rakefile | 11
-rw-r--r--  activestorage/activestorage.gemspec | 6
-rw-r--r--  activestorage/app/assets/javascripts/activestorage.js | 940
-rw-r--r--  activestorage/app/controllers/active_storage/base_controller.rb | 8
-rw-r--r--  activestorage/app/controllers/active_storage/blobs_controller.rb | 12
-rw-r--r--  activestorage/app/controllers/active_storage/direct_uploads_controller.rb | 4
-rw-r--r--  activestorage/app/controllers/active_storage/disk_controller.rb | 21
-rw-r--r--  activestorage/app/controllers/active_storage/previews_controller.rb | 12
-rw-r--r--  activestorage/app/controllers/active_storage/representations_controller.rb | 14
-rw-r--r--  activestorage/app/controllers/active_storage/variants_controller.rb | 16
-rw-r--r--  activestorage/app/controllers/concerns/active_storage/set_blob.rb | 16
-rw-r--r--  activestorage/app/controllers/concerns/active_storage/set_current.rb | 15
-rw-r--r--  activestorage/app/javascript/activestorage/blob_record.js | 20
-rw-r--r--  activestorage/app/javascript/activestorage/blob_upload.js | 2
-rw-r--r--  activestorage/app/javascript/activestorage/direct_upload.js | 6
-rw-r--r--  activestorage/app/javascript/activestorage/file_checksum.js | 2
-rw-r--r--  activestorage/app/javascript/activestorage/helpers.js | 11
-rw-r--r--  activestorage/app/javascript/activestorage/ujs.js | 15
-rw-r--r--  activestorage/app/jobs/active_storage/analyze_job.rb | 2
-rw-r--r--  activestorage/app/jobs/active_storage/purge_job.rb | 4
-rw-r--r--  activestorage/app/models/active_storage/attachment.rb | 28
-rw-r--r--  activestorage/app/models/active_storage/blob.rb | 210
-rw-r--r--  activestorage/app/models/active_storage/blob/analyzable.rb | 57
-rw-r--r--  activestorage/app/models/active_storage/blob/identifiable.rb | 20
-rw-r--r--  activestorage/app/models/active_storage/blob/representable.rb | 93
-rw-r--r--  activestorage/app/models/active_storage/current.rb | 5
-rw-r--r--  activestorage/app/models/active_storage/filename.rb | 12
-rw-r--r--  activestorage/app/models/active_storage/filename/parameters.rb | 2
-rw-r--r--  activestorage/app/models/active_storage/preview.rb | 9
-rw-r--r--  activestorage/app/models/active_storage/variant.rb | 85
-rw-r--r--  activestorage/app/models/active_storage/variation.rb | 39
-rw-r--r--  activestorage/config/routes.rb | 44
-rw-r--r--  activestorage/db/migrate/20170806125915_create_active_storage_tables.rb | 1
-rw-r--r--  activestorage/lib/active_storage.rb | 19
-rw-r--r--  activestorage/lib/active_storage/analyzer.rb | 15
-rw-r--r--  activestorage/lib/active_storage/analyzer/image_analyzer.rb | 15
-rw-r--r--  activestorage/lib/active_storage/analyzer/video_analyzer.rb | 65
-rw-r--r--  activestorage/lib/active_storage/attached.rb | 29
-rw-r--r--  activestorage/lib/active_storage/attached/changes.rb | 16
-rw-r--r--  activestorage/lib/active_storage/attached/changes/create_many.rb | 46
-rw-r--r--  activestorage/lib/active_storage/attached/changes/create_one.rb | 68
-rw-r--r--  activestorage/lib/active_storage/attached/changes/create_one_of_many.rb | 10
-rw-r--r--  activestorage/lib/active_storage/attached/changes/delete_many.rb | 23
-rw-r--r--  activestorage/lib/active_storage/attached/changes/delete_one.rb | 19
-rw-r--r--  activestorage/lib/active_storage/attached/macros.rb | 88
-rw-r--r--  activestorage/lib/active_storage/attached/many.rb | 42
-rw-r--r--  activestorage/lib/active_storage/attached/model.rb | 140
-rw-r--r--  activestorage/lib/active_storage/attached/one.rb | 45
-rw-r--r--  activestorage/lib/active_storage/downloader.rb | 44
-rw-r--r--  activestorage/lib/active_storage/downloading.rb | 29
-rw-r--r--  activestorage/lib/active_storage/engine.rb | 55
-rw-r--r--  activestorage/lib/active_storage/errors.rb | 22
-rw-r--r--  activestorage/lib/active_storage/gem_version.rb | 4
-rw-r--r--  activestorage/lib/active_storage/log_subscriber.rb | 6
-rw-r--r--  activestorage/lib/active_storage/previewer.rb | 50
-rw-r--r--  activestorage/lib/active_storage/previewer/mupdf_previewer.rb | 36
-rw-r--r--  activestorage/lib/active_storage/previewer/pdf_previewer.rb | 17
-rw-r--r--  activestorage/lib/active_storage/previewer/poppler_pdf_previewer.rb | 35
-rw-r--r--  activestorage/lib/active_storage/previewer/video_previewer.rb | 9
-rw-r--r--  activestorage/lib/active_storage/reflection.rb | 64
-rw-r--r--  activestorage/lib/active_storage/service.rb | 22
-rw-r--r--  activestorage/lib/active_storage/service/azure_storage_service.rb | 63
-rw-r--r--  activestorage/lib/active_storage/service/configurator.rb | 4
-rw-r--r--  activestorage/lib/active_storage/service/disk_service.rb | 71
-rw-r--r--  activestorage/lib/active_storage/service/gcs_service.rb | 99
-rw-r--r--  activestorage/lib/active_storage/service/mirror_service.rb | 7
-rw-r--r--  activestorage/lib/active_storage/service/s3_service.rb | 32
-rw-r--r--  activestorage/lib/active_storage/transformers/image_processing_transformer.rb | 39
-rw-r--r--  activestorage/lib/active_storage/transformers/mini_magick_transformer.rb | 38
-rw-r--r--  activestorage/lib/active_storage/transformers/transformer.rb | 42
-rw-r--r--  activestorage/lib/tasks/activestorage.rake | 9
-rw-r--r--  activestorage/package.json | 22
-rw-r--r--  activestorage/rollup.config.js | 28
-rw-r--r--  activestorage/test/analyzer/image_analyzer_test.rb | 20
-rw-r--r--  activestorage/test/analyzer/video_analyzer_test.rb | 33
-rw-r--r--  activestorage/test/controllers/blobs_controller_test.rb | 5
-rw-r--r--  activestorage/test/controllers/direct_uploads_controller_test.rb | 27
-rw-r--r--  activestorage/test/controllers/disk_controller_test.rb | 29
-rw-r--r--  activestorage/test/controllers/previews_controller_test.rb | 24
-rw-r--r--  activestorage/test/controllers/representations_controller_test.rb | 61
-rw-r--r--  activestorage/test/controllers/variants_controller_test.rb | 23
-rw-r--r--  activestorage/test/database/setup.rb | 2
-rw-r--r--  activestorage/test/dummy/config/application.rb | 6
-rw-r--r--  activestorage/test/dummy/config/environments/test.rb | 3
-rw-r--r--  activestorage/test/dummy/config/secrets.yml | 2
-rw-r--r--  activestorage/test/fixtures/files/favicon.ico | bin 0 -> 16958 bytes
-rw-r--r--  activestorage/test/fixtures/files/icon.psd | bin 0 -> 37441 bytes
-rw-r--r--  activestorage/test/fixtures/files/icon.svg | 13
-rw-r--r--  activestorage/test/fixtures/files/image.gif | bin 0 -> 2032 bytes
-rw-r--r--  activestorage/test/fixtures/files/racecar_rotated.jpg | bin 0 -> 1124060 bytes
-rw-r--r--  activestorage/test/fixtures/files/video_with_rectangular_samples.mp4 | bin 0 -> 361535 bytes
-rw-r--r--  activestorage/test/fixtures/files/video_with_undefined_display_aspect_ratio.mp4 | bin 0 -> 128737 bytes
-rw-r--r--  activestorage/test/jobs/purge_job_test.rb | 27
-rw-r--r--  activestorage/test/models/attached/many_test.rb | 596
-rw-r--r--  activestorage/test/models/attached/one_test.rb | 513
-rw-r--r--  activestorage/test/models/attachments_test.rb | 220
-rw-r--r--  activestorage/test/models/blob_test.rb | 153
-rw-r--r--  activestorage/test/models/presence_validation_test.rb | 30
-rw-r--r--  activestorage/test/models/preview_test.rb | 10
-rw-r--r--  activestorage/test/models/reflection_test.rb | 34
-rw-r--r--  activestorage/test/models/representation_test.rb | 2
-rw-r--r--  activestorage/test/models/variant_test.rb | 161
-rw-r--r--  activestorage/test/previewer/mupdf_previewer_test.rb (renamed from activestorage/test/previewer/pdf_previewer_test.rb) | 6
-rw-r--r--  activestorage/test/previewer/poppler_pdf_previewer_test.rb | 23
-rw-r--r--  activestorage/test/previewer/video_previewer_test.rb | 5
-rw-r--r--  activestorage/test/service/azure_storage_service_test.rb | 19
-rw-r--r--  activestorage/test/service/configurations.example.yml | 3
-rw-r--r--  activestorage/test/service/configurations.yml.enc | bin 2864 -> 2848 bytes
-rw-r--r--  activestorage/test/service/configurator_test.rb | 7
-rw-r--r--  activestorage/test/service/disk_service_test.rb | 8
-rw-r--r--  activestorage/test/service/gcs_service_test.rb | 21
-rw-r--r--  activestorage/test/service/mirror_service_test.rb | 44
-rw-r--r--  activestorage/test/service/s3_service_test.rb | 6
-rw-r--r--  activestorage/test/service/shared_service_tests.rb | 54
-rw-r--r--  activestorage/test/template/image_tag_test.rb | 2
-rw-r--r--  activestorage/test/test_helper.rb | 46
-rw-r--r--  activestorage/webpack.config.js | 27
-rw-r--r--  activestorage/yarn.lock | 1491
123 files changed, 4746 insertions, 2374 deletions
diff --git a/activestorage/.babelrc b/activestorage/.babelrc
index a8211d329f..ed751f8745 100644
--- a/activestorage/.babelrc
+++ b/activestorage/.babelrc
@@ -1,5 +1,8 @@
{
"presets": [
["env", { "modules": false } ]
+ ],
+ "plugins": [
+ "external-helpers"
]
}
diff --git a/activestorage/.gitignore b/activestorage/.gitignore
index a532335bdd..3e78878ffc 100644
--- a/activestorage/.gitignore
+++ b/activestorage/.gitignore
@@ -1,6 +1,6 @@
-.byebug_history
-node_modules
-test/dummy/db/*.sqlite3
-test/dummy/db/*.sqlite3-journal
-test/dummy/log/*.log
-test/dummy/tmp/
+/src/
+/test/dummy/db/*.sqlite3
+/test/dummy/db/*.sqlite3-journal
+/test/dummy/log/*.log
+/test/dummy/tmp/
+/test/service/configurations.yml
diff --git a/activestorage/CHANGELOG.md b/activestorage/CHANGELOG.md
index 358552313f..8bfda4799e 100644
--- a/activestorage/CHANGELOG.md
+++ b/activestorage/CHANGELOG.md
@@ -1,3 +1,97 @@
-* Added to Rails.
+* Added the `ActiveStorage::SetCurrent` concern for custom Active Storage
+ controllers that can't inherit from `ActiveStorage::BaseController`.
- *DHH*
+ *George Claghorn*
+
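For illustration, a minimal sketch of how the new concern might be used (not part of this commit; `Blobs::ProxyController` and its action are hypothetical):

```ruby
# A controller that cannot inherit from ActiveStorage::BaseController can
# include ActiveStorage::SetCurrent so ActiveStorage::Current.host is set
# from the incoming request before any service URLs are generated.
class Blobs::ProxyController < ApplicationController
  include ActiveStorage::SetCurrent

  def show
    blob = ActiveStorage::Blob.find_signed(params[:signed_id])
    redirect_to blob.service_url
  end
end
```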
+* Active Storage error classes like `ActiveStorage::IntegrityError` and
+ `ActiveStorage::UnrepresentableError` now inherit from `ActiveStorage::Error`
+ instead of `StandardError`. This permits rescuing `ActiveStorage::Error` to
+ handle all Active Storage errors.
+
+ *Andrei Makarov*, *George Claghorn*
+
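For example, a single rescue can now cover integrity failures, unrepresentable blobs, and the rest of the Active Storage errors (a sketch; the attachment and file names are illustrative):

```ruby
begin
  user.avatar.attach(io: File.open("/path/to/avatar.png"), filename: "avatar.png")
  user.avatar.variant(resize_to_fit: [100, 100]).processed
rescue ActiveStorage::Error => error
  # ActiveStorage::IntegrityError, ActiveStorage::UnrepresentableError, and
  # friends all descend from ActiveStorage::Error now.
  Rails.logger.warn("Active Storage failure: #{error.message}")
end
```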
+* Uploaded files assigned to a record are persisted to storage when the record
+ is saved instead of immediately.
+
+ In Rails 5.2, the following causes an uploaded file in `params[:avatar]` to
+ be stored:
+
+ ```ruby
+ @user.avatar = params[:avatar]
+ ```
+
+ In Rails 6, the uploaded file is stored when `@user` is successfully saved.
+
+ *George Claghorn*
+
+* Add the ability to reflect on defined attachments using the existing
+ ActiveRecord reflection mechanism.
+
+ *Kevin Deisz*
+
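A sketch of the reflection API in use (illustrative model and attachment names):

```ruby
class User < ApplicationRecord
  has_one_attached :avatar
  has_many_attached :highlights
end

# Attachments are exposed through the familiar Active Record reflection API:
User.reflect_on_attachment(:avatar).macro    # => :has_one_attached
User.reflect_on_all_attachments.map(&:name)  # => [:avatar, :highlights]
```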
+* Variant arguments of `false` or `nil` will no longer be passed to the
+ processor. For example, the following will not have the monochrome
+ variation applied:
+
+ ```ruby
+ avatar.variant(monochrome: false)
+ ```
+
+ *Jacob Smith*
+
+* Generated attachment getter and setter methods are created
+ within the model's `GeneratedAssociationMethods` module to
+ allow overriding and composition using `super`.
+
+ *Josh Susser*, *Jamon Douglas*
+
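A sketch of what that enables (the override and its logging are purely illustrative):

```ruby
class User < ApplicationRecord
  has_one_attached :avatar

  # The generated #avatar= now lives in User::GeneratedAssociationMethods,
  # so a method defined directly on the class can wrap it and call super.
  def avatar=(attachable)
    Rails.logger.info("Assigning a new avatar for user #{id}")
    super
  end
end
```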
+* Add `ActiveStorage::Blob#open`, which downloads a blob to a tempfile on disk
+ and yields the tempfile. Deprecate `ActiveStorage::Downloading`.
+
+ *David Robertson*, *George Claghorn*
+
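A sketch of the new API (assumes `user.avatar` is attached; the checksum step is only an example of working with the yielded tempfile):

```ruby
user.avatar.blob.open do |tempfile|
  # The blob has been downloaded to a Tempfile on disk; it is cleaned up
  # automatically when the block returns.
  digest = Digest::MD5.file(tempfile.path).base64digest
  Rails.logger.info("Downloaded #{tempfile.size} bytes (MD5 #{digest})")
end
```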
+* Pass in `identify: false` as an argument when providing a `content_type` for
+ `ActiveStorage::Attached::{One,Many}#attach` to bypass automatic content
+ type inference. For example:
+
+ ```ruby
+ @message.image.attach(
+ io: File.open('/path/to/file'),
+ filename: 'file.pdf',
+ content_type: 'application/pdf',
+ identify: false
+ )
+ ```
+
+ *Ryan Davidson*
+
+* The Google Cloud Storage service properly supports streaming downloads.
+ It now requires version 1.11 or newer of the google-cloud-storage gem.
+
+ *George Claghorn*
+
+* Use the [ImageProcessing](https://github.com/janko-m/image_processing) gem
+ for Active Storage variants, and deprecate the MiniMagick backend.
+
+ This means that variants are now automatically oriented if the original
+ image was rotated. Also, in addition to the existing ImageMagick
+ operations, variants can now use `:resize_to_fit`, `:resize_to_fill`, and
+ other ImageProcessing macros. These are now recommended over raw `:resize`,
+ as they also sharpen the thumbnail after resizing.
+
+ The ImageProcessing gem also comes with a backend implemented on
+ [libvips](http://jcupitt.github.io/libvips/), an alternative to
+ ImageMagick which has significantly better performance than
+ ImageMagick in most cases, both in terms of speed and memory usage. In
+ Active Storage it's now possible to switch to the libvips backend by
+ changing `Rails.application.config.active_storage.variant_processor` to
+ `:vips`.
+
+ *Janko Marohnić*
+
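A sketch of both pieces, assuming the image_processing gem is installed (and libvips, if `:vips` is selected):

```ruby
# e.g. in config/application.rb or an initializer: opt into the libvips
# backend (the default remains :mini_magick).
Rails.application.config.active_storage.variant_processor = :vips

# ImageProcessing macros can replace raw ImageMagick options; resize_to_fill
# also sharpens the thumbnail, and the new transformer auto-orients rotated
# originals.
thumbnail = user.avatar.variant(resize_to_fill: [100, 100])
thumbnail.processed.service_url
```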
+* Rails 6 requires Ruby 2.4.1 or newer.
+
+ *Jeremy Daer*
+
+
+Please check [5-2-stable](https://github.com/rails/rails/blob/5-2-stable/activestorage/CHANGELOG.md) for previous changes.
diff --git a/activestorage/MIT-LICENSE b/activestorage/MIT-LICENSE
index 4e1c6cad79..eed89ac398 100644
--- a/activestorage/MIT-LICENSE
+++ b/activestorage/MIT-LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2017 David Heinemeier Hansson, Basecamp
+Copyright (c) 2017-2018 David Heinemeier Hansson, Basecamp
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
diff --git a/activestorage/README.md b/activestorage/README.md
index 78e4463c5a..b677721d95 100644
--- a/activestorage/README.md
+++ b/activestorage/README.md
@@ -1,10 +1,10 @@
# Active Storage
-Active Storage makes it simple to upload and reference files in cloud services like Amazon S3, Google Cloud Storage, or Microsoft Azure Storage, and attach those files to Active Records. Supports having one main service and mirrors in other services for redundancy. It also provides a disk service for testing or local deployments, but the focus is on cloud storage.
+Active Storage makes it simple to upload and reference files in cloud services like [Amazon S3](https://aws.amazon.com/s3/), [Google Cloud Storage](https://cloud.google.com/storage/docs/), or [Microsoft Azure Storage](https://azure.microsoft.com/en-us/services/storage/), and attach those files to Active Records. Supports having one main service and mirrors in other services for redundancy. It also provides a disk service for testing or local deployments, but the focus is on cloud storage.
Files can be uploaded from the server to the cloud or directly from the client to the cloud.
-Image files can furthermore be transformed using on-demand variants for quality, aspect ratio, size, or any other [MiniMagick](https://github.com/minimagick/minimagick) supported transformation.
+Image files can furthermore be transformed using on-demand variants for quality, aspect ratio, size, or any other [MiniMagick](https://github.com/minimagick/minimagick) or [Vips](http://www.rubydoc.info/gems/ruby-vips/Vips/Image) supported transformation.
## Compared to other storage solutions
@@ -99,7 +99,7 @@ Variation of image attachment:
```erb
<%# Hitting the variant URL will lazy transform the original blob and then redirect to its new service location %>
-<%= image_tag user.avatar.variant(resize: "100x100") %>
+<%= image_tag user.avatar.variant(resize_to_fit: [100, 100]) %>
```
## Direct uploads
@@ -143,3 +143,17 @@ Active Storage, with its included JavaScript library, supports uploading directl
## License
Active Storage is released under the [MIT License](https://opensource.org/licenses/MIT).
+
+## Support
+
+API documentation is at:
+
+* http://api.rubyonrails.org
+
+Bug reports for the Ruby on Rails project can be filed here:
+
+* https://github.com/rails/rails/issues
+
+Feature requests should be discussed on the rails-core mailing list here:
+
+* https://groups.google.com/forum/?fromgroups#!forum/rubyonrails-core
diff --git a/activestorage/Rakefile b/activestorage/Rakefile
index 2aa4d2a76f..2e86d3d860 100644
--- a/activestorage/Rakefile
+++ b/activestorage/Rakefile
@@ -4,11 +4,12 @@ require "bundler/setup"
require "bundler/gem_tasks"
require "rake/testtask"
-Rake::TestTask.new do |test|
- test.libs << "app/controllers"
- test.libs << "test"
- test.test_files = FileList["test/**/*_test.rb"]
- test.warning = false
+Rake::TestTask.new do |t|
+ t.libs << "app/controllers"
+ t.libs << "test"
+ t.test_files = FileList["test/**/*_test.rb"]
+ t.verbose = true
+ t.warning = true
end
task :package
diff --git a/activestorage/activestorage.gemspec b/activestorage/activestorage.gemspec
index 911e1a0469..cb1bb00a25 100644
--- a/activestorage/activestorage.gemspec
+++ b/activestorage/activestorage.gemspec
@@ -9,7 +9,7 @@ Gem::Specification.new do |s|
s.summary = "Local and cloud file storage framework."
s.description = "Attach cloud and local files in Rails applications."
- s.required_ruby_version = ">= 2.2.2"
+ s.required_ruby_version = ">= 2.4.1"
s.license = "MIT"
@@ -17,7 +17,7 @@ Gem::Specification.new do |s|
s.email = "david@loudthinking.com"
s.homepage = "http://rubyonrails.org"
- s.files = Dir["CHANGELOG.md", "MIT-LICENSE", "README.md", "lib/**/*", "app/**/*", "config/**/*"]
+ s.files = Dir["CHANGELOG.md", "MIT-LICENSE", "README.md", "lib/**/*", "app/**/*", "config/**/*", "db/**/*"]
s.require_path = "lib"
s.metadata = {
@@ -27,4 +27,6 @@ Gem::Specification.new do |s|
s.add_dependency "actionpack", version
s.add_dependency "activerecord", version
+
+ s.add_dependency "marcel", "~> 0.3.1"
end
diff --git a/activestorage/app/assets/javascripts/activestorage.js b/activestorage/app/assets/javascripts/activestorage.js
index c1c0a2f6d9..375eb6b533 100644
--- a/activestorage/app/assets/javascripts/activestorage.js
+++ b/activestorage/app/assets/javascripts/activestorage.js
@@ -1 +1,939 @@
-!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ActiveStorage=e():t.ActiveStorage=e()}(this,function(){return function(t){function e(n){if(r[n])return r[n].exports;var i=r[n]={i:n,l:!1,exports:{}};return t[n].call(i.exports,i,i.exports,e),i.l=!0,i.exports}var r={};return e.m=t,e.c=r,e.d=function(t,r,n){e.o(t,r)||Object.defineProperty(t,r,{configurable:!1,enumerable:!0,get:n})},e.n=function(t){var r=t&&t.__esModule?function(){return t.default}:function(){return t};return e.d(r,"a",r),r},e.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},e.p="",e(e.s=2)}([function(t,e,r){"use strict";function n(t){var e=a(document.head,'meta[name="'+t+'"]');if(e)return e.getAttribute("content")}function i(t,e){return"string"==typeof t&&(e=t,t=document),o(t.querySelectorAll(e))}function a(t,e){return"string"==typeof t&&(e=t,t=document),t.querySelector(e)}function u(t,e){var r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},n=r.bubbles,i=r.cancelable,a=r.detail,u=document.createEvent("Event");return u.initEvent(e,n||!0,i||!0),u.detail=a||{},t.dispatchEvent(u),u}function o(t){return Array.isArray(t)?t:Array.from?Array.from(t):[].slice.call(t)}e.d=n,e.c=i,e.b=a,e.a=u,e.e=o},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function i(t,e){if(t&&"function"==typeof t[e]){for(var r=arguments.length,n=Array(r>2?r-2:0),i=2;i<r;i++)n[i-2]=arguments[i];return t[e].apply(t,n)}}r.d(e,"a",function(){return c});var a=r(6),u=r(8),o=r(9),s=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),f=0,c=function(){function t(e,r,i){n(this,t),this.id=++f,this.file=e,this.url=r,this.delegate=i}return s(t,[{key:"create",value:function(t){var e=this;a.a.create(this.file,function(r,n){var a=new u.a(e.file,n,e.url);i(e.delegate,"directUploadWillCreateBlobWithXHR",a.xhr),a.create(function(r){if(r)t(r);else{var n=new o.a(a);i(e.delegate,"directUploadWillStoreFileWithXHR",n.xhr),n.create(function(e){e?t(e):t(null,a.toJSON())})}})})}}]),t}()},function(t,e,r){"use strict";function n(){window.ActiveStorage&&Object(i.a)()}Object.defineProperty(e,"__esModule",{value:!0});var i=r(3),a=r(1);r.d(e,"start",function(){return i.a}),r.d(e,"DirectUpload",function(){return a.a}),setTimeout(n,1)},function(t,e,r){"use strict";function n(){d||(d=!0,document.addEventListener("submit",i),document.addEventListener("ajax:before",a))}function i(t){u(t)}function a(t){"FORM"==t.target.tagName&&u(t)}function u(t){var e=t.target;if(e.hasAttribute(l))return void t.preventDefault();var r=new c.a(e),n=r.inputs;n.length&&(t.preventDefault(),e.setAttribute(l,""),n.forEach(s),r.start(function(t){e.removeAttribute(l),t?n.forEach(f):o(e)}))}function o(t){var e=Object(h.b)(t,"input[type=submit]");if(e){var r=e,n=r.disabled;e.disabled=!1,e.focus(),e.click(),e.disabled=n}else e=document.createElement("input"),e.type="submit",e.style="display:none",t.appendChild(e),e.click(),t.removeChild(e)}function s(t){t.disabled=!0}function f(t){t.disabled=!1}e.a=n;var c=r(4),h=r(0),l="data-direct-uploads-processing",d=!1},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}r.d(e,"a",function(){return s});var 
i=r(5),a=r(0),u=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),o="input[type=file][data-direct-upload-url]:not([disabled])",s=function(){function t(e){n(this,t),this.form=e,this.inputs=Object(a.c)(e,o).filter(function(t){return t.files.length})}return u(t,[{key:"start",value:function(t){var e=this,r=this.createDirectUploadControllers();this.dispatch("start"),function n(){var i=r.shift();i?i.start(function(r){r?(t(r),e.dispatch("end")):n()}):(t(),e.dispatch("end"))}()}},{key:"createDirectUploadControllers",value:function(){var t=[];return this.inputs.forEach(function(e){Object(a.e)(e.files).forEach(function(r){var n=new i.a(e,r);t.push(n)})}),t}},{key:"dispatch",value:function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return Object(a.a)(this.form,"direct-uploads:"+t,{detail:e})}}]),t}()},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}r.d(e,"a",function(){return o});var i=r(1),a=r(0),u=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),o=function(){function t(e,r){n(this,t),this.input=e,this.file=r,this.directUpload=new i.a(this.file,this.url,this),this.dispatch("initialize")}return u(t,[{key:"start",value:function(t){var e=this,r=document.createElement("input");r.type="hidden",r.name=this.input.name,this.input.insertAdjacentElement("beforebegin",r),this.dispatch("start"),this.directUpload.create(function(n,i){n?(r.parentNode.removeChild(r),e.dispatchError(n)):r.value=i.signed_id,e.dispatch("end"),t(n)})}},{key:"uploadRequestDidProgress",value:function(t){var e=t.loaded/t.total*100;e&&this.dispatch("progress",{progress:e})}},{key:"dispatch",value:function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return e.file=this.file,e.id=this.directUpload.id,Object(a.a)(this.input,"direct-upload:"+t,{detail:e})}},{key:"dispatchError",value:function(t){this.dispatch("error",{error:t}).defaultPrevented||alert(t)}},{key:"directUploadWillCreateBlobWithXHR",value:function(t){this.dispatch("before-blob-request",{xhr:t})}},{key:"directUploadWillStoreFileWithXHR",value:function(t){var e=this;this.dispatch("before-storage-request",{xhr:t}),t.upload.addEventListener("progress",function(t){return e.uploadRequestDidProgress(t)})}},{key:"url",get:function(){return this.input.getAttribute("data-direct-upload-url")}}]),t}()},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}r.d(e,"a",function(){return s});var i=r(7),a=r.n(i),u=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),o=File.prototype.slice||File.prototype.mozSlice||File.prototype.webkitSlice,s=function(){function t(e){n(this,t),this.file=e,this.chunkSize=2097152,this.chunkCount=Math.ceil(this.file.size/this.chunkSize),this.chunkIndex=0}return u(t,null,[{key:"create",value:function(e,r){new t(e).create(r)}}]),u(t,[{key:"create",value:function(t){var e=this;this.callback=t,this.md5Buffer=new 
a.a.ArrayBuffer,this.fileReader=new FileReader,this.fileReader.addEventListener("load",function(t){return e.fileReaderDidLoad(t)}),this.fileReader.addEventListener("error",function(t){return e.fileReaderDidError(t)}),this.readNextChunk()}},{key:"fileReaderDidLoad",value:function(t){if(this.md5Buffer.append(t.target.result),!this.readNextChunk()){var e=this.md5Buffer.end(!0),r=btoa(e);this.callback(null,r)}}},{key:"fileReaderDidError",value:function(t){this.callback("Error reading "+this.file.name)}},{key:"readNextChunk",value:function(){if(this.chunkIndex<this.chunkCount){var t=this.chunkIndex*this.chunkSize,e=Math.min(t+this.chunkSize,this.file.size),r=o.call(this.file,t,e);return this.fileReader.readAsArrayBuffer(r),this.chunkIndex++,!0}return!1}}]),t}()},function(t,e,r){!function(e){t.exports=e()}(function(t){"use strict";function e(t,e){var r=t[0],n=t[1],i=t[2],a=t[3];r+=(n&i|~n&a)+e[0]-680876936|0,r=(r<<7|r>>>25)+n|0,a+=(r&n|~r&i)+e[1]-389564586|0,a=(a<<12|a>>>20)+r|0,i+=(a&r|~a&n)+e[2]+606105819|0,i=(i<<17|i>>>15)+a|0,n+=(i&a|~i&r)+e[3]-1044525330|0,n=(n<<22|n>>>10)+i|0,r+=(n&i|~n&a)+e[4]-176418897|0,r=(r<<7|r>>>25)+n|0,a+=(r&n|~r&i)+e[5]+1200080426|0,a=(a<<12|a>>>20)+r|0,i+=(a&r|~a&n)+e[6]-1473231341|0,i=(i<<17|i>>>15)+a|0,n+=(i&a|~i&r)+e[7]-45705983|0,n=(n<<22|n>>>10)+i|0,r+=(n&i|~n&a)+e[8]+1770035416|0,r=(r<<7|r>>>25)+n|0,a+=(r&n|~r&i)+e[9]-1958414417|0,a=(a<<12|a>>>20)+r|0,i+=(a&r|~a&n)+e[10]-42063|0,i=(i<<17|i>>>15)+a|0,n+=(i&a|~i&r)+e[11]-1990404162|0,n=(n<<22|n>>>10)+i|0,r+=(n&i|~n&a)+e[12]+1804603682|0,r=(r<<7|r>>>25)+n|0,a+=(r&n|~r&i)+e[13]-40341101|0,a=(a<<12|a>>>20)+r|0,i+=(a&r|~a&n)+e[14]-1502002290|0,i=(i<<17|i>>>15)+a|0,n+=(i&a|~i&r)+e[15]+1236535329|0,n=(n<<22|n>>>10)+i|0,r+=(n&a|i&~a)+e[1]-165796510|0,r=(r<<5|r>>>27)+n|0,a+=(r&i|n&~i)+e[6]-1069501632|0,a=(a<<9|a>>>23)+r|0,i+=(a&n|r&~n)+e[11]+643717713|0,i=(i<<14|i>>>18)+a|0,n+=(i&r|a&~r)+e[0]-373897302|0,n=(n<<20|n>>>12)+i|0,r+=(n&a|i&~a)+e[5]-701558691|0,r=(r<<5|r>>>27)+n|0,a+=(r&i|n&~i)+e[10]+38016083|0,a=(a<<9|a>>>23)+r|0,i+=(a&n|r&~n)+e[15]-660478335|0,i=(i<<14|i>>>18)+a|0,n+=(i&r|a&~r)+e[4]-405537848|0,n=(n<<20|n>>>12)+i|0,r+=(n&a|i&~a)+e[9]+568446438|0,r=(r<<5|r>>>27)+n|0,a+=(r&i|n&~i)+e[14]-1019803690|0,a=(a<<9|a>>>23)+r|0,i+=(a&n|r&~n)+e[3]-187363961|0,i=(i<<14|i>>>18)+a|0,n+=(i&r|a&~r)+e[8]+1163531501|0,n=(n<<20|n>>>12)+i|0,r+=(n&a|i&~a)+e[13]-1444681467|0,r=(r<<5|r>>>27)+n|0,a+=(r&i|n&~i)+e[2]-51403784|0,a=(a<<9|a>>>23)+r|0,i+=(a&n|r&~n)+e[7]+1735328473|0,i=(i<<14|i>>>18)+a|0,n+=(i&r|a&~r)+e[12]-1926607734|0,n=(n<<20|n>>>12)+i|0,r+=(n^i^a)+e[5]-378558|0,r=(r<<4|r>>>28)+n|0,a+=(r^n^i)+e[8]-2022574463|0,a=(a<<11|a>>>21)+r|0,i+=(a^r^n)+e[11]+1839030562|0,i=(i<<16|i>>>16)+a|0,n+=(i^a^r)+e[14]-35309556|0,n=(n<<23|n>>>9)+i|0,r+=(n^i^a)+e[1]-1530992060|0,r=(r<<4|r>>>28)+n|0,a+=(r^n^i)+e[4]+1272893353|0,a=(a<<11|a>>>21)+r|0,i+=(a^r^n)+e[7]-155497632|0,i=(i<<16|i>>>16)+a|0,n+=(i^a^r)+e[10]-1094730640|0,n=(n<<23|n>>>9)+i|0,r+=(n^i^a)+e[13]+681279174|0,r=(r<<4|r>>>28)+n|0,a+=(r^n^i)+e[0]-358537222|0,a=(a<<11|a>>>21)+r|0,i+=(a^r^n)+e[3]-722521979|0,i=(i<<16|i>>>16)+a|0,n+=(i^a^r)+e[6]+76029189|0,n=(n<<23|n>>>9)+i|0,r+=(n^i^a)+e[9]-640364487|0,r=(r<<4|r>>>28)+n|0,a+=(r^n^i)+e[12]-421815835|0,a=(a<<11|a>>>21)+r|0,i+=(a^r^n)+e[15]+530742520|0,i=(i<<16|i>>>16)+a|0,n+=(i^a^r)+e[2]-995338651|0,n=(n<<23|n>>>9)+i|0,r+=(i^(n|~a))+e[0]-198630844|0,r=(r<<6|r>>>26)+n|0,a+=(n^(r|~i))+e[7]+1126891415|0,a=(a<<10|a>>>22)+r|0,i+=(r^(a|~n))+e[14]-1416354905|0,i=(i<<15|i>>>17)+a|0,n+=(a^(i|~r))+e[5]-57434055|0,n=(n<<21|n>>>11)+i|0,r+=(i^(n
|~a))+e[12]+1700485571|0,r=(r<<6|r>>>26)+n|0,a+=(n^(r|~i))+e[3]-1894986606|0,a=(a<<10|a>>>22)+r|0,i+=(r^(a|~n))+e[10]-1051523|0,i=(i<<15|i>>>17)+a|0,n+=(a^(i|~r))+e[1]-2054922799|0,n=(n<<21|n>>>11)+i|0,r+=(i^(n|~a))+e[8]+1873313359|0,r=(r<<6|r>>>26)+n|0,a+=(n^(r|~i))+e[15]-30611744|0,a=(a<<10|a>>>22)+r|0,i+=(r^(a|~n))+e[6]-1560198380|0,i=(i<<15|i>>>17)+a|0,n+=(a^(i|~r))+e[13]+1309151649|0,n=(n<<21|n>>>11)+i|0,r+=(i^(n|~a))+e[4]-145523070|0,r=(r<<6|r>>>26)+n|0,a+=(n^(r|~i))+e[11]-1120210379|0,a=(a<<10|a>>>22)+r|0,i+=(r^(a|~n))+e[2]+718787259|0,i=(i<<15|i>>>17)+a|0,n+=(a^(i|~r))+e[9]-343485551|0,n=(n<<21|n>>>11)+i|0,t[0]=r+t[0]|0,t[1]=n+t[1]|0,t[2]=i+t[2]|0,t[3]=a+t[3]|0}function r(t){var e,r=[];for(e=0;e<64;e+=4)r[e>>2]=t.charCodeAt(e)+(t.charCodeAt(e+1)<<8)+(t.charCodeAt(e+2)<<16)+(t.charCodeAt(e+3)<<24);return r}function n(t){var e,r=[];for(e=0;e<64;e+=4)r[e>>2]=t[e]+(t[e+1]<<8)+(t[e+2]<<16)+(t[e+3]<<24);return r}function i(t){var n,i,a,u,o,s,f=t.length,c=[1732584193,-271733879,-1732584194,271733878];for(n=64;n<=f;n+=64)e(c,r(t.substring(n-64,n)));for(t=t.substring(n-64),i=t.length,a=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],n=0;n<i;n+=1)a[n>>2]|=t.charCodeAt(n)<<(n%4<<3);if(a[n>>2]|=128<<(n%4<<3),n>55)for(e(c,a),n=0;n<16;n+=1)a[n]=0;return u=8*f,u=u.toString(16).match(/(.*?)(.{0,8})$/),o=parseInt(u[2],16),s=parseInt(u[1],16)||0,a[14]=o,a[15]=s,e(c,a),c}function a(t){var r,i,a,u,o,s,f=t.length,c=[1732584193,-271733879,-1732584194,271733878];for(r=64;r<=f;r+=64)e(c,n(t.subarray(r-64,r)));for(t=r-64<f?t.subarray(r-64):new Uint8Array(0),i=t.length,a=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],r=0;r<i;r+=1)a[r>>2]|=t[r]<<(r%4<<3);if(a[r>>2]|=128<<(r%4<<3),r>55)for(e(c,a),r=0;r<16;r+=1)a[r]=0;return u=8*f,u=u.toString(16).match(/(.*?)(.{0,8})$/),o=parseInt(u[2],16),s=parseInt(u[1],16)||0,a[14]=o,a[15]=s,e(c,a),c}function u(t){var e,r="";for(e=0;e<4;e+=1)r+=p[t>>8*e+4&15]+p[t>>8*e&15];return r}function o(t){var e;for(e=0;e<t.length;e+=1)t[e]=u(t[e]);return t.join("")}function s(t){return/[\u0080-\uFFFF]/.test(t)&&(t=unescape(encodeURIComponent(t))),t}function f(t,e){var r,n=t.length,i=new ArrayBuffer(n),a=new Uint8Array(i);for(r=0;r<n;r+=1)a[r]=t.charCodeAt(r);return e?a:i}function c(t){return String.fromCharCode.apply(null,new Uint8Array(t))}function h(t,e,r){var n=new Uint8Array(t.byteLength+e.byteLength);return n.set(new Uint8Array(t)),n.set(new Uint8Array(e),t.byteLength),r?n:n.buffer}function l(t){var e,r=[],n=t.length;for(e=0;e<n-1;e+=2)r.push(parseInt(t.substr(e,2),16));return String.fromCharCode.apply(String,r)}function d(){this.reset()}var p=["0","1","2","3","4","5","6","7","8","9","a","b","c","d","e","f"];return"5d41402abc4b2a76b9719d911017c592"!==o(i("hello"))&&function(t,e){var r=(65535&t)+(65535&e);return(t>>16)+(e>>16)+(r>>16)<<16|65535&r},"undefined"==typeof ArrayBuffer||ArrayBuffer.prototype.slice||function(){function e(t,e){return t=0|t||0,t<0?Math.max(t+e,0):Math.min(t,e)}ArrayBuffer.prototype.slice=function(r,n){var i,a,u,o,s=this.byteLength,f=e(r,s),c=s;return n!==t&&(c=e(n,s)),f>c?new ArrayBuffer(0):(i=c-f,a=new ArrayBuffer(i),u=new Uint8Array(a),o=new Uint8Array(this,f,i),u.set(o),a)}}(),d.prototype.append=function(t){return this.appendBinary(s(t)),this},d.prototype.appendBinary=function(t){this._buff+=t,this._length+=t.length;var n,i=this._buff.length;for(n=64;n<=i;n+=64)e(this._hash,r(this._buff.substring(n-64,n)));return this._buff=this._buff.substring(n-64),this},d.prototype.end=function(t){var 
e,r,n=this._buff,i=n.length,a=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0];for(e=0;e<i;e+=1)a[e>>2]|=n.charCodeAt(e)<<(e%4<<3);return this._finish(a,i),r=o(this._hash),t&&(r=l(r)),this.reset(),r},d.prototype.reset=function(){return this._buff="",this._length=0,this._hash=[1732584193,-271733879,-1732584194,271733878],this},d.prototype.getState=function(){return{buff:this._buff,length:this._length,hash:this._hash}},d.prototype.setState=function(t){return this._buff=t.buff,this._length=t.length,this._hash=t.hash,this},d.prototype.destroy=function(){delete this._hash,delete this._buff,delete this._length},d.prototype._finish=function(t,r){var n,i,a,u=r;if(t[u>>2]|=128<<(u%4<<3),u>55)for(e(this._hash,t),u=0;u<16;u+=1)t[u]=0;n=8*this._length,n=n.toString(16).match(/(.*?)(.{0,8})$/),i=parseInt(n[2],16),a=parseInt(n[1],16)||0,t[14]=i,t[15]=a,e(this._hash,t)},d.hash=function(t,e){return d.hashBinary(s(t),e)},d.hashBinary=function(t,e){var r=i(t),n=o(r);return e?l(n):n},d.ArrayBuffer=function(){this.reset()},d.ArrayBuffer.prototype.append=function(t){var r,i=h(this._buff.buffer,t,!0),a=i.length;for(this._length+=t.byteLength,r=64;r<=a;r+=64)e(this._hash,n(i.subarray(r-64,r)));return this._buff=r-64<a?new Uint8Array(i.buffer.slice(r-64)):new Uint8Array(0),this},d.ArrayBuffer.prototype.end=function(t){var e,r,n=this._buff,i=n.length,a=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0];for(e=0;e<i;e+=1)a[e>>2]|=n[e]<<(e%4<<3);return this._finish(a,i),r=o(this._hash),t&&(r=l(r)),this.reset(),r},d.ArrayBuffer.prototype.reset=function(){return this._buff=new Uint8Array(0),this._length=0,this._hash=[1732584193,-271733879,-1732584194,271733878],this},d.ArrayBuffer.prototype.getState=function(){var t=d.prototype.getState.call(this);return t.buff=c(t.buff),t},d.ArrayBuffer.prototype.setState=function(t){return t.buff=f(t.buff,!0),d.prototype.setState.call(this,t)},d.ArrayBuffer.prototype.destroy=d.prototype.destroy,d.ArrayBuffer.prototype._finish=d.prototype._finish,d.ArrayBuffer.hash=function(t,e){var r=a(new Uint8Array(t)),n=o(r);return e?l(n):n},d})},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}r.d(e,"a",function(){return u});var i=r(0),a=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),u=function(){function t(e,r,a){var u=this;n(this,t),this.file=e,this.attributes={filename:e.name,content_type:e.type,byte_size:e.size,checksum:r},this.xhr=new XMLHttpRequest,this.xhr.open("POST",a,!0),this.xhr.responseType="json",this.xhr.setRequestHeader("Content-Type","application/json"),this.xhr.setRequestHeader("Accept","application/json"),this.xhr.setRequestHeader("X-Requested-With","XMLHttpRequest"),this.xhr.setRequestHeader("X-CSRF-Token",Object(i.d)("csrf-token")),this.xhr.addEventListener("load",function(t){return u.requestDidLoad(t)}),this.xhr.addEventListener("error",function(t){return u.requestDidError(t)})}return a(t,[{key:"create",value:function(t){this.callback=t,this.xhr.send(JSON.stringify({blob:this.attributes}))}},{key:"requestDidLoad",value:function(t){var e=this.xhr,r=e.status,n=e.response;if(r>=200&&r<300){var i=n.direct_upload;delete n.direct_upload,this.attributes=n,this.directUploadData=i,this.callback(null,this.toJSON())}else this.requestDidError(t)}},{key:"requestDidError",value:function(t){this.callback('Error creating Blob for "'+this.file.name+'". 
Status: '+this.xhr.status)}},{key:"toJSON",value:function(){var t={};for(var e in this.attributes)t[e]=this.attributes[e];return t}}]),t}()},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}r.d(e,"a",function(){return a});var i=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),a=function(){function t(e){var r=this;n(this,t),this.blob=e,this.file=e.file;var i=e.directUploadData,a=i.url,u=i.headers;this.xhr=new XMLHttpRequest,this.xhr.open("PUT",a,!0),this.xhr.responseType="text";for(var o in u)this.xhr.setRequestHeader(o,u[o]);this.xhr.addEventListener("load",function(t){return r.requestDidLoad(t)}),this.xhr.addEventListener("error",function(t){return r.requestDidError(t)})}return i(t,[{key:"create",value:function(t){this.callback=t,this.xhr.send(this.file)}},{key:"requestDidLoad",value:function(t){var e=this.xhr,r=e.status,n=e.response;r>=200&&r<300?this.callback(null,n):this.requestDidError(t)}},{key:"requestDidError",value:function(t){this.callback('Error storing "'+this.file.name+'". Status: '+this.xhr.status)}}]),t}()}])}); \ No newline at end of file
+(function(global, factory) {
+ typeof exports === "object" && typeof module !== "undefined" ? factory(exports) : typeof define === "function" && define.amd ? define([ "exports" ], factory) : factory(global.ActiveStorage = {});
+})(this, function(exports) {
+ "use strict";
+ function createCommonjsModule(fn, module) {
+ return module = {
+ exports: {}
+ }, fn(module, module.exports), module.exports;
+ }
+ var sparkMd5 = createCommonjsModule(function(module, exports) {
+ (function(factory) {
+ {
+ module.exports = factory();
+ }
+ })(function(undefined) {
+ var hex_chr = [ "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f" ];
+ function md5cycle(x, k) {
+ var a = x[0], b = x[1], c = x[2], d = x[3];
+ a += (b & c | ~b & d) + k[0] - 680876936 | 0;
+ a = (a << 7 | a >>> 25) + b | 0;
+ d += (a & b | ~a & c) + k[1] - 389564586 | 0;
+ d = (d << 12 | d >>> 20) + a | 0;
+ c += (d & a | ~d & b) + k[2] + 606105819 | 0;
+ c = (c << 17 | c >>> 15) + d | 0;
+ b += (c & d | ~c & a) + k[3] - 1044525330 | 0;
+ b = (b << 22 | b >>> 10) + c | 0;
+ a += (b & c | ~b & d) + k[4] - 176418897 | 0;
+ a = (a << 7 | a >>> 25) + b | 0;
+ d += (a & b | ~a & c) + k[5] + 1200080426 | 0;
+ d = (d << 12 | d >>> 20) + a | 0;
+ c += (d & a | ~d & b) + k[6] - 1473231341 | 0;
+ c = (c << 17 | c >>> 15) + d | 0;
+ b += (c & d | ~c & a) + k[7] - 45705983 | 0;
+ b = (b << 22 | b >>> 10) + c | 0;
+ a += (b & c | ~b & d) + k[8] + 1770035416 | 0;
+ a = (a << 7 | a >>> 25) + b | 0;
+ d += (a & b | ~a & c) + k[9] - 1958414417 | 0;
+ d = (d << 12 | d >>> 20) + a | 0;
+ c += (d & a | ~d & b) + k[10] - 42063 | 0;
+ c = (c << 17 | c >>> 15) + d | 0;
+ b += (c & d | ~c & a) + k[11] - 1990404162 | 0;
+ b = (b << 22 | b >>> 10) + c | 0;
+ a += (b & c | ~b & d) + k[12] + 1804603682 | 0;
+ a = (a << 7 | a >>> 25) + b | 0;
+ d += (a & b | ~a & c) + k[13] - 40341101 | 0;
+ d = (d << 12 | d >>> 20) + a | 0;
+ c += (d & a | ~d & b) + k[14] - 1502002290 | 0;
+ c = (c << 17 | c >>> 15) + d | 0;
+ b += (c & d | ~c & a) + k[15] + 1236535329 | 0;
+ b = (b << 22 | b >>> 10) + c | 0;
+ a += (b & d | c & ~d) + k[1] - 165796510 | 0;
+ a = (a << 5 | a >>> 27) + b | 0;
+ d += (a & c | b & ~c) + k[6] - 1069501632 | 0;
+ d = (d << 9 | d >>> 23) + a | 0;
+ c += (d & b | a & ~b) + k[11] + 643717713 | 0;
+ c = (c << 14 | c >>> 18) + d | 0;
+ b += (c & a | d & ~a) + k[0] - 373897302 | 0;
+ b = (b << 20 | b >>> 12) + c | 0;
+ a += (b & d | c & ~d) + k[5] - 701558691 | 0;
+ a = (a << 5 | a >>> 27) + b | 0;
+ d += (a & c | b & ~c) + k[10] + 38016083 | 0;
+ d = (d << 9 | d >>> 23) + a | 0;
+ c += (d & b | a & ~b) + k[15] - 660478335 | 0;
+ c = (c << 14 | c >>> 18) + d | 0;
+ b += (c & a | d & ~a) + k[4] - 405537848 | 0;
+ b = (b << 20 | b >>> 12) + c | 0;
+ a += (b & d | c & ~d) + k[9] + 568446438 | 0;
+ a = (a << 5 | a >>> 27) + b | 0;
+ d += (a & c | b & ~c) + k[14] - 1019803690 | 0;
+ d = (d << 9 | d >>> 23) + a | 0;
+ c += (d & b | a & ~b) + k[3] - 187363961 | 0;
+ c = (c << 14 | c >>> 18) + d | 0;
+ b += (c & a | d & ~a) + k[8] + 1163531501 | 0;
+ b = (b << 20 | b >>> 12) + c | 0;
+ a += (b & d | c & ~d) + k[13] - 1444681467 | 0;
+ a = (a << 5 | a >>> 27) + b | 0;
+ d += (a & c | b & ~c) + k[2] - 51403784 | 0;
+ d = (d << 9 | d >>> 23) + a | 0;
+ c += (d & b | a & ~b) + k[7] + 1735328473 | 0;
+ c = (c << 14 | c >>> 18) + d | 0;
+ b += (c & a | d & ~a) + k[12] - 1926607734 | 0;
+ b = (b << 20 | b >>> 12) + c | 0;
+ a += (b ^ c ^ d) + k[5] - 378558 | 0;
+ a = (a << 4 | a >>> 28) + b | 0;
+ d += (a ^ b ^ c) + k[8] - 2022574463 | 0;
+ d = (d << 11 | d >>> 21) + a | 0;
+ c += (d ^ a ^ b) + k[11] + 1839030562 | 0;
+ c = (c << 16 | c >>> 16) + d | 0;
+ b += (c ^ d ^ a) + k[14] - 35309556 | 0;
+ b = (b << 23 | b >>> 9) + c | 0;
+ a += (b ^ c ^ d) + k[1] - 1530992060 | 0;
+ a = (a << 4 | a >>> 28) + b | 0;
+ d += (a ^ b ^ c) + k[4] + 1272893353 | 0;
+ d = (d << 11 | d >>> 21) + a | 0;
+ c += (d ^ a ^ b) + k[7] - 155497632 | 0;
+ c = (c << 16 | c >>> 16) + d | 0;
+ b += (c ^ d ^ a) + k[10] - 1094730640 | 0;
+ b = (b << 23 | b >>> 9) + c | 0;
+ a += (b ^ c ^ d) + k[13] + 681279174 | 0;
+ a = (a << 4 | a >>> 28) + b | 0;
+ d += (a ^ b ^ c) + k[0] - 358537222 | 0;
+ d = (d << 11 | d >>> 21) + a | 0;
+ c += (d ^ a ^ b) + k[3] - 722521979 | 0;
+ c = (c << 16 | c >>> 16) + d | 0;
+ b += (c ^ d ^ a) + k[6] + 76029189 | 0;
+ b = (b << 23 | b >>> 9) + c | 0;
+ a += (b ^ c ^ d) + k[9] - 640364487 | 0;
+ a = (a << 4 | a >>> 28) + b | 0;
+ d += (a ^ b ^ c) + k[12] - 421815835 | 0;
+ d = (d << 11 | d >>> 21) + a | 0;
+ c += (d ^ a ^ b) + k[15] + 530742520 | 0;
+ c = (c << 16 | c >>> 16) + d | 0;
+ b += (c ^ d ^ a) + k[2] - 995338651 | 0;
+ b = (b << 23 | b >>> 9) + c | 0;
+ a += (c ^ (b | ~d)) + k[0] - 198630844 | 0;
+ a = (a << 6 | a >>> 26) + b | 0;
+ d += (b ^ (a | ~c)) + k[7] + 1126891415 | 0;
+ d = (d << 10 | d >>> 22) + a | 0;
+ c += (a ^ (d | ~b)) + k[14] - 1416354905 | 0;
+ c = (c << 15 | c >>> 17) + d | 0;
+ b += (d ^ (c | ~a)) + k[5] - 57434055 | 0;
+ b = (b << 21 | b >>> 11) + c | 0;
+ a += (c ^ (b | ~d)) + k[12] + 1700485571 | 0;
+ a = (a << 6 | a >>> 26) + b | 0;
+ d += (b ^ (a | ~c)) + k[3] - 1894986606 | 0;
+ d = (d << 10 | d >>> 22) + a | 0;
+ c += (a ^ (d | ~b)) + k[10] - 1051523 | 0;
+ c = (c << 15 | c >>> 17) + d | 0;
+ b += (d ^ (c | ~a)) + k[1] - 2054922799 | 0;
+ b = (b << 21 | b >>> 11) + c | 0;
+ a += (c ^ (b | ~d)) + k[8] + 1873313359 | 0;
+ a = (a << 6 | a >>> 26) + b | 0;
+ d += (b ^ (a | ~c)) + k[15] - 30611744 | 0;
+ d = (d << 10 | d >>> 22) + a | 0;
+ c += (a ^ (d | ~b)) + k[6] - 1560198380 | 0;
+ c = (c << 15 | c >>> 17) + d | 0;
+ b += (d ^ (c | ~a)) + k[13] + 1309151649 | 0;
+ b = (b << 21 | b >>> 11) + c | 0;
+ a += (c ^ (b | ~d)) + k[4] - 145523070 | 0;
+ a = (a << 6 | a >>> 26) + b | 0;
+ d += (b ^ (a | ~c)) + k[11] - 1120210379 | 0;
+ d = (d << 10 | d >>> 22) + a | 0;
+ c += (a ^ (d | ~b)) + k[2] + 718787259 | 0;
+ c = (c << 15 | c >>> 17) + d | 0;
+ b += (d ^ (c | ~a)) + k[9] - 343485551 | 0;
+ b = (b << 21 | b >>> 11) + c | 0;
+ x[0] = a + x[0] | 0;
+ x[1] = b + x[1] | 0;
+ x[2] = c + x[2] | 0;
+ x[3] = d + x[3] | 0;
+ }
+ function md5blk(s) {
+ var md5blks = [], i;
+ for (i = 0; i < 64; i += 4) {
+ md5blks[i >> 2] = s.charCodeAt(i) + (s.charCodeAt(i + 1) << 8) + (s.charCodeAt(i + 2) << 16) + (s.charCodeAt(i + 3) << 24);
+ }
+ return md5blks;
+ }
+ function md5blk_array(a) {
+ var md5blks = [], i;
+ for (i = 0; i < 64; i += 4) {
+ md5blks[i >> 2] = a[i] + (a[i + 1] << 8) + (a[i + 2] << 16) + (a[i + 3] << 24);
+ }
+ return md5blks;
+ }
+ function md51(s) {
+ var n = s.length, state = [ 1732584193, -271733879, -1732584194, 271733878 ], i, length, tail, tmp, lo, hi;
+ for (i = 64; i <= n; i += 64) {
+ md5cycle(state, md5blk(s.substring(i - 64, i)));
+ }
+ s = s.substring(i - 64);
+ length = s.length;
+ tail = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ];
+ for (i = 0; i < length; i += 1) {
+ tail[i >> 2] |= s.charCodeAt(i) << (i % 4 << 3);
+ }
+ tail[i >> 2] |= 128 << (i % 4 << 3);
+ if (i > 55) {
+ md5cycle(state, tail);
+ for (i = 0; i < 16; i += 1) {
+ tail[i] = 0;
+ }
+ }
+ tmp = n * 8;
+ tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
+ lo = parseInt(tmp[2], 16);
+ hi = parseInt(tmp[1], 16) || 0;
+ tail[14] = lo;
+ tail[15] = hi;
+ md5cycle(state, tail);
+ return state;
+ }
+ function md51_array(a) {
+ var n = a.length, state = [ 1732584193, -271733879, -1732584194, 271733878 ], i, length, tail, tmp, lo, hi;
+ for (i = 64; i <= n; i += 64) {
+ md5cycle(state, md5blk_array(a.subarray(i - 64, i)));
+ }
+ a = i - 64 < n ? a.subarray(i - 64) : new Uint8Array(0);
+ length = a.length;
+ tail = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ];
+ for (i = 0; i < length; i += 1) {
+ tail[i >> 2] |= a[i] << (i % 4 << 3);
+ }
+ tail[i >> 2] |= 128 << (i % 4 << 3);
+ if (i > 55) {
+ md5cycle(state, tail);
+ for (i = 0; i < 16; i += 1) {
+ tail[i] = 0;
+ }
+ }
+ tmp = n * 8;
+ tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
+ lo = parseInt(tmp[2], 16);
+ hi = parseInt(tmp[1], 16) || 0;
+ tail[14] = lo;
+ tail[15] = hi;
+ md5cycle(state, tail);
+ return state;
+ }
+ function rhex(n) {
+ var s = "", j;
+ for (j = 0; j < 4; j += 1) {
+ s += hex_chr[n >> j * 8 + 4 & 15] + hex_chr[n >> j * 8 & 15];
+ }
+ return s;
+ }
+ function hex(x) {
+ var i;
+ for (i = 0; i < x.length; i += 1) {
+ x[i] = rhex(x[i]);
+ }
+ return x.join("");
+ }
+ if (hex(md51("hello")) !== "5d41402abc4b2a76b9719d911017c592") ;
+ if (typeof ArrayBuffer !== "undefined" && !ArrayBuffer.prototype.slice) {
+ (function() {
+ function clamp(val, length) {
+ val = val | 0 || 0;
+ if (val < 0) {
+ return Math.max(val + length, 0);
+ }
+ return Math.min(val, length);
+ }
+ ArrayBuffer.prototype.slice = function(from, to) {
+ var length = this.byteLength, begin = clamp(from, length), end = length, num, target, targetArray, sourceArray;
+ if (to !== undefined) {
+ end = clamp(to, length);
+ }
+ if (begin > end) {
+ return new ArrayBuffer(0);
+ }
+ num = end - begin;
+ target = new ArrayBuffer(num);
+ targetArray = new Uint8Array(target);
+ sourceArray = new Uint8Array(this, begin, num);
+ targetArray.set(sourceArray);
+ return target;
+ };
+ })();
+ }
+ function toUtf8(str) {
+ if (/[\u0080-\uFFFF]/.test(str)) {
+ str = unescape(encodeURIComponent(str));
+ }
+ return str;
+ }
+ function utf8Str2ArrayBuffer(str, returnUInt8Array) {
+ var length = str.length, buff = new ArrayBuffer(length), arr = new Uint8Array(buff), i;
+ for (i = 0; i < length; i += 1) {
+ arr[i] = str.charCodeAt(i);
+ }
+ return returnUInt8Array ? arr : buff;
+ }
+ function arrayBuffer2Utf8Str(buff) {
+ return String.fromCharCode.apply(null, new Uint8Array(buff));
+ }
+ function concatenateArrayBuffers(first, second, returnUInt8Array) {
+ var result = new Uint8Array(first.byteLength + second.byteLength);
+ result.set(new Uint8Array(first));
+ result.set(new Uint8Array(second), first.byteLength);
+ return returnUInt8Array ? result : result.buffer;
+ }
+ function hexToBinaryString(hex) {
+ var bytes = [], length = hex.length, x;
+ for (x = 0; x < length - 1; x += 2) {
+ bytes.push(parseInt(hex.substr(x, 2), 16));
+ }
+ return String.fromCharCode.apply(String, bytes);
+ }
+ function SparkMD5() {
+ this.reset();
+ }
+ SparkMD5.prototype.append = function(str) {
+ this.appendBinary(toUtf8(str));
+ return this;
+ };
+ SparkMD5.prototype.appendBinary = function(contents) {
+ this._buff += contents;
+ this._length += contents.length;
+ var length = this._buff.length, i;
+ for (i = 64; i <= length; i += 64) {
+ md5cycle(this._hash, md5blk(this._buff.substring(i - 64, i)));
+ }
+ this._buff = this._buff.substring(i - 64);
+ return this;
+ };
+ SparkMD5.prototype.end = function(raw) {
+ var buff = this._buff, length = buff.length, i, tail = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ], ret;
+ for (i = 0; i < length; i += 1) {
+ tail[i >> 2] |= buff.charCodeAt(i) << (i % 4 << 3);
+ }
+ this._finish(tail, length);
+ ret = hex(this._hash);
+ if (raw) {
+ ret = hexToBinaryString(ret);
+ }
+ this.reset();
+ return ret;
+ };
+ SparkMD5.prototype.reset = function() {
+ this._buff = "";
+ this._length = 0;
+ this._hash = [ 1732584193, -271733879, -1732584194, 271733878 ];
+ return this;
+ };
+ SparkMD5.prototype.getState = function() {
+ return {
+ buff: this._buff,
+ length: this._length,
+ hash: this._hash
+ };
+ };
+ SparkMD5.prototype.setState = function(state) {
+ this._buff = state.buff;
+ this._length = state.length;
+ this._hash = state.hash;
+ return this;
+ };
+ SparkMD5.prototype.destroy = function() {
+ delete this._hash;
+ delete this._buff;
+ delete this._length;
+ };
+ SparkMD5.prototype._finish = function(tail, length) {
+ var i = length, tmp, lo, hi;
+ tail[i >> 2] |= 128 << (i % 4 << 3);
+ if (i > 55) {
+ md5cycle(this._hash, tail);
+ for (i = 0; i < 16; i += 1) {
+ tail[i] = 0;
+ }
+ }
+ tmp = this._length * 8;
+ tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
+ lo = parseInt(tmp[2], 16);
+ hi = parseInt(tmp[1], 16) || 0;
+ tail[14] = lo;
+ tail[15] = hi;
+ md5cycle(this._hash, tail);
+ };
+ SparkMD5.hash = function(str, raw) {
+ return SparkMD5.hashBinary(toUtf8(str), raw);
+ };
+ SparkMD5.hashBinary = function(content, raw) {
+ var hash = md51(content), ret = hex(hash);
+ return raw ? hexToBinaryString(ret) : ret;
+ };
+ SparkMD5.ArrayBuffer = function() {
+ this.reset();
+ };
+ SparkMD5.ArrayBuffer.prototype.append = function(arr) {
+ var buff = concatenateArrayBuffers(this._buff.buffer, arr, true), length = buff.length, i;
+ this._length += arr.byteLength;
+ for (i = 64; i <= length; i += 64) {
+ md5cycle(this._hash, md5blk_array(buff.subarray(i - 64, i)));
+ }
+ this._buff = i - 64 < length ? new Uint8Array(buff.buffer.slice(i - 64)) : new Uint8Array(0);
+ return this;
+ };
+ SparkMD5.ArrayBuffer.prototype.end = function(raw) {
+ var buff = this._buff, length = buff.length, tail = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ], i, ret;
+ for (i = 0; i < length; i += 1) {
+ tail[i >> 2] |= buff[i] << (i % 4 << 3);
+ }
+ this._finish(tail, length);
+ ret = hex(this._hash);
+ if (raw) {
+ ret = hexToBinaryString(ret);
+ }
+ this.reset();
+ return ret;
+ };
+ SparkMD5.ArrayBuffer.prototype.reset = function() {
+ this._buff = new Uint8Array(0);
+ this._length = 0;
+ this._hash = [ 1732584193, -271733879, -1732584194, 271733878 ];
+ return this;
+ };
+ SparkMD5.ArrayBuffer.prototype.getState = function() {
+ var state = SparkMD5.prototype.getState.call(this);
+ state.buff = arrayBuffer2Utf8Str(state.buff);
+ return state;
+ };
+ SparkMD5.ArrayBuffer.prototype.setState = function(state) {
+ state.buff = utf8Str2ArrayBuffer(state.buff, true);
+ return SparkMD5.prototype.setState.call(this, state);
+ };
+ SparkMD5.ArrayBuffer.prototype.destroy = SparkMD5.prototype.destroy;
+ SparkMD5.ArrayBuffer.prototype._finish = SparkMD5.prototype._finish;
+ SparkMD5.ArrayBuffer.hash = function(arr, raw) {
+ var hash = md51_array(new Uint8Array(arr)), ret = hex(hash);
+ return raw ? hexToBinaryString(ret) : ret;
+ };
+ return SparkMD5;
+ });
+ });
+ var classCallCheck = function(instance, Constructor) {
+ if (!(instance instanceof Constructor)) {
+ throw new TypeError("Cannot call a class as a function");
+ }
+ };
+ var createClass = function() {
+ function defineProperties(target, props) {
+ for (var i = 0; i < props.length; i++) {
+ var descriptor = props[i];
+ descriptor.enumerable = descriptor.enumerable || false;
+ descriptor.configurable = true;
+ if ("value" in descriptor) descriptor.writable = true;
+ Object.defineProperty(target, descriptor.key, descriptor);
+ }
+ }
+ return function(Constructor, protoProps, staticProps) {
+ if (protoProps) defineProperties(Constructor.prototype, protoProps);
+ if (staticProps) defineProperties(Constructor, staticProps);
+ return Constructor;
+ };
+ }();
+ var fileSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
+ var FileChecksum = function() {
+ createClass(FileChecksum, null, [ {
+ key: "create",
+ value: function create(file, callback) {
+ var instance = new FileChecksum(file);
+ instance.create(callback);
+ }
+ } ]);
+ function FileChecksum(file) {
+ classCallCheck(this, FileChecksum);
+ this.file = file;
+ this.chunkSize = 2097152;
+ this.chunkCount = Math.ceil(this.file.size / this.chunkSize);
+ this.chunkIndex = 0;
+ }
+ createClass(FileChecksum, [ {
+ key: "create",
+ value: function create(callback) {
+ var _this = this;
+ this.callback = callback;
+ this.md5Buffer = new sparkMd5.ArrayBuffer();
+ this.fileReader = new FileReader();
+ this.fileReader.addEventListener("load", function(event) {
+ return _this.fileReaderDidLoad(event);
+ });
+ this.fileReader.addEventListener("error", function(event) {
+ return _this.fileReaderDidError(event);
+ });
+ this.readNextChunk();
+ }
+ }, {
+ key: "fileReaderDidLoad",
+ value: function fileReaderDidLoad(event) {
+ this.md5Buffer.append(event.target.result);
+ if (!this.readNextChunk()) {
+ var binaryDigest = this.md5Buffer.end(true);
+ var base64digest = btoa(binaryDigest);
+ this.callback(null, base64digest);
+ }
+ }
+ }, {
+ key: "fileReaderDidError",
+ value: function fileReaderDidError(event) {
+ this.callback("Error reading " + this.file.name);
+ }
+ }, {
+ key: "readNextChunk",
+ value: function readNextChunk() {
+ if (this.chunkIndex < this.chunkCount || this.chunkIndex == 0 && this.chunkCount == 0) {
+ var start = this.chunkIndex * this.chunkSize;
+ var end = Math.min(start + this.chunkSize, this.file.size);
+ var bytes = fileSlice.call(this.file, start, end);
+ this.fileReader.readAsArrayBuffer(bytes);
+ this.chunkIndex++;
+ return true;
+ } else {
+ return false;
+ }
+ }
+ } ]);
+ return FileChecksum;
+ }();
+ function getMetaValue(name) {
+ var element = findElement(document.head, 'meta[name="' + name + '"]');
+ if (element) {
+ return element.getAttribute("content");
+ }
+ }
+ function findElements(root, selector) {
+ if (typeof root == "string") {
+ selector = root;
+ root = document;
+ }
+ var elements = root.querySelectorAll(selector);
+ return toArray$1(elements);
+ }
+ function findElement(root, selector) {
+ if (typeof root == "string") {
+ selector = root;
+ root = document;
+ }
+ return root.querySelector(selector);
+ }
+ function dispatchEvent(element, type) {
+ var eventInit = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
+ var disabled = element.disabled;
+ var bubbles = eventInit.bubbles, cancelable = eventInit.cancelable, detail = eventInit.detail;
+ var event = document.createEvent("Event");
+ event.initEvent(type, bubbles || true, cancelable || true);
+ event.detail = detail || {};
+ try {
+ element.disabled = false;
+ element.dispatchEvent(event);
+ } finally {
+ element.disabled = disabled;
+ }
+ return event;
+ }
+ function toArray$1(value) {
+ if (Array.isArray(value)) {
+ return value;
+ } else if (Array.from) {
+ return Array.from(value);
+ } else {
+ return [].slice.call(value);
+ }
+ }
+ var BlobRecord = function() {
+ function BlobRecord(file, checksum, url) {
+ var _this = this;
+ classCallCheck(this, BlobRecord);
+ this.file = file;
+ this.attributes = {
+ filename: file.name,
+ content_type: file.type,
+ byte_size: file.size,
+ checksum: checksum
+ };
+ this.xhr = new XMLHttpRequest();
+ this.xhr.open("POST", url, true);
+ this.xhr.responseType = "json";
+ this.xhr.setRequestHeader("Content-Type", "application/json");
+ this.xhr.setRequestHeader("Accept", "application/json");
+ this.xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest");
+ this.xhr.setRequestHeader("X-CSRF-Token", getMetaValue("csrf-token"));
+ this.xhr.addEventListener("load", function(event) {
+ return _this.requestDidLoad(event);
+ });
+ this.xhr.addEventListener("error", function(event) {
+ return _this.requestDidError(event);
+ });
+ }
+ createClass(BlobRecord, [ {
+ key: "create",
+ value: function create(callback) {
+ this.callback = callback;
+ this.xhr.send(JSON.stringify({
+ blob: this.attributes
+ }));
+ }
+ }, {
+ key: "requestDidLoad",
+ value: function requestDidLoad(event) {
+ if (this.status >= 200 && this.status < 300) {
+ var response = this.response;
+ var direct_upload = response.direct_upload;
+ delete response.direct_upload;
+ this.attributes = response;
+ this.directUploadData = direct_upload;
+ this.callback(null, this.toJSON());
+ } else {
+ this.requestDidError(event);
+ }
+ }
+ }, {
+ key: "requestDidError",
+ value: function requestDidError(event) {
+ this.callback('Error creating Blob for "' + this.file.name + '". Status: ' + this.status);
+ }
+ }, {
+ key: "toJSON",
+ value: function toJSON() {
+ var result = {};
+ for (var key in this.attributes) {
+ result[key] = this.attributes[key];
+ }
+ return result;
+ }
+ }, {
+ key: "status",
+ get: function get$$1() {
+ return this.xhr.status;
+ }
+ }, {
+ key: "response",
+ get: function get$$1() {
+ var _xhr = this.xhr, responseType = _xhr.responseType, response = _xhr.response;
+ if (responseType == "json") {
+ return response;
+ } else {
+ return JSON.parse(response);
+ }
+ }
+ } ]);
+ return BlobRecord;
+ }();
+ var BlobUpload = function() {
+ function BlobUpload(blob) {
+ var _this = this;
+ classCallCheck(this, BlobUpload);
+ this.blob = blob;
+ this.file = blob.file;
+ var _blob$directUploadDat = blob.directUploadData, url = _blob$directUploadDat.url, headers = _blob$directUploadDat.headers;
+ this.xhr = new XMLHttpRequest();
+ this.xhr.open("PUT", url, true);
+ this.xhr.responseType = "text";
+ for (var key in headers) {
+ this.xhr.setRequestHeader(key, headers[key]);
+ }
+ this.xhr.addEventListener("load", function(event) {
+ return _this.requestDidLoad(event);
+ });
+ this.xhr.addEventListener("error", function(event) {
+ return _this.requestDidError(event);
+ });
+ }
+ createClass(BlobUpload, [ {
+ key: "create",
+ value: function create(callback) {
+ this.callback = callback;
+ this.xhr.send(this.file.slice());
+ }
+ }, {
+ key: "requestDidLoad",
+ value: function requestDidLoad(event) {
+ var _xhr = this.xhr, status = _xhr.status, response = _xhr.response;
+ if (status >= 200 && status < 300) {
+ this.callback(null, response);
+ } else {
+ this.requestDidError(event);
+ }
+ }
+ }, {
+ key: "requestDidError",
+ value: function requestDidError(event) {
+ this.callback('Error storing "' + this.file.name + '". Status: ' + this.xhr.status);
+ }
+ } ]);
+ return BlobUpload;
+ }();
+ var id = 0;
+ var DirectUpload = function() {
+ function DirectUpload(file, url, delegate) {
+ classCallCheck(this, DirectUpload);
+ this.id = ++id;
+ this.file = file;
+ this.url = url;
+ this.delegate = delegate;
+ }
+ createClass(DirectUpload, [ {
+ key: "create",
+ value: function create(callback) {
+ var _this = this;
+ FileChecksum.create(this.file, function(error, checksum) {
+ if (error) {
+ callback(error);
+ return;
+ }
+ var blob = new BlobRecord(_this.file, checksum, _this.url);
+ notify(_this.delegate, "directUploadWillCreateBlobWithXHR", blob.xhr);
+ blob.create(function(error) {
+ if (error) {
+ callback(error);
+ } else {
+ var upload = new BlobUpload(blob);
+ notify(_this.delegate, "directUploadWillStoreFileWithXHR", upload.xhr);
+ upload.create(function(error) {
+ if (error) {
+ callback(error);
+ } else {
+ callback(null, blob.toJSON());
+ }
+ });
+ }
+ });
+ });
+ }
+ } ]);
+ return DirectUpload;
+ }();
+ function notify(object, methodName) {
+ if (object && typeof object[methodName] == "function") {
+ for (var _len = arguments.length, messages = Array(_len > 2 ? _len - 2 : 0), _key = 2; _key < _len; _key++) {
+ messages[_key - 2] = arguments[_key];
+ }
+ return object[methodName].apply(object, messages);
+ }
+ }
+ var DirectUploadController = function() {
+ function DirectUploadController(input, file) {
+ classCallCheck(this, DirectUploadController);
+ this.input = input;
+ this.file = file;
+ this.directUpload = new DirectUpload(this.file, this.url, this);
+ this.dispatch("initialize");
+ }
+ createClass(DirectUploadController, [ {
+ key: "start",
+ value: function start(callback) {
+ var _this = this;
+ var hiddenInput = document.createElement("input");
+ hiddenInput.type = "hidden";
+ hiddenInput.name = this.input.name;
+ this.input.insertAdjacentElement("beforebegin", hiddenInput);
+ this.dispatch("start");
+ this.directUpload.create(function(error, attributes) {
+ if (error) {
+ hiddenInput.parentNode.removeChild(hiddenInput);
+ _this.dispatchError(error);
+ } else {
+ hiddenInput.value = attributes.signed_id;
+ }
+ _this.dispatch("end");
+ callback(error);
+ });
+ }
+ }, {
+ key: "uploadRequestDidProgress",
+ value: function uploadRequestDidProgress(event) {
+ var progress = event.loaded / event.total * 100;
+ if (progress) {
+ this.dispatch("progress", {
+ progress: progress
+ });
+ }
+ }
+ }, {
+ key: "dispatch",
+ value: function dispatch(name) {
+ var detail = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+ detail.file = this.file;
+ detail.id = this.directUpload.id;
+ return dispatchEvent(this.input, "direct-upload:" + name, {
+ detail: detail
+ });
+ }
+ }, {
+ key: "dispatchError",
+ value: function dispatchError(error) {
+ var event = this.dispatch("error", {
+ error: error
+ });
+ if (!event.defaultPrevented) {
+ alert(error);
+ }
+ }
+ }, {
+ key: "directUploadWillCreateBlobWithXHR",
+ value: function directUploadWillCreateBlobWithXHR(xhr) {
+ this.dispatch("before-blob-request", {
+ xhr: xhr
+ });
+ }
+ }, {
+ key: "directUploadWillStoreFileWithXHR",
+ value: function directUploadWillStoreFileWithXHR(xhr) {
+ var _this2 = this;
+ this.dispatch("before-storage-request", {
+ xhr: xhr
+ });
+ xhr.upload.addEventListener("progress", function(event) {
+ return _this2.uploadRequestDidProgress(event);
+ });
+ }
+ }, {
+ key: "url",
+ get: function get$$1() {
+ return this.input.getAttribute("data-direct-upload-url");
+ }
+ } ]);
+ return DirectUploadController;
+ }();
+ var inputSelector = "input[type=file][data-direct-upload-url]:not([disabled])";
+ var DirectUploadsController = function() {
+ function DirectUploadsController(form) {
+ classCallCheck(this, DirectUploadsController);
+ this.form = form;
+ this.inputs = findElements(form, inputSelector).filter(function(input) {
+ return input.files.length;
+ });
+ }
+ createClass(DirectUploadsController, [ {
+ key: "start",
+ value: function start(callback) {
+ var _this = this;
+ var controllers = this.createDirectUploadControllers();
+ var startNextController = function startNextController() {
+ var controller = controllers.shift();
+ if (controller) {
+ controller.start(function(error) {
+ if (error) {
+ callback(error);
+ _this.dispatch("end");
+ } else {
+ startNextController();
+ }
+ });
+ } else {
+ callback();
+ _this.dispatch("end");
+ }
+ };
+ this.dispatch("start");
+ startNextController();
+ }
+ }, {
+ key: "createDirectUploadControllers",
+ value: function createDirectUploadControllers() {
+ var controllers = [];
+ this.inputs.forEach(function(input) {
+ toArray$1(input.files).forEach(function(file) {
+ var controller = new DirectUploadController(input, file);
+ controllers.push(controller);
+ });
+ });
+ return controllers;
+ }
+ }, {
+ key: "dispatch",
+ value: function dispatch(name) {
+ var detail = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+ return dispatchEvent(this.form, "direct-uploads:" + name, {
+ detail: detail
+ });
+ }
+ } ]);
+ return DirectUploadsController;
+ }();
+ var processingAttribute = "data-direct-uploads-processing";
+ var submitButtonsByForm = new WeakMap();
+ var started = false;
+ function start() {
+ if (!started) {
+ started = true;
+ document.addEventListener("click", didClick, true);
+ document.addEventListener("submit", didSubmitForm);
+ document.addEventListener("ajax:before", didSubmitRemoteElement);
+ }
+ }
+ function didClick(event) {
+ var target = event.target;
+ if (target.tagName == "INPUT" && target.type == "submit" && target.form) {
+ submitButtonsByForm.set(target.form, target);
+ }
+ }
+ function didSubmitForm(event) {
+ handleFormSubmissionEvent(event);
+ }
+ function didSubmitRemoteElement(event) {
+ if (event.target.tagName == "FORM") {
+ handleFormSubmissionEvent(event);
+ }
+ }
+ function handleFormSubmissionEvent(event) {
+ var form = event.target;
+ if (form.hasAttribute(processingAttribute)) {
+ event.preventDefault();
+ return;
+ }
+ var controller = new DirectUploadsController(form);
+ var inputs = controller.inputs;
+ if (inputs.length) {
+ event.preventDefault();
+ form.setAttribute(processingAttribute, "");
+ inputs.forEach(disable);
+ controller.start(function(error) {
+ form.removeAttribute(processingAttribute);
+ if (error) {
+ inputs.forEach(enable);
+ } else {
+ submitForm(form);
+ }
+ });
+ }
+ }
+ function submitForm(form) {
+ var button = submitButtonsByForm.get(form) || findElement(form, "input[type=submit]");
+ if (button) {
+ var _button = button, disabled = _button.disabled;
+ button.disabled = false;
+ button.focus();
+ button.click();
+ button.disabled = disabled;
+ } else {
+ button = document.createElement("input");
+ button.type = "submit";
+ button.style.display = "none";
+ form.appendChild(button);
+ button.click();
+ form.removeChild(button);
+ }
+ submitButtonsByForm.delete(form);
+ }
+ function disable(input) {
+ input.disabled = true;
+ }
+ function enable(input) {
+ input.disabled = false;
+ }
+ function autostart() {
+ if (window.ActiveStorage) {
+ start();
+ }
+ }
+ setTimeout(autostart, 1);
+ exports.start = start;
+ exports.DirectUpload = DirectUpload;
+ Object.defineProperty(exports, "__esModule", {
+ value: true
+ });
+});
diff --git a/activestorage/app/controllers/active_storage/base_controller.rb b/activestorage/app/controllers/active_storage/base_controller.rb
new file mode 100644
index 0000000000..b27d2bd8aa
--- /dev/null
+++ b/activestorage/app/controllers/active_storage/base_controller.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+# The base class for all Active Storage controllers.
+class ActiveStorage::BaseController < ActionController::Base
+ include ActiveStorage::SetCurrent
+
+ protect_from_forgery with: :exception
+end
diff --git a/activestorage/app/controllers/active_storage/blobs_controller.rb b/activestorage/app/controllers/active_storage/blobs_controller.rb
index a17e3852f9..4fc3fbe824 100644
--- a/activestorage/app/controllers/active_storage/blobs_controller.rb
+++ b/activestorage/app/controllers/active_storage/blobs_controller.rb
@@ -4,13 +4,11 @@
# Note: These URLs are publicly accessible. If you need to enforce access protection beyond the
# security-through-obscurity factor of the signed blob references, you'll need to implement your own
# authenticated redirection controller.
-class ActiveStorage::BlobsController < ActionController::Base
+class ActiveStorage::BlobsController < ActiveStorage::BaseController
+ include ActiveStorage::SetBlob
+
def show
- if blob = ActiveStorage::Blob.find_signed(params[:signed_id])
- expires_in ActiveStorage::Blob.service.url_expires_in
- redirect_to blob.service_url(disposition: params[:disposition])
- else
- head :not_found
- end
+ expires_in ActiveStorage.service_urls_expire_in
+ redirect_to @blob.service_url(disposition: params[:disposition])
end
end
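
The note above leaves "your own authenticated redirection controller" as an exercise. A minimal, hedged sketch of what that could look like, reusing the SetBlob and SetCurrent concerns added later in this diff; ApplicationController, current_user, and can_access? are app-side assumptions, not part of the change:

    # Hypothetical app controller; only the two included concerns come from this diff.
    class AuthenticatedBlobsController < ApplicationController
      include ActiveStorage::SetBlob     # resolves @blob from the signed id, 404s on a bad signature
      include ActiveStorage::SetCurrent  # lets the disk service build URLs for the current host

      def show
        if current_user&.can_access?(@blob)   # can_access? is an assumed authorization hook
          expires_in ActiveStorage.service_urls_expire_in
          redirect_to @blob.service_url(disposition: params[:disposition])
        else
          head :forbidden
        end
      end
    end
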
diff --git a/activestorage/app/controllers/active_storage/direct_uploads_controller.rb b/activestorage/app/controllers/active_storage/direct_uploads_controller.rb
index 205d173648..78b43fc94c 100644
--- a/activestorage/app/controllers/active_storage/direct_uploads_controller.rb
+++ b/activestorage/app/controllers/active_storage/direct_uploads_controller.rb
@@ -3,7 +3,7 @@
# Creates a new blob on the server side in anticipation of a direct-to-service upload from the client side.
# When the client-side upload is completed, the signed_blob_id can be submitted as part of the form to reference
# the blob that was created up front.
-class ActiveStorage::DirectUploadsController < ActionController::Base
+class ActiveStorage::DirectUploadsController < ActiveStorage::BaseController
def create
blob = ActiveStorage::Blob.create_before_direct_upload!(blob_args)
render json: direct_upload_json(blob)
@@ -15,7 +15,7 @@ class ActiveStorage::DirectUploadsController < ActionController::Base
end
def direct_upload_json(blob)
- blob.as_json(methods: :signed_id).merge(direct_upload: {
+ blob.as_json(root: false, methods: :signed_id).merge(direct_upload: {
url: blob.service_url_for_direct_upload,
headers: blob.service_headers_for_direct_upload
})
diff --git a/activestorage/app/controllers/active_storage/disk_controller.rb b/activestorage/app/controllers/active_storage/disk_controller.rb
index a4fd427cb2..75cc11d6ff 100644
--- a/activestorage/app/controllers/active_storage/disk_controller.rb
+++ b/activestorage/app/controllers/active_storage/disk_controller.rb
@@ -4,11 +4,12 @@
# This means using expiring, signed URLs that are meant for immediate access, not permanent linking.
# Always go through the BlobsController, or your own authenticated controller, rather than directly
# to the service url.
-class ActiveStorage::DiskController < ActionController::Base
+class ActiveStorage::DiskController < ActiveStorage::BaseController
+ skip_forgery_protection
+
def show
if key = decode_verified_key
- send_data disk_service.download(key),
- disposition: params[:disposition], content_type: params[:content_type]
+ serve_file disk_service.path_for(key), content_type: params[:content_type], disposition: params[:disposition]
else
head :not_found
end
@@ -38,6 +39,20 @@ class ActiveStorage::DiskController < ActionController::Base
ActiveStorage.verifier.verified(params[:encoded_key], purpose: :blob_key)
end
+ def serve_file(path, content_type:, disposition:)
+ Rack::File.new(nil).serving(request, path).tap do |(status, headers, body)|
+ self.status = status
+ self.response_body = body
+
+ headers.each do |name, value|
+ response.headers[name] = value
+ end
+
+ response.headers["Content-Type"] = content_type || DEFAULT_SEND_FILE_TYPE
+ response.headers["Content-Disposition"] = disposition || DEFAULT_SEND_FILE_DISPOSITION
+ end
+ end
+
def decode_verified_token
ActiveStorage.verifier.verified(params[:encoded_token], purpose: :blob_token)
diff --git a/activestorage/app/controllers/active_storage/previews_controller.rb b/activestorage/app/controllers/active_storage/previews_controller.rb
deleted file mode 100644
index 9e8cf27b6e..0000000000
--- a/activestorage/app/controllers/active_storage/previews_controller.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-# frozen_string_literal: true
-
-class ActiveStorage::PreviewsController < ActionController::Base
- def show
- if blob = ActiveStorage::Blob.find_signed(params[:signed_blob_id])
- expires_in ActiveStorage::Blob.service.url_expires_in
- redirect_to ActiveStorage::Preview.new(blob, params[:variation_key]).processed.service_url(disposition: params[:disposition])
- else
- head :not_found
- end
- end
-end
diff --git a/activestorage/app/controllers/active_storage/representations_controller.rb b/activestorage/app/controllers/active_storage/representations_controller.rb
new file mode 100644
index 0000000000..98e11e5dbb
--- /dev/null
+++ b/activestorage/app/controllers/active_storage/representations_controller.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+# Take a signed permanent reference for a blob representation and turn it into an expiring service URL for download.
+# Note: These URLs are publicly accessible. If you need to enforce access protection beyond the
+# security-through-obscurity factor of the signed blob and variation reference, you'll need to implement your own
+# authenticated redirection controller.
+class ActiveStorage::RepresentationsController < ActiveStorage::BaseController
+ include ActiveStorage::SetBlob
+
+ def show
+ expires_in ActiveStorage.service_urls_expire_in
+ redirect_to @blob.representation(params[:variation_key]).processed.service_url(disposition: params[:disposition])
+ end
+end
diff --git a/activestorage/app/controllers/active_storage/variants_controller.rb b/activestorage/app/controllers/active_storage/variants_controller.rb
deleted file mode 100644
index dc5e78ecc0..0000000000
--- a/activestorage/app/controllers/active_storage/variants_controller.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-# Take a signed permanent reference for a variant and turn it into an expiring service URL for download.
-# Note: These URLs are publicly accessible. If you need to enforce access protection beyond the
-# security-through-obscurity factor of the signed blob and variation reference, you'll need to implement your own
-# authenticated redirection controller.
-class ActiveStorage::VariantsController < ActionController::Base
- def show
- if blob = ActiveStorage::Blob.find_signed(params[:signed_blob_id])
- expires_in ActiveStorage::Blob.service.url_expires_in
- redirect_to ActiveStorage::Variant.new(blob, params[:variation_key]).processed.service_url(disposition: params[:disposition])
- else
- head :not_found
- end
- end
-end
diff --git a/activestorage/app/controllers/concerns/active_storage/set_blob.rb b/activestorage/app/controllers/concerns/active_storage/set_blob.rb
new file mode 100644
index 0000000000..f072954d78
--- /dev/null
+++ b/activestorage/app/controllers/concerns/active_storage/set_blob.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module ActiveStorage::SetBlob #:nodoc:
+ extend ActiveSupport::Concern
+
+ included do
+ before_action :set_blob
+ end
+
+ private
+ def set_blob
+ @blob = ActiveStorage::Blob.find_signed(params[:signed_blob_id] || params[:signed_id])
+ rescue ActiveSupport::MessageVerifier::InvalidSignature
+ head :not_found
+ end
+end
diff --git a/activestorage/app/controllers/concerns/active_storage/set_current.rb b/activestorage/app/controllers/concerns/active_storage/set_current.rb
new file mode 100644
index 0000000000..597afe7064
--- /dev/null
+++ b/activestorage/app/controllers/concerns/active_storage/set_current.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+# Sets the <tt>ActiveStorage::Current.host</tt> attribute, which the disk service uses to generate URLs.
+# Include this concern in custom controllers that call ActiveStorage::Blob#service_url,
+# ActiveStorage::Variant#service_url, or ActiveStorage::Preview#service_url so the disk service can
+# generate URLs using the same host, protocol, and base path as the current request.
+module ActiveStorage::SetCurrent
+ extend ActiveSupport::Concern
+
+ included do
+ before_action do
+ ActiveStorage::Current.host = request.base_url
+ end
+ end
+end
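
Outside the request cycle (jobs, the console) no before_action runs, so the disk service has no host to build URLs with. A hedged sketch of setting it manually through ActiveSupport::CurrentAttributes.set; the host and the blob variable are illustrative:

    # example.com is a placeholder; blob is any ActiveStorage::Blob.
    ActiveStorage::Current.set(host: "https://example.com") do
      blob.service_url(disposition: :inline)
    end
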
diff --git a/activestorage/app/javascript/activestorage/blob_record.js b/activestorage/app/javascript/activestorage/blob_record.js
index 3c6e6b6ba1..ff847892b2 100644
--- a/activestorage/app/javascript/activestorage/blob_record.js
+++ b/activestorage/app/javascript/activestorage/blob_record.js
@@ -22,14 +22,28 @@ export class BlobRecord {
this.xhr.addEventListener("error", event => this.requestDidError(event))
}
+ get status() {
+ return this.xhr.status
+ }
+
+ get response() {
+ const { responseType, response } = this.xhr
+ if (responseType == "json") {
+ return response
+ } else {
+ // Shim for IE 11: https://connect.microsoft.com/IE/feedback/details/794808
+ return JSON.parse(response)
+ }
+ }
+
create(callback) {
this.callback = callback
this.xhr.send(JSON.stringify({ blob: this.attributes }))
}
requestDidLoad(event) {
- const { status, response } = this.xhr
- if (status >= 200 && status < 300) {
+ if (this.status >= 200 && this.status < 300) {
+ const { response } = this
const { direct_upload } = response
delete response.direct_upload
this.attributes = response
@@ -41,7 +55,7 @@ export class BlobRecord {
}
requestDidError(event) {
- this.callback(`Error creating Blob for "${this.file.name}". Status: ${this.xhr.status}`)
+ this.callback(`Error creating Blob for "${this.file.name}". Status: ${this.status}`)
}
toJSON() {
diff --git a/activestorage/app/javascript/activestorage/blob_upload.js b/activestorage/app/javascript/activestorage/blob_upload.js
index 180a7415e7..277cc8ff8e 100644
--- a/activestorage/app/javascript/activestorage/blob_upload.js
+++ b/activestorage/app/javascript/activestorage/blob_upload.js
@@ -17,7 +17,7 @@ export class BlobUpload {
create(callback) {
this.callback = callback
- this.xhr.send(this.file)
+ this.xhr.send(this.file.slice())
}
requestDidLoad(event) {
diff --git a/activestorage/app/javascript/activestorage/direct_upload.js b/activestorage/app/javascript/activestorage/direct_upload.js
index 7085e0a4ab..c2eedf289b 100644
--- a/activestorage/app/javascript/activestorage/direct_upload.js
+++ b/activestorage/app/javascript/activestorage/direct_upload.js
@@ -14,8 +14,14 @@ export class DirectUpload {
create(callback) {
FileChecksum.create(this.file, (error, checksum) => {
+ if (error) {
+ callback(error)
+ return
+ }
+
const blob = new BlobRecord(this.file, checksum, this.url)
notify(this.delegate, "directUploadWillCreateBlobWithXHR", blob.xhr)
+
blob.create(error => {
if (error) {
callback(error)
diff --git a/activestorage/app/javascript/activestorage/file_checksum.js b/activestorage/app/javascript/activestorage/file_checksum.js
index ffaec1a128..a9dbef69ea 100644
--- a/activestorage/app/javascript/activestorage/file_checksum.js
+++ b/activestorage/app/javascript/activestorage/file_checksum.js
@@ -39,7 +39,7 @@ export class FileChecksum {
}
readNextChunk() {
- if (this.chunkIndex < this.chunkCount) {
+ if (this.chunkIndex < this.chunkCount || (this.chunkIndex == 0 && this.chunkCount == 0)) {
const start = this.chunkIndex * this.chunkSize
const end = Math.min(start + this.chunkSize, this.file.size)
const bytes = fileSlice.call(this.file, start, end)
diff --git a/activestorage/app/javascript/activestorage/helpers.js b/activestorage/app/javascript/activestorage/helpers.js
index 52fec8f6f1..7e83c447e7 100644
--- a/activestorage/app/javascript/activestorage/helpers.js
+++ b/activestorage/app/javascript/activestorage/helpers.js
@@ -23,11 +23,20 @@ export function findElement(root, selector) {
}
export function dispatchEvent(element, type, eventInit = {}) {
+ const { disabled } = element
const { bubbles, cancelable, detail } = eventInit
const event = document.createEvent("Event")
+
event.initEvent(type, bubbles || true, cancelable || true)
event.detail = detail || {}
- element.dispatchEvent(event)
+
+ try {
+ element.disabled = false
+ element.dispatchEvent(event)
+ } finally {
+ element.disabled = disabled
+ }
+
return event
}
diff --git a/activestorage/app/javascript/activestorage/ujs.js b/activestorage/app/javascript/activestorage/ujs.js
index 1dda02936f..f5353389ef 100644
--- a/activestorage/app/javascript/activestorage/ujs.js
+++ b/activestorage/app/javascript/activestorage/ujs.js
@@ -2,16 +2,25 @@ import { DirectUploadsController } from "./direct_uploads_controller"
import { findElement } from "./helpers"
const processingAttribute = "data-direct-uploads-processing"
+const submitButtonsByForm = new WeakMap
let started = false
export function start() {
if (!started) {
started = true
+ document.addEventListener("click", didClick, true)
document.addEventListener("submit", didSubmitForm)
document.addEventListener("ajax:before", didSubmitRemoteElement)
}
}
+function didClick(event) {
+ const { target } = event
+ if (target.tagName == "INPUT" && target.type == "submit" && target.form) {
+ submitButtonsByForm.set(target.form, target)
+ }
+}
+
function didSubmitForm(event) {
handleFormSubmissionEvent(event)
}
@@ -49,7 +58,8 @@ function handleFormSubmissionEvent(event) {
}
function submitForm(form) {
- let button = findElement(form, "input[type=submit]")
+ let button = submitButtonsByForm.get(form) || findElement(form, "input[type=submit]")
+
if (button) {
const { disabled } = button
button.disabled = false
@@ -59,11 +69,12 @@ function submitForm(form) {
} else {
button = document.createElement("input")
button.type = "submit"
- button.style = "display:none"
+ button.style.display = "none"
form.appendChild(button)
button.click()
form.removeChild(button)
}
+ submitButtonsByForm.delete(form)
}
function disable(input) {
diff --git a/activestorage/app/jobs/active_storage/analyze_job.rb b/activestorage/app/jobs/active_storage/analyze_job.rb
index 2a952f9f74..804ee4557a 100644
--- a/activestorage/app/jobs/active_storage/analyze_job.rb
+++ b/activestorage/app/jobs/active_storage/analyze_job.rb
@@ -2,6 +2,8 @@
# Provides asynchronous analysis of ActiveStorage::Blob records via ActiveStorage::Blob#analyze_later.
class ActiveStorage::AnalyzeJob < ActiveStorage::BaseJob
+ retry_on ActiveStorage::IntegrityError, attempts: 10, wait: :exponentially_longer
+
def perform(blob)
blob.analyze
end
diff --git a/activestorage/app/jobs/active_storage/purge_job.rb b/activestorage/app/jobs/active_storage/purge_job.rb
index 98874d2250..2604977bf1 100644
--- a/activestorage/app/jobs/active_storage/purge_job.rb
+++ b/activestorage/app/jobs/active_storage/purge_job.rb
@@ -2,8 +2,8 @@
# Provides asynchronous purging of ActiveStorage::Blob records via ActiveStorage::Blob#purge_later.
class ActiveStorage::PurgeJob < ActiveStorage::BaseJob
- # FIXME: Limit this to a custom ActiveStorage error
- retry_on StandardError
+ discard_on ActiveRecord::RecordNotFound
+ retry_on ActiveRecord::Deadlocked, attempts: 10, wait: :exponentially_longer
def perform(blob)
blob.purge
diff --git a/activestorage/app/models/active_storage/attachment.rb b/activestorage/app/models/active_storage/attachment.rb
index 9f61a5dbf3..4bdd1c0224 100644
--- a/activestorage/app/models/active_storage/attachment.rb
+++ b/activestorage/app/models/active_storage/attachment.rb
@@ -14,22 +14,36 @@ class ActiveStorage::Attachment < ActiveRecord::Base
delegate_missing_to :blob
- after_create_commit :analyze_blob_later
+ after_create_commit :analyze_blob_later, :identify_blob
+ after_destroy_commit :purge_dependent_blob_later
- # Synchronously purges the blob (deletes it from the configured service) and destroys the attachment.
+ # Synchronously deletes the attachment and {purges the blob}[rdoc-ref:ActiveStorage::Blob#purge].
def purge
- blob.purge
- destroy
+ delete
+ blob&.purge
end
- # Destroys the attachment and asynchronously purges the blob (deletes it from the configured service).
+ # Deletes the attachment and {enqueues a background job}[rdoc-ref:ActiveStorage::Blob#purge_later] to purge the blob.
def purge_later
- blob.purge_later
- destroy
+ delete
+ blob&.purge_later
end
private
+ def identify_blob
+ blob.identify
+ end
+
def analyze_blob_later
blob.analyze_later unless blob.analyzed?
end
+
+ def purge_dependent_blob_later
+ blob&.purge_later if dependent == :purge_later
+ end
+
+
+ def dependent
+ record.attachment_reflections[name]&.options[:dependent]
+ end
end
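
A hedged sketch of the reworked purge calls from the caller's side. User/avatar are illustrative names; has_one_attached defines the avatar_attachment association used here:

    attachment = user.avatar_attachment

    attachment.purge        # deletes the attachment row, then purges the blob synchronously
    # or, from a callback or transaction:
    attachment.purge_later  # deletes the attachment row, enqueues ActiveStorage::PurgeJob for the blob

    user.destroy            # after_destroy_commit purges the blob later when the reflection
                            # was declared with dependent: :purge_later
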
diff --git a/activestorage/app/models/active_storage/blob.rb b/activestorage/app/models/active_storage/blob.rb
index 99823e14c6..e7f2615b0f 100644
--- a/activestorage/app/models/active_storage/blob.rb
+++ b/activestorage/app/models/active_storage/blob.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require "active_storage/analyzer/null_analyzer"
+require "active_storage/downloader"
# A blob is a record that contains the metadata about a file and a key for where that file resides on the service.
# Blobs can be created in two ways:
@@ -16,19 +16,24 @@ require "active_storage/analyzer/null_analyzer"
# update a blob's metadata on a subsequent pass, but you should not update the key or change the uploaded file.
# If you need to create a derivative or otherwise change the blob, simply create a new blob and purge the old one.
class ActiveStorage::Blob < ActiveRecord::Base
- class UnpreviewableError < StandardError; end
- class UnrepresentableError < StandardError; end
+ require_dependency "active_storage/blob/analyzable"
+ require_dependency "active_storage/blob/identifiable"
+ require_dependency "active_storage/blob/representable"
+
+ include Analyzable
+ include Identifiable
+ include Representable
self.table_name = "active_storage_blobs"
has_secure_token :key
- store :metadata, accessors: [ :analyzed ], coder: JSON
+ store :metadata, accessors: [ :analyzed, :identified ], coder: ActiveRecord::Coders::JSON
class_attribute :service
has_many :attachments
- has_one_attached :preview_image
+ scope :unattached, -> { left_joins(:attachments).where(ActiveStorage::Attachment.table_name => { blob_id: nil }) }
class << self
# You can use the signed ID of a blob to refer to it on the client side without fear of tampering.
@@ -41,21 +46,25 @@ class ActiveStorage::Blob < ActiveRecord::Base
end
# Returns a new, unsaved blob instance after the +io+ has been uploaded to the service.
- def build_after_upload(io:, filename:, content_type: nil, metadata: nil)
- new.tap do |blob|
- blob.filename = filename
- blob.content_type = content_type
- blob.metadata = metadata
+ # When providing a content type, pass <tt>identify: false</tt> to bypass automatic content type inference.
+ def build_after_upload(io:, filename:, content_type: nil, metadata: nil, identify: true)
+ new(filename: filename, content_type: content_type, metadata: metadata).tap do |blob|
+ blob.upload(io, identify: identify)
+ end
+ end
- blob.upload io
+ def build_after_unfurling(io:, filename:, content_type: nil, metadata: nil, identify: true) #:nodoc:
+ new(filename: filename, content_type: content_type, metadata: metadata).tap do |blob|
+ blob.unfurl(io, identify: identify)
end
end
# Returns a saved blob instance after the +io+ has been uploaded to the service. Note, the blob is first built,
# then the +io+ is uploaded, then the blob is saved. This is done this way to avoid uploading (which may take
# time), while having an open database transaction.
- def create_after_upload!(io:, filename:, content_type: nil, metadata: nil)
- build_after_upload(io: io, filename: filename, content_type: content_type, metadata: metadata).tap(&:save!)
+ # When providing a content type, pass <tt>identify: false</tt> to bypass automatic content type inference.
+ def create_after_upload!(io:, filename:, content_type: nil, metadata: nil, identify: true)
+ build_after_upload(io: io, filename: filename, content_type: content_type, metadata: metadata, identify: identify).tap(&:save!)
end
# Returns a saved blob _without_ uploading a file to the service. This blob will point to a key where there is
@@ -68,7 +77,6 @@ class ActiveStorage::Blob < ActiveRecord::Base
end
end
-
# Returns a signed ID for this blob that's suitable for reference on the client-side without fear of tampering.
# It uses the framework-wide verifier on <tt>ActiveStorage.verifier</tt>, but with a dedicated purpose.
def signed_id
@@ -110,90 +118,21 @@ class ActiveStorage::Blob < ActiveRecord::Base
content_type.start_with?("text")
end
- # Returns an ActiveStorage::Variant instance with the set of +transformations+ provided. This is only relevant for image
- # files, and it allows any image to be transformed for size, colors, and the like. Example:
- #
- # avatar.variant(resize: "100x100").processed.service_url
- #
- # This will create and process a variant of the avatar blob that's constrained to a height and width of 100px.
- # Then it'll upload said variant to the service according to a derivative key of the blob and the transformations.
- #
- # Frequently, though, you don't actually want to transform the variant right away. But rather simply refer to a
- # specific variant that can be created by a controller on-demand. Like so:
- #
- # <%= image_tag Current.user.avatar.variant(resize: "100x100") %>
- #
- # This will create a URL for that specific blob with that specific variant, which the ActiveStorage::VariantsController
- # can then produce on-demand.
- def variant(transformations)
- ActiveStorage::Variant.new(self, ActiveStorage::Variation.wrap(transformations))
- end
-
-
- # Returns an ActiveStorage::Preview instance with the set of +transformations+ provided. A preview is an image generated
- # from a non-image blob. Active Storage comes with built-in previewers for videos and PDF documents. The video previewer
- # extracts the first frame from a video and the PDF previewer extracts the first page from a PDF document.
- #
- # blob.preview(resize: "100x100").processed.service_url
- #
- # Avoid processing previews synchronously in views. Instead, link to a controller action that processes them on demand.
- # Active Storage provides one, but you may want to create your own (for example, if you need authentication). Here’s
- # how to use the built-in version:
- #
- # <%= image_tag video.preview(resize: "100x100") %>
- #
- # This method raises ActiveStorage::Blob::UnpreviewableError if no previewer accepts the receiving blob. To determine
- # whether a blob is accepted by any previewer, call ActiveStorage::Blob#previewable?.
- def preview(transformations)
- if previewable?
- ActiveStorage::Preview.new(self, ActiveStorage::Variation.wrap(transformations))
- else
- raise UnpreviewableError
- end
- end
-
- # Returns true if any registered previewer accepts the blob. By default, this will return true for videos and PDF documents.
- def previewable?
- ActiveStorage.previewers.any? { |klass| klass.accept?(self) }
- end
-
-
- # Returns an ActiveStorage::Preview instance for a previewable blob or an ActiveStorage::Variant instance for an image blob.
- #
- # blob.representation(resize: "100x100").processed.service_url
- #
- # Raises ActiveStorage::Blob::UnrepresentableError if the receiving blob is neither an image nor previewable. Call
- # ActiveStorage::Blob#representable? to determine whether a blob is representable.
- #
- # See ActiveStorage::Blob#preview and ActiveStorage::Blob#variant for more information.
- def representation(transformations)
- case
- when previewable?
- preview transformations
- when image?
- variant transformations
- else
- raise UnrepresentableError
- end
- end
-
- # Returns true if the blob is an image or is previewable.
- def representable?
- image? || previewable?
- end
-
# Returns the URL of the blob on the service. This URL is intended to be short-lived for security and not used directly
# with users. Instead, the +service_url+ should only be exposed as a redirect from a stable, possibly authenticated URL.
# Hiding the +service_url+ behind a redirect also gives you the power to change services without updating all URLs. And
# it allows permanent URLs that redirect to the +service_url+ to be cached in the view.
- def service_url(expires_in: service.url_expires_in, disposition: "inline")
- service.url key, expires_in: expires_in, disposition: disposition, filename: filename, content_type: content_type
+ def service_url(expires_in: ActiveStorage.service_urls_expire_in, disposition: :inline, filename: nil, **options)
+ filename = ActiveStorage::Filename.wrap(filename || self.filename)
+
+ service.url key, expires_in: expires_in, filename: filename, content_type: content_type,
+ disposition: forcibly_serve_as_binary? ? :attachment : disposition, **options
end
# Returns a URL that can be used to directly upload a file for this blob on the service. This URL is intended to be
# short-lived for security and only generated on-demand by the client-side JavaScript responsible for doing the uploading.
- def service_url_for_direct_upload(expires_in: service.url_expires_in)
+ def service_url_for_direct_upload(expires_in: ActiveStorage.service_urls_expire_in)
service.url_for_direct_upload key, expires_in: expires_in, content_type: content_type, content_length: byte_size, checksum: checksum
end
@@ -202,21 +141,32 @@ class ActiveStorage::Blob < ActiveRecord::Base
service.headers_for_direct_upload key, filename: filename, content_type: content_type, content_length: byte_size, checksum: checksum
end
+
# Uploads the +io+ to the service on the +key+ for this blob. Blobs are intended to be immutable, so you shouldn't be
# using this method after a file has already been uploaded to fit with a blob. If you want to create a derivative blob,
# you should instead simply create a new blob based on the old one.
#
# Prior to uploading, we compute the checksum, which is sent to the service for transit integrity validation. If the
# checksum does not match what the service receives, an exception will be raised. We also measure the size of the +io+
- # and store that in +byte_size+ on the blob record.
+ # and store that in +byte_size+ on the blob record. The content type is automatically extracted from the +io+ unless
+ # you specify a +content_type+ and pass +identify+ as false.
#
# Normally, you do not have to call this method directly at all. Use the factory class methods of +build_after_upload+
# and +create_after_upload!+.
- def upload(io)
- self.checksum = compute_checksum_in_chunks(io)
- self.byte_size = io.size
+ def upload(io, identify: true)
+ unfurl io, identify: identify
+ upload_without_unfurling io
+ end
+
+ def unfurl(io, identify: true) #:nodoc:
+ self.checksum = compute_checksum_in_chunks(io)
+ self.content_type = extract_content_type(io) if content_type.nil? || identify
+ self.byte_size = io.size
+ self.identified = true
+ end
- service.upload(key, io, checksum: checksum)
+ def upload_without_unfurling(io) #:nodoc:
+ service.upload key, io, checksum: checksum
end
# Downloads the file associated with this blob. If no block is given, the entire file is read into memory and returned.
@@ -225,64 +175,43 @@ class ActiveStorage::Blob < ActiveRecord::Base
service.download key, &block
end
-
- # Extracts and stores metadata from the file associated with this blob using a relevant analyzer. Active Storage comes
- # with built-in analyzers for images and videos. See ActiveStorage::Analyzer::ImageAnalyzer and
- # ActiveStorage::Analyzer::VideoAnalyzer for information about the specific attributes they extract and the third-party
- # libraries they require.
- #
- # To choose the analyzer for a blob, Active Storage calls +accept?+ on each registered analyzer in order. It uses the
- # first analyzer for which +accept?+ returns true when given the blob. If no registered analyzer accepts the blob, no
- # metadata is extracted from it.
- #
- # In a Rails application, add or remove analyzers by manipulating +Rails.application.config.active_storage.analyzers+
- # in an initializer:
+ # Downloads the blob to a tempfile on disk. Yields the tempfile.
#
- # # Add a custom analyzer for Microsoft Office documents:
- # Rails.application.config.active_storage.analyzers.append DOCXAnalyzer
+ # The tempfile's name is prefixed with +ActiveStorage-+ and the blob's ID. Its extension matches that of the blob.
#
- # # Remove the built-in video analyzer:
- # Rails.application.config.active_storage.analyzers.delete ActiveStorage::Analyzer::VideoAnalyzer
+ # By default, the tempfile is created in <tt>Dir.tmpdir</tt>. Pass +tempdir:+ to create it in a different directory:
#
- # Outside of a Rails application, manipulate +ActiveStorage.analyzers+ instead.
+ # blob.open(tempdir: "/path/to/tmp") do |file|
+ # # ...
+ # end
#
- # You won't ordinarily need to call this method from a Rails application. New blobs are automatically and asynchronously
- # analyzed via #analyze_later when they're attached for the first time.
- def analyze
- update! metadata: extract_metadata_via_analyzer
- end
-
- # Enqueues an ActiveStorage::AnalyzeJob which calls #analyze.
+ # The tempfile is automatically closed and unlinked after the given block is executed.
#
- # This method is automatically called for a blob when it's attached for the first time. You can call it to analyze a blob
- # again (e.g. if you add a new analyzer or modify an existing one).
- def analyze_later
- ActiveStorage::AnalyzeJob.perform_later(self)
+ # Raises ActiveStorage::IntegrityError if the downloaded data does not match the blob's checksum.
+ def open(tempdir: nil, &block)
+ ActiveStorage::Downloader.new(self, tempdir: tempdir).download_blob_to_tempfile(&block)
end
- # Returns true if the blob has been analyzed.
- def analyzed?
- analyzed
- end
-
- # Deletes the file on the service that's associated with this blob. This should only be done if the blob is going to be
- # deleted as well or you will essentially have a dead reference. It's recommended to use the +#purge+ and +#purge_later+
+ # Deletes the files on the service associated with the blob. This should only be done if the blob is going to be
+ # deleted as well or you will essentially have a dead reference. It's recommended to use #purge and #purge_later
# methods in most circumstances.
def delete
- service.delete key
+ service.delete(key)
+ service.delete_prefixed("variants/#{key}/") if image?
end
# Deletes the file on the service and then destroys the blob record. This is the recommended way to dispose of unwanted
# blobs. Note, though, that deleting the file off the service will initiate a HTTP connection to the service, which may
- # be slow or prevented, so you should not use this method inside a transaction or in callbacks. Use +#purge_later+ instead.
+ # be slow or prevented, so you should not use this method inside a transaction or in callbacks. Use #purge_later instead.
def purge
- delete
destroy
+ delete
+ rescue ActiveRecord::InvalidForeignKey
end
- # Enqueues an ActiveStorage::PurgeJob job that'll call +purge+. This is the recommended way to purge blobs when the call
- # needs to be made from a transaction, a callback, or any other real-time scenario.
+ # Enqueues an ActiveStorage::PurgeJob to call #purge. This is the recommended way to purge blobs from a transaction,
+ # an Active Record callback, or in any other real-time scenario.
def purge_later
ActiveStorage::PurgeJob.perform_later(self)
end
@@ -298,16 +227,13 @@ class ActiveStorage::Blob < ActiveRecord::Base
end.base64digest
end
-
- def extract_metadata_via_analyzer
- analyzer.metadata.merge(analyzed: true)
+ def extract_content_type(io)
+ Marcel::MimeType.for io, name: filename.to_s, declared_type: content_type
end
- def analyzer
- analyzer_class.new(self)
+ def forcibly_serve_as_binary?
+ ActiveStorage.content_types_to_serve_as_binary.include?(content_type)
end
- def analyzer_class
- ActiveStorage.analyzers.detect { |klass| klass.accept?(self) } || ActiveStorage::Analyzer::NullAnalyzer
- end
+ ActiveSupport.run_load_hooks(:active_storage_blob, self)
end
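
Putting the reworked Blob API together, a usage sketch; the file path and content type are illustrative, everything else appears in the diff above:

    blob = ActiveStorage::Blob.create_after_upload!(
      io: File.open("tmp/report.pdf"),
      filename: "report.pdf",
      content_type: "application/pdf",
      identify: false                      # trust the declared type, skip content type sniffing
    )

    blob.service_url(disposition: :attachment)  # expiring URL; binary-flagged content types are always served as attachment

    blob.open do |tempfile|                     # checksum-verified download to a tempfile, unlinked after the block
      tempfile.size
    end
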
diff --git a/activestorage/app/models/active_storage/blob/analyzable.rb b/activestorage/app/models/active_storage/blob/analyzable.rb
new file mode 100644
index 0000000000..5bda6e6d73
--- /dev/null
+++ b/activestorage/app/models/active_storage/blob/analyzable.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require "active_storage/analyzer/null_analyzer"
+
+module ActiveStorage::Blob::Analyzable
+ # Extracts and stores metadata from the file associated with this blob using a relevant analyzer. Active Storage comes
+ # with built-in analyzers for images and videos. See ActiveStorage::Analyzer::ImageAnalyzer and
+ # ActiveStorage::Analyzer::VideoAnalyzer for information about the specific attributes they extract and the third-party
+ # libraries they require.
+ #
+ # To choose the analyzer for a blob, Active Storage calls +accept?+ on each registered analyzer in order. It uses the
+ # first analyzer for which +accept?+ returns true when given the blob. If no registered analyzer accepts the blob, no
+ # metadata is extracted from it.
+ #
+ # In a Rails application, add or remove analyzers by manipulating +Rails.application.config.active_storage.analyzers+
+ # in an initializer:
+ #
+ # # Add a custom analyzer for Microsoft Office documents:
+ # Rails.application.config.active_storage.analyzers.append DOCXAnalyzer
+ #
+ # # Remove the built-in video analyzer:
+ # Rails.application.config.active_storage.analyzers.delete ActiveStorage::Analyzer::VideoAnalyzer
+ #
+ # Outside of a Rails application, manipulate +ActiveStorage.analyzers+ instead.
+ #
+ # You won't ordinarily need to call this method from a Rails application. New blobs are automatically and asynchronously
+ # analyzed via #analyze_later when they're attached for the first time.
+ def analyze
+ update! metadata: metadata.merge(extract_metadata_via_analyzer)
+ end
+
+ # Enqueues an ActiveStorage::AnalyzeJob which calls #analyze.
+ #
+ # This method is automatically called for a blob when it's attached for the first time. You can call it to analyze a blob
+ # again (e.g. if you add a new analyzer or modify an existing one).
+ def analyze_later
+ ActiveStorage::AnalyzeJob.perform_later(self)
+ end
+
+ # Returns true if the blob has been analyzed.
+ def analyzed?
+ analyzed
+ end
+
+ private
+ def extract_metadata_via_analyzer
+ analyzer.metadata.merge(analyzed: true)
+ end
+
+ def analyzer
+ analyzer_class.new(self)
+ end
+
+ def analyzer_class
+ ActiveStorage.analyzers.detect { |klass| klass.accept?(self) } || ActiveStorage::Analyzer::NullAnalyzer
+ end
+end
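
The DOCXAnalyzer mentioned in the comment above is hypothetical; a sketch of what such an analyzer could look like, given that analyzers implement accept? and metadata:

    class DOCXAnalyzer < ActiveStorage::Analyzer
      DOCX_TYPE = "application/vnd.openxmlformats-officedocument.wordprocessingml.document"

      def self.accept?(blob)
        blob.content_type == DOCX_TYPE
      end

      def metadata
        { pages: count_pages }
      end

      private
        def count_pages
          0  # placeholder; real counting would need a DOCX library
        end
    end

    # config/initializers/active_storage.rb
    Rails.application.config.active_storage.analyzers.append DOCXAnalyzer
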
diff --git a/activestorage/app/models/active_storage/blob/identifiable.rb b/activestorage/app/models/active_storage/blob/identifiable.rb
new file mode 100644
index 0000000000..049e45dc3e
--- /dev/null
+++ b/activestorage/app/models/active_storage/blob/identifiable.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module ActiveStorage::Blob::Identifiable
+ def identify
+ update! content_type: identify_content_type, identified: true unless identified?
+ end
+
+ def identified?
+ identified
+ end
+
+ private
+ def identify_content_type
+ Marcel::MimeType.for download_identifiable_chunk, name: filename.to_s, declared_type: content_type
+ end
+
+ def download_identifiable_chunk
+ service.download_chunk key, 0...4.kilobytes
+ end
+end
diff --git a/activestorage/app/models/active_storage/blob/representable.rb b/activestorage/app/models/active_storage/blob/representable.rb
new file mode 100644
index 0000000000..03d5511481
--- /dev/null
+++ b/activestorage/app/models/active_storage/blob/representable.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module ActiveStorage::Blob::Representable
+ extend ActiveSupport::Concern
+
+ included do
+ has_one_attached :preview_image
+ end
+
+ # Returns an ActiveStorage::Variant instance with the set of +transformations+ provided. This is only relevant for image
+ # files, and it allows any image to be transformed for size, colors, and the like. Example:
+ #
+ # avatar.variant(resize_to_fit: [100, 100]).processed.service_url
+ #
+ # This will create and process a variant of the avatar blob that's constrained to a height and width of 100px.
+ # Then it'll upload said variant to the service according to a derivative key of the blob and the transformations.
+ #
+ # Frequently, though, you don't actually want to transform the variant right away. But rather simply refer to a
+ # specific variant that can be created by a controller on-demand. Like so:
+ #
+ # <%= image_tag Current.user.avatar.variant(resize_to_fit: [100, 100]) %>
+ #
+ # This will create a URL for that specific blob with that specific variant, which the ActiveStorage::RepresentationsController
+ # can then produce on-demand.
+ #
+ # Raises ActiveStorage::InvariableError if ImageMagick cannot transform the blob. To determine whether a blob is
+ # variable, call ActiveStorage::Blob#variable?.
+ def variant(transformations)
+ if variable?
+ ActiveStorage::Variant.new(self, transformations)
+ else
+ raise ActiveStorage::InvariableError
+ end
+ end
+
+ # Returns true if ImageMagick can transform the blob (its content type is in +ActiveStorage.variable_content_types+).
+ def variable?
+ ActiveStorage.variable_content_types.include?(content_type)
+ end
+
+
+ # Returns an ActiveStorage::Preview instance with the set of +transformations+ provided. A preview is an image generated
+ # from a non-image blob. Active Storage comes with built-in previewers for videos and PDF documents. The video previewer
+ # extracts the first frame from a video and the PDF previewer extracts the first page from a PDF document.
+ #
+ # blob.preview(resize_to_fit: [100, 100]).processed.service_url
+ #
+ # Avoid processing previews synchronously in views. Instead, link to a controller action that processes them on demand.
+ # Active Storage provides one, but you may want to create your own (for example, if you need authentication). Here’s
+ # how to use the built-in version:
+ #
+ # <%= image_tag video.preview(resize_to_fit: [100, 100]) %>
+ #
+ # This method raises ActiveStorage::UnpreviewableError if no previewer accepts the receiving blob. To determine
+ # whether a blob is accepted by any previewer, call ActiveStorage::Blob#previewable?.
+ def preview(transformations)
+ if previewable?
+ ActiveStorage::Preview.new(self, transformations)
+ else
+ raise ActiveStorage::UnpreviewableError
+ end
+ end
+
+ # Returns true if any registered previewer accepts the blob. By default, this will return true for videos and PDF documents.
+ def previewable?
+ ActiveStorage.previewers.any? { |klass| klass.accept?(self) }
+ end
+
+
+ # Returns an ActiveStorage::Preview for a previewable blob or an ActiveStorage::Variant for a variable image blob.
+ #
+ # blob.representation(resize_to_fit: [100, 100]).processed.service_url
+ #
+ # Raises ActiveStorage::UnrepresentableError if the receiving blob is neither variable nor previewable. Call
+ # ActiveStorage::Blob#representable? to determine whether a blob is representable.
+ #
+ # See ActiveStorage::Blob#preview and ActiveStorage::Blob#variant for more information.
+ def representation(transformations)
+ case
+ when previewable?
+ preview transformations
+ when variable?
+ variant transformations
+ else
+ raise ActiveStorage::UnrepresentableError
+ end
+ end
+
+ # Returns true if the blob is variable or previewable.
+ def representable?
+ variable? || previewable?
+ end
+end
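
Since representation raises for blobs that are neither variable nor previewable, callers typically guard on representable? first. A hedged view-side sketch; rails_blob_path is Active Storage's stable-URL route helper and is not part of this diff:

    if blob.representable?
      image_tag blob.representation(resize_to_fit: [100, 100])
    else
      link_to blob.filename, rails_blob_path(blob, disposition: "attachment")
    end
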
diff --git a/activestorage/app/models/active_storage/current.rb b/activestorage/app/models/active_storage/current.rb
new file mode 100644
index 0000000000..7e431d8462
--- /dev/null
+++ b/activestorage/app/models/active_storage/current.rb
@@ -0,0 +1,5 @@
+# frozen_string_literal: true
+
+class ActiveStorage::Current < ActiveSupport::CurrentAttributes #:nodoc:
+ attribute :host
+end
diff --git a/activestorage/app/models/active_storage/filename.rb b/activestorage/app/models/active_storage/filename.rb
index 79d55dc889..bebb5e61b3 100644
--- a/activestorage/app/models/active_storage/filename.rb
+++ b/activestorage/app/models/active_storage/filename.rb
@@ -3,8 +3,18 @@
# Encapsulates a string representing a filename to provide convenient access to parts of it and sanitization.
# A Filename instance is returned by ActiveStorage::Blob#filename, and is comparable so it can be used for sorting.
class ActiveStorage::Filename
+ require_dependency "active_storage/filename/parameters"
+
include Comparable
+ class << self
+ # Returns a Filename instance based on the given filename. If the filename is a Filename, it is
+ # returned unmodified. If it is a String, it is passed to ActiveStorage::Filename.new.
+ def wrap(filename)
+ filename.kind_of?(self) ? filename : new(filename)
+ end
+ end
+
def initialize(filename)
@filename = filename
end
@@ -50,7 +60,7 @@ class ActiveStorage::Filename
@filename.encode(Encoding::UTF_8, invalid: :replace, undef: :replace, replace: "�").strip.tr("\u{202E}%$|:;/\t\r\n\\", "-")
end
- def parameters
+ def parameters #:nodoc:
Parameters.new self
end
diff --git a/activestorage/app/models/active_storage/filename/parameters.rb b/activestorage/app/models/active_storage/filename/parameters.rb
index 58ce198d38..fb9ea10e49 100644
--- a/activestorage/app/models/active_storage/filename/parameters.rb
+++ b/activestorage/app/models/active_storage/filename/parameters.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ActiveStorage::Filename::Parameters
+class ActiveStorage::Filename::Parameters #:nodoc:
attr_reader :filename
def initialize(filename)
diff --git a/activestorage/app/models/active_storage/preview.rb b/activestorage/app/models/active_storage/preview.rb
index be5053edae..dd50494799 100644
--- a/activestorage/app/models/active_storage/preview.rb
+++ b/activestorage/app/models/active_storage/preview.rb
@@ -21,10 +21,9 @@
#
# Outside of a Rails application, modify +ActiveStorage.previewers+ instead.
#
-# The built-in previewers rely on third-party system libraries:
-#
-# * {ffmpeg}[https://www.ffmpeg.org]
-# * {mupdf}[https://mupdf.com]
+# The built-in previewers rely on third-party system libraries. Specifically, the built-in video previewer requires
+# {FFmpeg}[https://www.ffmpeg.org]. Two PDF previewers are provided: one requires {Poppler}[https://poppler.freedesktop.org],
+# and the other requires {muPDF}[https://mupdf.com] (version 1.8 or newer). To preview PDFs, install either Poppler or muPDF.
#
# These libraries are not provided by Rails. You must install them yourself to use the built-in previewers. Before you
# install and use third-party software, make sure you understand the licensing implications of doing so.
@@ -39,7 +38,7 @@ class ActiveStorage::Preview
# Processes the preview if it has not been processed yet. Returns the receiving Preview instance for convenience:
#
- # blob.preview(resize: "100x100").processed.service_url
+ # blob.preview(resize_to_fit: [100, 100]).processed.service_url
#
# Processing a preview generates an image from its blob and attaches the preview image to the blob. Because the preview
# image is stored with the blob, it is only generated once.
diff --git a/activestorage/app/models/active_storage/variant.rb b/activestorage/app/models/active_storage/variant.rb
index fa5aa69bd3..ea57fa5f78 100644
--- a/activestorage/app/models/active_storage/variant.rb
+++ b/activestorage/app/models/active_storage/variant.rb
@@ -1,40 +1,60 @@
# frozen_string_literal: true
+require "ostruct"
+
# Image blobs can have variants that are the result of a set of transformations applied to the original.
# These variants are used to create thumbnails, fixed-size avatars, or any other derivative image from the
# original.
#
-# Variants rely on {MiniMagick}[https://github.com/minimagick/minimagick] for the actual transformations
-# of the file, so you must add <tt>gem "mini_magick"</tt> to your Gemfile if you wish to use variants.
+# Variants rely on the {ImageProcessing}[https://github.com/janko-m/image_processing] gem for the actual transformations
+# of the file, so you must add <tt>gem "image_processing"</tt> to your Gemfile if you wish to use variants. By
+# default, images will be processed with {ImageMagick}[http://imagemagick.org] using the
+# {MiniMagick}[https://github.com/minimagick/minimagick] gem, but you can also switch to the
+# {libvips}[http://jcupitt.github.io/libvips/] processor operated by the {ruby-vips}[https://github.com/jcupitt/ruby-vips]
+# gem.
+#
+# Rails.application.config.active_storage.variant_processor
+# # => :mini_magick
#
-# Note that to create a variant it's necessary to download the entire blob file from the service and load it
-# into memory. The larger the image, the more memory is used. Because of this process, you also want to be
-# considerate about when the variant is actually processed. You shouldn't be processing variants inline in a
-# template, for example. Delay the processing to an on-demand controller, like the one provided in
-# ActiveStorage::VariantsController.
+# Rails.application.config.active_storage.variant_processor = :vips
+# # => :vips
+#
+# Note that to create a variant it's necessary to download the entire blob file from the service. Because of this process,
+# you also want to be considerate about when the variant is actually processed. You shouldn't be processing variants inline
+# in a template, for example. Delay the processing to an on-demand controller, like the one provided in
+# ActiveStorage::RepresentationsController.
#
# To refer to such a delayed on-demand variant, simply link to the variant through the resolved route provided
# by Active Storage like so:
#
-# <%= image_tag url_for(Current.user.avatar.variant(resize: "100x100")) %>
+# <%= image_tag Current.user.avatar.variant(resize_to_fit: [100, 100]) %>
#
-# This will create a URL for that specific blob with that specific variant, which the ActiveStorage::VariantsController
+# This will create a URL for that specific blob with that specific variant, which the ActiveStorage::RepresentationsController
# can then produce on-demand.
#
# When you do want to actually produce the variant needed, call +processed+. This will check that the variant
# has already been processed and uploaded to the service, and, if so, just return that. Otherwise it will perform
# the transformations, upload the variant to the service, and return itself again. Example:
#
-# avatar.variant(resize: "100x100").processed.service_url
+# avatar.variant(resize_to_fit: [100, 100]).processed.service_url
#
# This will create and process a variant of the avatar blob that's constrained to a height and width of 100.
# Then it'll upload said variant to the service according to a derivative key of the blob and the transformations.
#
-# A list of all possible transformations is available at https://www.imagemagick.org/script/mogrify.php. You can
-# combine as many as you like freely:
+# You can combine any number of ImageMagick/libvips operations into a variant, as well as any macros provided by the
+# ImageProcessing gem (such as +resize_to_fit+):
+#
+# avatar.variant(resize_to_fit: [800, 800], monochrome: true, rotate: "-90")
#
-# avatar.variant(resize: "100x100", monochrome: true, flip: "-90")
+# Visit the following links for a list of available ImageProcessing commands and ImageMagick/libvips operations:
+#
+# * {ImageProcessing::MiniMagick}[https://github.com/janko-m/image_processing/blob/master/doc/minimagick.md#methods]
+# * {ImageMagick reference}[https://www.imagemagick.org/script/mogrify.php]
+# * {ImageProcessing::Vips}[https://github.com/janko-m/image_processing/blob/master/doc/vips.md#methods]
+# * {ruby-vips reference}[http://www.rubydoc.info/gems/ruby-vips/Vips/Image]
class ActiveStorage::Variant
+ WEB_IMAGE_CONTENT_TYPES = %w[ image/png image/jpeg image/jpg image/gif ]
+
attr_reader :blob, :variation
delegate :service, to: :blob
@@ -59,10 +79,10 @@ class ActiveStorage::Variant
# it allows permanent URLs that redirect to the +service_url+ to be cached in the view.
#
# Use <tt>url_for(variant)</tt> (or the implied form, like +link_to variant+ or +redirect_to variant+) to get the stable URL
- # for a variant that points to the ActiveStorage::VariantsController, which in turn will use this +service_call+ method
+ # for a variant that points to the ActiveStorage::RepresentationsController, which in turn will use this +service_call+ method
# for its redirection.
- def service_url(expires_in: service.url_expires_in, disposition: :inline)
- service.url key, expires_in: expires_in, disposition: disposition, filename: blob.filename, content_type: blob.content_type
+ def service_url(expires_in: ActiveStorage.service_urls_expire_in, disposition: :inline)
+ service.url key, expires_in: expires_in, disposition: disposition, filename: filename, content_type: content_type
end
# Returns the receiving variant. Allows ActiveStorage::Variant and ActiveStorage::Preview instances to be used interchangeably.
@@ -76,11 +96,36 @@ class ActiveStorage::Variant
end
def process
- service.upload key, transform(service.download(blob.key))
+ blob.open do |image|
+ transform(image) { |output| upload(output) }
+ end
end
- def transform(io)
- require "mini_magick"
- File.open MiniMagick::Image.read(io).tap { |image| variation.transform(image) }.path
+ def transform(image, &block)
+ variation.transform(image, format: format, &block)
end
+
+ def upload(file)
+ service.upload(key, file)
+ end
+
+
+ def specification
+ @specification ||=
+ if WEB_IMAGE_CONTENT_TYPES.include?(blob.content_type)
+ Specification.new \
+ filename: blob.filename,
+ content_type: blob.content_type,
+ format: nil
+ else
+ Specification.new \
+ filename: ActiveStorage::Filename.new("#{blob.filename.base}.png"),
+ content_type: "image/png",
+ format: "png"
+ end
+ end
+
+ delegate :filename, :content_type, :format, to: :specification
+
+ class Specification < OpenStruct; end
end
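A minimal usage sketch of the variant API documented above, assuming a User model with has_one_attached :avatar and an image blob already attached (the user record is illustrative):

    # Build a lazy variant description; nothing is processed or uploaded yet.
    variant = user.avatar.variant(resize_to_fit: [100, 100], monochrome: true)

    # Transform the image, upload the result to the service (keyed off the blob
    # and the transformations), and return the variant.
    variant.processed

    # Signed, expiring URL pointing at the processed variant on the service.
    variant.processed.service_url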
diff --git a/activestorage/app/models/active_storage/variation.rb b/activestorage/app/models/active_storage/variation.rb
index 13bad87cac..3adc2407e5 100644
--- a/activestorage/app/models/active_storage/variation.rb
+++ b/activestorage/app/models/active_storage/variation.rb
@@ -6,9 +6,9 @@
# In case you do need to use this directly, it's instantiated using a hash of transformations where
# the key is the command and the value is the arguments. Example:
#
-# ActiveStorage::Variation.new(resize: "100x100", monochrome: true, trim: true, rotate: "-90")
+# ActiveStorage::Variation.new(resize_to_fit: [100, 100], monochrome: true, trim: true, rotate: "-90")
#
-# A list of all possible transformations is available at https://www.imagemagick.org/script/mogrify.php.
+# The options map directly to {ImageProcessing}[https://github.com/janko-m/image_processing] commands.
class ActiveStorage::Variation
attr_reader :transformations
@@ -43,15 +43,13 @@ class ActiveStorage::Variation
@transformations = transformations
end
- # Accepts an open MiniMagick image instance, like what's returned by <tt>MiniMagick::Image.read(io)</tt>,
- # and performs the +transformations+ against it. The transformed image instance is then returned.
- def transform(image)
- transformations.each do |(method, argument)|
- if eligible_argument?(argument)
- image.public_send(method, argument)
- else
- image.public_send(method)
- end
+ # Accepts a File object, performs the +transformations+ against it, and
+ # saves the transformed image into a temporary file. If +format+ is specified
+ # it will be the format of the result image, otherwise the result image
+ # retains the source format.
+ def transform(file, format: nil, &block)
+ ActiveSupport::Notifications.instrument("transform.active_storage") do
+ transformer.transform(file, format: format, &block)
end
end
@@ -61,7 +59,22 @@ class ActiveStorage::Variation
end
private
- def eligible_argument?(argument)
- argument.present? && argument != true
+ def transformer
+ if ActiveStorage.variant_processor
+ begin
+ require "image_processing"
+ rescue LoadError
+ ActiveSupport::Deprecation.warn <<~WARNING
+ Generating image variants will require the image_processing gem in Rails 6.1.
+ Please add `gem 'image_processing', '~> 1.2'` to your Gemfile.
+ WARNING
+
+ ActiveStorage::Transformers::MiniMagickTransformer.new(transformations)
+ else
+ ActiveStorage::Transformers::ImageProcessingTransformer.new(transformations)
+ end
+ else
+ ActiveStorage::Transformers::MiniMagickTransformer.new(transformations)
+ end
end
end
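A sketch of driving ActiveStorage::Variation directly through the new transform(file, format:) signature shown above. The input filename is illustrative, and the yielded object is assumed to be a tempfile produced by the configured transformer:

    require "fileutils"

    variation = ActiveStorage::Variation.new(resize_to_fit: [100, 100])

    File.open("racecar.jpg", "rb") do |file|
      # Instruments "transform.active_storage" and delegates to the
      # ImageProcessing or MiniMagick transformer chosen in #transformer.
      variation.transform(file, format: "png") do |output|
        FileUtils.cp output.path, "racecar-100x100.png"
      end
    end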
diff --git a/activestorage/config/routes.rb b/activestorage/config/routes.rb
index c659e079fd..20d19f334a 100644
--- a/activestorage/config/routes.rb
+++ b/activestorage/config/routes.rb
@@ -1,43 +1,31 @@
# frozen_string_literal: true
Rails.application.routes.draw do
- get "/rails/active_storage/blobs/:signed_id/*filename" => "active_storage/blobs#show", as: :rails_service_blob, internal: true
+ get "/rails/active_storage/blobs/:signed_id/*filename" => "active_storage/blobs#show", as: :rails_service_blob
- direct :rails_blob do |blob|
- route_for(:rails_service_blob, blob.signed_id, blob.filename)
+ direct :rails_blob do |blob, options|
+ route_for(:rails_service_blob, blob.signed_id, blob.filename, options)
end
- resolve("ActiveStorage::Blob") { |blob| route_for(:rails_blob, blob) }
- resolve("ActiveStorage::Attachment") { |attachment| route_for(:rails_blob, attachment.blob) }
+ resolve("ActiveStorage::Blob") { |blob, options| route_for(:rails_blob, blob, options) }
+ resolve("ActiveStorage::Attachment") { |attachment, options| route_for(:rails_blob, attachment.blob, options) }
- get "/rails/active_storage/variants/:signed_blob_id/:variation_key/*filename" => "active_storage/variants#show", as: :rails_blob_variation, internal: true
+ get "/rails/active_storage/representations/:signed_blob_id/:variation_key/*filename" => "active_storage/representations#show", as: :rails_blob_representation
- direct :rails_variant do |variant|
- signed_blob_id = variant.blob.signed_id
- variation_key = variant.variation.key
- filename = variant.blob.filename
+ direct :rails_representation do |representation, options|
+ signed_blob_id = representation.blob.signed_id
+ variation_key = representation.variation.key
+ filename = representation.blob.filename
- route_for(:rails_blob_variation, signed_blob_id, variation_key, filename)
+ route_for(:rails_blob_representation, signed_blob_id, variation_key, filename, options)
end
- resolve("ActiveStorage::Variant") { |variant| route_for(:rails_variant, variant) }
+ resolve("ActiveStorage::Variant") { |variant, options| route_for(:rails_representation, variant, options) }
+ resolve("ActiveStorage::Preview") { |preview, options| route_for(:rails_representation, preview, options) }
- get "/rails/active_storage/previews/:signed_blob_id/:variation_key/*filename" => "active_storage/previews#show", as: :rails_blob_preview, internal: true
-
- direct :rails_preview do |preview|
- signed_blob_id = preview.blob.signed_id
- variation_key = preview.variation.key
- filename = preview.blob.filename
-
- route_for(:rails_blob_preview, signed_blob_id, variation_key, filename)
- end
-
- resolve("ActiveStorage::Preview") { |preview| route_for(:rails_preview, preview) }
-
-
- get "/rails/active_storage/disk/:encoded_key/*filename" => "active_storage/disk#show", as: :rails_disk_service, internal: true
- put "/rails/active_storage/disk/:encoded_token" => "active_storage/disk#update", as: :update_rails_disk_service, internal: true
- post "/rails/active_storage/direct_uploads" => "active_storage/direct_uploads#create", as: :rails_direct_uploads, internal: true
+ get "/rails/active_storage/disk/:encoded_key/*filename" => "active_storage/disk#show", as: :rails_disk_service
+ put "/rails/active_storage/disk/:encoded_token" => "active_storage/disk#update", as: :update_rails_disk_service
+ post "/rails/active_storage/direct_uploads" => "active_storage/direct_uploads#create", as: :rails_direct_uploads
end
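The routes above now forward extra options through the direct and resolve blocks. A sketch of how that surfaces in application code; the helper names follow the as:/direct names above, and the disposition option is assumed to be passed through as a query parameter:

    # Stable redirect URL for a blob (resolved via the rails_blob direct route).
    url_for(user.avatar)
    rails_blob_path(user.avatar, disposition: "attachment")

    # Stable URL for a variant or preview via the representations route.
    rails_representation_url(user.avatar.variant(resize_to_fit: [100, 100]))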
diff --git a/activestorage/db/migrate/20170806125915_create_active_storage_tables.rb b/activestorage/db/migrate/20170806125915_create_active_storage_tables.rb
index 9e31e3966a..cfaf01cd5e 100644
--- a/activestorage/db/migrate/20170806125915_create_active_storage_tables.rb
+++ b/activestorage/db/migrate/20170806125915_create_active_storage_tables.rb
@@ -20,6 +20,7 @@ class CreateActiveStorageTables < ActiveRecord::Migration[5.2]
t.datetime :created_at, null: false
t.index [ :record_type, :record_id, :name, :blob_id ], name: "index_active_storage_attachments_uniqueness", unique: true
+ t.foreign_key :active_storage_blobs, column: :blob_id
end
end
end
diff --git a/activestorage/lib/active_storage.rb b/activestorage/lib/active_storage.rb
index d1ff6b7032..d3e3a2f49b 100644
--- a/activestorage/lib/active_storage.rb
+++ b/activestorage/lib/active_storage.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
#--
-# Copyright (c) 2017 David Heinemeier Hansson
+# Copyright (c) 2017-2018 David Heinemeier Hansson, Basecamp
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@@ -26,7 +26,11 @@
require "active_record"
require "active_support"
require "active_support/rails"
+
require "active_storage/version"
+require "active_storage/errors"
+
+require "marcel"
module ActiveStorage
extend ActiveSupport::Autoload
@@ -41,4 +45,17 @@ module ActiveStorage
mattr_accessor :queue
mattr_accessor :previewers, default: []
mattr_accessor :analyzers, default: []
+ mattr_accessor :variant_processor, default: :mini_magick
+ mattr_accessor :paths, default: {}
+ mattr_accessor :variable_content_types, default: []
+ mattr_accessor :content_types_to_serve_as_binary, default: []
+ mattr_accessor :service_urls_expire_in, default: 5.minutes
+
+ module Transformers
+ extend ActiveSupport::Autoload
+
+ autoload :Transformer
+ autoload :ImageProcessingTransformer
+ autoload :MiniMagickTransformer
+ end
end
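The new mattr_accessors above are populated from config.active_storage by the engine (see engine.rb later in this diff). A sketch of overriding them in an initializer; the values are illustrative:

    # config/initializers/active_storage.rb
    Rails.application.configure do
      config.active_storage.variant_processor      = :vips       # default: :mini_magick
      config.active_storage.service_urls_expire_in = 10.minutes  # default: 5.minutes
    end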
diff --git a/activestorage/lib/active_storage/analyzer.rb b/activestorage/lib/active_storage/analyzer.rb
index 837785a12b..caa25418a5 100644
--- a/activestorage/lib/active_storage/analyzer.rb
+++ b/activestorage/lib/active_storage/analyzer.rb
@@ -1,13 +1,9 @@
# frozen_string_literal: true
-require "active_storage/downloading"
-
module ActiveStorage
# This is an abstract base class for analyzers, which extract metadata from blobs. See
# ActiveStorage::Analyzer::ImageAnalyzer for an example of a concrete subclass.
class Analyzer
- include Downloading
-
attr_reader :blob
# Implement this method in a concrete subclass. Have it return true when given a blob from which
@@ -26,8 +22,17 @@ module ActiveStorage
end
private
- def logger
+ # Downloads the blob to a tempfile on disk. Yields the tempfile.
+ def download_blob_to_tempfile(&block) #:doc:
+ blob.open tempdir: tempdir, &block
+ end
+
+ def logger #:doc:
ActiveStorage.logger
end
+
+ def tempdir #:doc:
+ Dir.tmpdir
+ end
end
end
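A sketch of a custom analyzer built on the download_blob_to_tempfile hook above (which now delegates to ActiveStorage::Blob#open). The class name and the metadata it reports are illustrative:

    class PdfAnalyzer < ActiveStorage::Analyzer
      def self.accept?(blob)
        blob.content_type == "application/pdf"
      end

      def metadata
        download_blob_to_tempfile do |file|
          # Placeholder: inspect the tempfile with a real PDF library here.
          { byte_size: file.size }
        end
      end
    end

    # Register it ahead of the built-in analyzers:
    # Rails.application.config.active_storage.analyzers.prepend PdfAnalyzer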
diff --git a/activestorage/lib/active_storage/analyzer/image_analyzer.rb b/activestorage/lib/active_storage/analyzer/image_analyzer.rb
index 25e0251e6e..3b39de91be 100644
--- a/activestorage/lib/active_storage/analyzer/image_analyzer.rb
+++ b/activestorage/lib/active_storage/analyzer/image_analyzer.rb
@@ -3,14 +3,15 @@
module ActiveStorage
# Extracts width and height in pixels from an image blob.
#
+ # If the image contains EXIF data indicating its angle is 90 or 270 degrees, its width and height are swapped for convenience.
+ #
# Example:
#
# ActiveStorage::Analyzer::ImageAnalyzer.new(blob).metadata
# # => { width: 4104, height: 2736 }
#
# This analyzer relies on the third-party {MiniMagick}[https://github.com/minimagick/minimagick] gem. MiniMagick requires
- # the {ImageMagick}[http://www.imagemagick.org] system library. These libraries are not provided by Rails; you must
- # install them yourself to use this analyzer.
+ # the {ImageMagick}[http://www.imagemagick.org] system library.
class Analyzer::ImageAnalyzer < Analyzer
def self.accept?(blob)
blob.image?
@@ -18,7 +19,11 @@ module ActiveStorage
def metadata
read_image do |image|
- { width: image.width, height: image.height }
+ if rotated_image?(image)
+ { width: image.height, height: image.width }
+ else
+ { width: image.width, height: image.height }
+ end
end
rescue LoadError
logger.info "Skipping image analysis because the mini_magick gem isn't installed"
@@ -32,5 +37,9 @@ module ActiveStorage
yield MiniMagick::Image.new(file.path)
end
end
+
+ def rotated_image?(image)
+ %w[ RightTop LeftBottom ].include?(image["%[orientation]"])
+ end
end
end
diff --git a/activestorage/lib/active_storage/analyzer/video_analyzer.rb b/activestorage/lib/active_storage/analyzer/video_analyzer.rb
index 408b5e58e9..18d8ff8237 100644
--- a/activestorage/lib/active_storage/analyzer/video_analyzer.rb
+++ b/activestorage/lib/active_storage/analyzer/video_analyzer.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-require "active_support/core_ext/hash/compact"
-
module ActiveStorage
# Extracts the following from a video blob:
#
@@ -9,31 +7,40 @@ module ActiveStorage
# * Height (pixels)
# * Duration (seconds)
# * Angle (degrees)
- # * Aspect ratio
+ # * Display aspect ratio
#
# Example:
#
# ActiveStorage::VideoAnalyzer.new(blob).metadata
- # # => { width: 640, height: 480, duration: 5.0, angle: 0, aspect_ratio: [4, 3] }
+ # # => { width: 640.0, height: 480.0, duration: 5.0, angle: 0, display_aspect_ratio: [4, 3] }
+ #
+ # When a video's angle is 90 or 270 degrees, its width and height are automatically swapped for convenience.
#
- # This analyzer requires the {ffmpeg}[https://www.ffmpeg.org] system library, which is not provided by Rails. You must
- # install ffmpeg yourself to use this analyzer.
+ # This analyzer requires the {FFmpeg}[https://www.ffmpeg.org] system library, which is not provided by Rails.
class Analyzer::VideoAnalyzer < Analyzer
def self.accept?(blob)
blob.video?
end
def metadata
- { width: width, height: height, duration: duration, angle: angle, aspect_ratio: aspect_ratio }.compact
+ { width: width, height: height, duration: duration, angle: angle, display_aspect_ratio: display_aspect_ratio }.compact
end
private
def width
- Integer(video_stream["width"]) if video_stream["width"]
+ if rotated?
+ computed_height || encoded_height
+ else
+ encoded_width
+ end
end
def height
- Integer(video_stream["height"]) if video_stream["height"]
+ if rotated?
+ encoded_width
+ else
+ computed_height || encoded_height
+ end
end
def duration
@@ -44,13 +51,41 @@ module ActiveStorage
Integer(tags["rotate"]) if tags["rotate"]
end
- def aspect_ratio
+ def display_aspect_ratio
if descriptor = video_stream["display_aspect_ratio"]
- descriptor.split(":", 2).collect(&:to_i)
+ if terms = descriptor.split(":", 2)
+ numerator = Integer(terms[0])
+ denominator = Integer(terms[1])
+
+ [numerator, denominator] unless numerator == 0
+ end
end
end
+ def rotated?
+ angle == 90 || angle == 270
+ end
+
+ def computed_height
+ if encoded_width && display_height_scale
+ encoded_width * display_height_scale
+ end
+ end
+
+ def encoded_width
+ @encoded_width ||= Float(video_stream["width"]) if video_stream["width"]
+ end
+
+ def encoded_height
+ @encoded_height ||= Float(video_stream["height"]) if video_stream["height"]
+ end
+
+ def display_height_scale
+ @display_height_scale ||= Float(display_aspect_ratio.last) / display_aspect_ratio.first if display_aspect_ratio
+ end
+
+
def tags
@tags ||= video_stream["tags"] || {}
end
@@ -68,12 +103,16 @@ module ActiveStorage
end
def probe_from(file)
- IO.popen([ "ffprobe", "-print_format", "json", "-show_streams", "-v", "error", file.path ]) do |output|
+ IO.popen([ ffprobe_path, "-print_format", "json", "-show_streams", "-v", "error", file.path ]) do |output|
JSON.parse(output.read)
end
rescue Errno::ENOENT
- logger.info "Skipping video analysis because ffmpeg isn't installed"
+ logger.info "Skipping video analysis because FFmpeg isn't installed"
{}
end
+
+ def ffprobe_path
+ ActiveStorage.paths[:ffprobe] || "ffprobe"
+ end
end
end
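A worked example of the width/height arithmetic above, assuming ffprobe reports an encoded 1920x1080 stream with a display_aspect_ratio of "16:9" and a rotate tag of 90:

    encoded_width        = 1920.0
    display_height_scale = 9.0 / 16.0                            # denominator / numerator
    computed_height      = encoded_width * display_height_scale  # => 1080.0

    # angle == 90, so rotated? is true and the dimensions are swapped:
    # metadata # => { width: 1080.0, height: 1920.0, angle: 90,
    #                 display_aspect_ratio: [16, 9], duration: ... }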
diff --git a/activestorage/lib/active_storage/attached.rb b/activestorage/lib/active_storage/attached.rb
index c08fd56652..b540f85fbe 100644
--- a/activestorage/lib/active_storage/attached.rb
+++ b/activestorage/lib/active_storage/attached.rb
@@ -1,40 +1,25 @@
# frozen_string_literal: true
-require "action_dispatch"
-require "action_dispatch/http/upload"
require "active_support/core_ext/module/delegation"
module ActiveStorage
# Abstract base class for the concrete ActiveStorage::Attached::One and ActiveStorage::Attached::Many
# classes that both provide proxy access to the blob association for a record.
class Attached
- attr_reader :name, :record, :dependent
+ attr_reader :name, :record
- def initialize(name, record, dependent:)
- @name, @record, @dependent = name, record, dependent
+ def initialize(name, record)
+ @name, @record = name, record
end
private
- def create_blob_from(attachable)
- case attachable
- when ActiveStorage::Blob
- attachable
- when ActionDispatch::Http::UploadedFile, Rack::Test::UploadedFile
- ActiveStorage::Blob.create_after_upload! \
- io: attachable.open,
- filename: attachable.original_filename,
- content_type: attachable.content_type
- when Hash
- ActiveStorage::Blob.create_after_upload!(attachable)
- when String
- ActiveStorage::Blob.find_signed(attachable)
- else
- nil
- end
+ def change
+ record.attachment_changes[name]
end
end
end
+require "active_storage/attached/model"
require "active_storage/attached/one"
require "active_storage/attached/many"
-require "active_storage/attached/macros"
+require "active_storage/attached/changes"
diff --git a/activestorage/lib/active_storage/attached/changes.rb b/activestorage/lib/active_storage/attached/changes.rb
new file mode 100644
index 0000000000..1db3906a63
--- /dev/null
+++ b/activestorage/lib/active_storage/attached/changes.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ module Attached::Changes #:nodoc:
+ extend ActiveSupport::Autoload
+
+ eager_autoload do
+ autoload :CreateOne
+ autoload :CreateMany
+ autoload :CreateOneOfMany
+
+ autoload :DeleteOne
+ autoload :DeleteMany
+ end
+ end
+end
diff --git a/activestorage/lib/active_storage/attached/changes/create_many.rb b/activestorage/lib/active_storage/attached/changes/create_many.rb
new file mode 100644
index 0000000000..a7a8553e0f
--- /dev/null
+++ b/activestorage/lib/active_storage/attached/changes/create_many.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ class Attached::Changes::CreateMany #:nodoc:
+ attr_reader :name, :record, :attachables
+
+ def initialize(name, record, attachables)
+ @name, @record, @attachables = name, record, Array(attachables)
+ end
+
+ def attachments
+ @attachments ||= subchanges.collect(&:attachment)
+ end
+
+ def blobs
+ @blobs ||= subchanges.collect(&:blob)
+ end
+
+ def upload
+ subchanges.each(&:upload)
+ end
+
+ def save
+ assign_associated_attachments
+ reset_associated_blobs
+ end
+
+ private
+ def subchanges
+ @subchanges ||= attachables.collect { |attachable| build_subchange_from(attachable) }
+ end
+
+ def build_subchange_from(attachable)
+ ActiveStorage::Attached::Changes::CreateOneOfMany.new(name, record, attachable)
+ end
+
+
+ def assign_associated_attachments
+ record.public_send("#{name}_attachments=", attachments)
+ end
+
+ def reset_associated_blobs
+ record.public_send("#{name}_blobs").reset
+ end
+ end
+end
diff --git a/activestorage/lib/active_storage/attached/changes/create_one.rb b/activestorage/lib/active_storage/attached/changes/create_one.rb
new file mode 100644
index 0000000000..5812fd2b08
--- /dev/null
+++ b/activestorage/lib/active_storage/attached/changes/create_one.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require "action_dispatch"
+require "action_dispatch/http/upload"
+
+module ActiveStorage
+ class Attached::Changes::CreateOne #:nodoc:
+ attr_reader :name, :record, :attachable
+
+ def initialize(name, record, attachable)
+ @name, @record, @attachable = name, record, attachable
+ end
+
+ def attachment
+ @attachment ||= find_or_build_attachment
+ end
+
+ def blob
+ @blob ||= find_or_build_blob
+ end
+
+ def upload
+ case attachable
+ when ActionDispatch::Http::UploadedFile, Rack::Test::UploadedFile
+ blob.upload_without_unfurling(attachable.open)
+ when Hash
+ blob.upload_without_unfurling(attachable.fetch(:io))
+ end
+ end
+
+ def save
+ record.public_send("#{name}_attachment=", attachment)
+ end
+
+ private
+ def find_or_build_attachment
+ find_attachment || build_attachment
+ end
+
+ def find_attachment
+ if record.public_send("#{name}_blob") == blob
+ record.public_send("#{name}_attachment")
+ end
+ end
+
+ def build_attachment
+ ActiveStorage::Attachment.new(record: record, name: name, blob: blob)
+ end
+
+ def find_or_build_blob
+ case attachable
+ when ActiveStorage::Blob
+ attachable
+ when ActionDispatch::Http::UploadedFile, Rack::Test::UploadedFile
+ ActiveStorage::Blob.build_after_unfurling \
+ io: attachable.open,
+ filename: attachable.original_filename,
+ content_type: attachable.content_type
+ when Hash
+ ActiveStorage::Blob.build_after_unfurling(attachable)
+ when String
+ ActiveStorage::Blob.find_signed(attachable)
+ else
+ raise ArgumentError, "Could not find or build blob: expected attachable, got #{attachable.inspect}"
+ end
+ end
+ end
+end
diff --git a/activestorage/lib/active_storage/attached/changes/create_one_of_many.rb b/activestorage/lib/active_storage/attached/changes/create_one_of_many.rb
new file mode 100644
index 0000000000..7268e87316
--- /dev/null
+++ b/activestorage/lib/active_storage/attached/changes/create_one_of_many.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ class Attached::Changes::CreateOneOfMany < Attached::Changes::CreateOne #:nodoc:
+ private
+ def find_attachment
+ record.public_send("#{name}_attachments").detect { |attachment| attachment.blob_id == blob.id }
+ end
+ end
+end
diff --git a/activestorage/lib/active_storage/attached/changes/delete_many.rb b/activestorage/lib/active_storage/attached/changes/delete_many.rb
new file mode 100644
index 0000000000..6cbd1158dc
--- /dev/null
+++ b/activestorage/lib/active_storage/attached/changes/delete_many.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ class Attached::Changes::DeleteMany #:nodoc:
+ attr_reader :name, :record
+
+ def initialize(name, record)
+ @name, @record = name, record
+ end
+
+ def attachments
+ ActiveStorage::Attachment.none
+ end
+
+ def blobs
+ ActiveStorage::Blob.none
+ end
+
+ def save
+ record.public_send("#{name}_attachments=", [])
+ end
+ end
+end
diff --git a/activestorage/lib/active_storage/attached/changes/delete_one.rb b/activestorage/lib/active_storage/attached/changes/delete_one.rb
new file mode 100644
index 0000000000..2f7d356613
--- /dev/null
+++ b/activestorage/lib/active_storage/attached/changes/delete_one.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ class Attached::Changes::DeleteOne #:nodoc:
+ attr_reader :name, :record
+
+ def initialize(name, record)
+ @name, @record = name, record
+ end
+
+ def attachment
+ nil
+ end
+
+ def save
+ record.public_send("#{name}_attachment=", nil)
+ end
+ end
+end
diff --git a/activestorage/lib/active_storage/attached/macros.rb b/activestorage/lib/active_storage/attached/macros.rb
deleted file mode 100644
index f0256718ac..0000000000
--- a/activestorage/lib/active_storage/attached/macros.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-
-module ActiveStorage
- # Provides the class-level DSL for declaring that an Active Record model has attached blobs.
- module Attached::Macros
- # Specifies the relation between a single attachment and the model.
- #
- # class User < ActiveRecord::Base
- # has_one_attached :avatar
- # end
- #
- # There is no column defined on the model side, Active Storage takes
- # care of the mapping between your records and the attachment.
- #
- # To avoid N+1 queries, you can include the attached blobs in your query like so:
- #
- # User.with_attached_avatar
- #
- # Under the covers, this relationship is implemented as a +has_one+ association to a
- # ActiveStorage::Attachment record and a +has_one-through+ association to a
- # ActiveStorage::Blob record. These associations are available as +avatar_attachment+
- # and +avatar_blob+. But you shouldn't need to work with these associations directly in
- # most circumstances.
- #
- # The system has been designed to having you go through the ActiveStorage::Attached::One
- # proxy that provides the dynamic proxy to the associations and factory methods, like +attach+.
- #
- # If the +:dependent+ option isn't set, the attachment will be purged
- # (i.e. destroyed) whenever the record is destroyed.
- def has_one_attached(name, dependent: :purge_later)
- class_eval <<-CODE, __FILE__, __LINE__ + 1
- def #{name}
- @active_storage_attached_#{name} ||= ActiveStorage::Attached::One.new("#{name}", self, dependent: #{dependent == :purge_later ? ":purge_later" : "false"})
- end
- CODE
-
- has_one :"#{name}_attachment", -> { where(name: name) }, class_name: "ActiveStorage::Attachment", as: :record
- has_one :"#{name}_blob", through: :"#{name}_attachment", class_name: "ActiveStorage::Blob", source: :blob
-
- scope :"with_attached_#{name}", -> { includes("#{name}_attachment": :blob) }
-
- if dependent == :purge_later
- before_destroy { public_send(name).purge_later }
- end
- end
-
- # Specifies the relation between multiple attachments and the model.
- #
- # class Gallery < ActiveRecord::Base
- # has_many_attached :photos
- # end
- #
- # There are no columns defined on the model side, Active Storage takes
- # care of the mapping between your records and the attachments.
- #
- # To avoid N+1 queries, you can include the attached blobs in your query like so:
- #
- # Gallery.where(user: Current.user).with_attached_photos
- #
- # Under the covers, this relationship is implemented as a +has_many+ association to a
- # ActiveStorage::Attachment record and a +has_many-through+ association to a
- # ActiveStorage::Blob record. These associations are available as +photos_attachments+
- # and +photos_blobs+. But you shouldn't need to work with these associations directly in
- # most circumstances.
- #
- # The system has been designed to having you go through the ActiveStorage::Attached::Many
- # proxy that provides the dynamic proxy to the associations and factory methods, like +#attach+.
- #
- # If the +:dependent+ option isn't set, all the attachments will be purged
- # (i.e. destroyed) whenever the record is destroyed.
- def has_many_attached(name, dependent: :purge_later)
- class_eval <<-CODE, __FILE__, __LINE__ + 1
- def #{name}
- @active_storage_attached_#{name} ||= ActiveStorage::Attached::Many.new("#{name}", self, dependent: #{dependent == :purge_later ? ":purge_later" : "false"})
- end
- CODE
-
- has_many :"#{name}_attachments", -> { where(name: name) }, as: :record, class_name: "ActiveStorage::Attachment"
- has_many :"#{name}_blobs", through: :"#{name}_attachments", class_name: "ActiveStorage::Blob", source: :blob
-
- scope :"with_attached_#{name}", -> { includes("#{name}_attachments": :blob) }
-
- if dependent == :purge_later
- before_destroy { public_send(name).purge_later }
- end
- end
- end
-end
diff --git a/activestorage/lib/active_storage/attached/many.rb b/activestorage/lib/active_storage/attached/many.rb
index 1e0657c33c..25f88284df 100644
--- a/activestorage/lib/active_storage/attached/many.rb
+++ b/activestorage/lib/active_storage/attached/many.rb
@@ -9,19 +9,29 @@ module ActiveStorage
#
# All methods called on this proxy object that aren't listed here will automatically be delegated to +attachments+.
def attachments
- record.public_send("#{name}_attachments")
+ change.present? ? change.attachments : record.public_send("#{name}_attachments")
end
- # Associates one or several attachments with the current record, saving them to the database.
- # Examples:
+ # Returns all attached blobs.
+ def blobs
+ change.present? ? change.blobs : record.public_send("#{name}_blobs")
+ end
+
+ # Attaches one or more +attachables+ to the record.
+ #
+ # If the record is persisted and unchanged, the attachments are saved to
+ # the database immediately. Otherwise, they'll be saved to the DB when the
+ # record is next saved.
#
# document.images.attach(params[:images]) # Array of ActionDispatch::Http::UploadedFile objects
# document.images.attach(params[:signed_blob_id]) # Signed reference to blob from direct upload
# document.images.attach(io: File.open("/path/to/racecar.jpg"), filename: "racecar.jpg", content_type: "image/jpg")
# document.images.attach([ first_blob, second_blob ])
def attach(*attachables)
- attachables.flatten.collect do |attachable|
- attachments.create!(name: name, blob: create_blob_from(attachable))
+ if record.persisted? && !record.changed?
+ record.update(name => blobs + attachables.flatten)
+ else
+ record.public_send("#{name}=", blobs + attachables.flatten)
end
end
@@ -36,20 +46,20 @@ module ActiveStorage
attachments.any?
end
+ # Deletes associated attachments without purging them, leaving their respective blobs in place.
+ def detach
+ attachments.delete_all if attached?
+ end
+
+ ##
+ # :method: purge
+ #
# Directly purges each associated attachment (i.e. destroys the blobs and
# attachments and deletes the files on the service).
- def purge
- if attached?
- attachments.each(&:purge)
- attachments.reload
- end
- end
+ ##
+ # :method: purge_later
+ #
# Purges each associated attachment through the queuing system.
- def purge_later
- if attached?
- attachments.each(&:purge_later)
- end
- end
end
end
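A usage sketch for the has_many_attached proxy above, assuming a Gallery model with has_many_attached :photos as in the documentation:

    gallery = Gallery.first

    # Saved immediately because the record is persisted and unchanged;
    # otherwise the attachment is queued until the next save.
    gallery.photos.attach(io: File.open("/path/to/racecar.jpg"), filename: "racecar.jpg")

    gallery.photos.attached?   # => true
    gallery.photos.blobs       # => the attached ActiveStorage::Blob records

    gallery.photos.detach      # delete the attachments, keep the blobs and files
    gallery.photos.purge_later # or destroy attachments and blobs via the queue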
diff --git a/activestorage/lib/active_storage/attached/model.rb b/activestorage/lib/active_storage/attached/model.rb
new file mode 100644
index 0000000000..ae7f0685f2
--- /dev/null
+++ b/activestorage/lib/active_storage/attached/model.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ # Provides the class-level DSL for declaring an Active Record model's attachments.
+ module Attached::Model
+ extend ActiveSupport::Concern
+
+ class_methods do
+ # Specifies the relation between a single attachment and the model.
+ #
+ # class User < ActiveRecord::Base
+ # has_one_attached :avatar
+ # end
+ #
+      # There is no column defined on the model side; Active Storage takes
+ # care of the mapping between your records and the attachment.
+ #
+ # To avoid N+1 queries, you can include the attached blobs in your query like so:
+ #
+ # User.with_attached_avatar
+ #
+      # Under the covers, this relationship is implemented as a +has_one+ association to an
+      # ActiveStorage::Attachment record and a +has_one-through+ association to an
+ # ActiveStorage::Blob record. These associations are available as +avatar_attachment+
+ # and +avatar_blob+. But you shouldn't need to work with these associations directly in
+ # most circumstances.
+ #
+      # The system has been designed to have you go through the ActiveStorage::Attached::One
+ # proxy that provides the dynamic proxy to the associations and factory methods, like +attach+.
+ #
+ # If the +:dependent+ option isn't set, the attachment will be purged
+ # (i.e. destroyed) whenever the record is destroyed.
+ def has_one_attached(name, dependent: :purge_later)
+ generated_association_methods.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name}
+ @active_storage_attached_#{name} ||= ActiveStorage::Attached::One.new("#{name}", self)
+ end
+
+ def #{name}=(attachable)
+ attachment_changes["#{name}"] =
+ if attachable.nil?
+ ActiveStorage::Attached::Changes::DeleteOne.new("#{name}", self)
+ else
+ ActiveStorage::Attached::Changes::CreateOne.new("#{name}", self, attachable)
+ end
+ end
+ CODE
+
+ has_one :"#{name}_attachment", -> { where(name: name) }, class_name: "ActiveStorage::Attachment", as: :record, inverse_of: :record, dependent: :destroy
+ has_one :"#{name}_blob", through: :"#{name}_attachment", class_name: "ActiveStorage::Blob", source: :blob
+
+ scope :"with_attached_#{name}", -> { includes("#{name}_attachment": :blob) }
+
+ after_save { attachment_changes[name.to_s]&.save }
+
+ after_commit(on: %i[ create update ]) { attachment_changes.delete(name.to_s).try(:upload) }
+
+ ActiveRecord::Reflection.add_attachment_reflection(
+ self,
+ name,
+ ActiveRecord::Reflection.create(:has_one_attached, name, nil, { dependent: dependent }, self)
+ )
+ end
+
+ # Specifies the relation between multiple attachments and the model.
+ #
+ # class Gallery < ActiveRecord::Base
+ # has_many_attached :photos
+ # end
+ #
+      # There are no columns defined on the model side; Active Storage takes
+ # care of the mapping between your records and the attachments.
+ #
+ # To avoid N+1 queries, you can include the attached blobs in your query like so:
+ #
+ # Gallery.where(user: Current.user).with_attached_photos
+ #
+      # Under the covers, this relationship is implemented as a +has_many+ association to an
+      # ActiveStorage::Attachment record and a +has_many-through+ association to an
+ # ActiveStorage::Blob record. These associations are available as +photos_attachments+
+ # and +photos_blobs+. But you shouldn't need to work with these associations directly in
+ # most circumstances.
+ #
+      # The system has been designed to have you go through the ActiveStorage::Attached::Many
+ # proxy that provides the dynamic proxy to the associations and factory methods, like +#attach+.
+ #
+ # If the +:dependent+ option isn't set, all the attachments will be purged
+ # (i.e. destroyed) whenever the record is destroyed.
+ def has_many_attached(name, dependent: :purge_later)
+ generated_association_methods.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name}
+ @active_storage_attached_#{name} ||= ActiveStorage::Attached::Many.new("#{name}", self)
+ end
+
+ def #{name}=(attachables)
+ attachment_changes["#{name}"] =
+ if attachables.nil? || Array(attachables).none?
+ ActiveStorage::Attached::Changes::DeleteMany.new("#{name}", self)
+ else
+ ActiveStorage::Attached::Changes::CreateMany.new("#{name}", self, attachables)
+ end
+ end
+ CODE
+
+ has_many :"#{name}_attachments", -> { where(name: name) }, as: :record, class_name: "ActiveStorage::Attachment", inverse_of: :record, dependent: :destroy do
+ def purge
+ each(&:purge)
+ reset
+ end
+
+ def purge_later
+ each(&:purge_later)
+ reset
+ end
+ end
+ has_many :"#{name}_blobs", through: :"#{name}_attachments", class_name: "ActiveStorage::Blob", source: :blob
+
+ scope :"with_attached_#{name}", -> { includes("#{name}_attachments": :blob) }
+
+ after_save { attachment_changes[name.to_s]&.save }
+
+ after_commit(on: %i[ create update ]) { attachment_changes.delete(name.to_s).try(:upload) }
+
+ ActiveRecord::Reflection.add_attachment_reflection(
+ self,
+ name,
+ ActiveRecord::Reflection.create(:has_many_attached, name, nil, { dependent: dependent }, self)
+ )
+ end
+ end
+
+ def attachment_changes #:nodoc:
+ @attachment_changes ||= {}
+ end
+
+ def reload(*) #:nodoc:
+ super.tap { @attachment_changes = nil }
+ end
+ end
+end
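The module above also generates plain writers, so attachments participate in ordinary assignment and update calls. A sketch, assuming a User model with has_one_attached :avatar; uploaded_file and avatar_blob are illustrative:

    user = User.new

    # Queues a CreateOne change; the blob is uploaded after commit and the
    # attachment row is saved with the record.
    user.avatar = uploaded_file
    user.save

    # Equivalent one-liner on a persisted record:
    user.update(avatar: avatar_blob)

    # Assigning nil queues a DeleteOne change:
    user.update(avatar: nil)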
diff --git a/activestorage/lib/active_storage/attached/one.rb b/activestorage/lib/active_storage/attached/one.rb
index dc19512484..c039226fcd 100644
--- a/activestorage/lib/active_storage/attached/one.rb
+++ b/activestorage/lib/active_storage/attached/one.rb
@@ -10,21 +10,28 @@ module ActiveStorage
# You don't have to call this method to access the attachment's methods as
# they are all available at the model level.
def attachment
- record.public_send("#{name}_attachment")
+ change.present? ? change.attachment : record.public_send("#{name}_attachment")
end
- # Associates a given attachment with the current record, saving it to the database.
- # Examples:
+ def blank?
+ !attached?
+ end
+
+ # Attaches an +attachable+ to the record.
+ #
+ # If the record is persisted and unchanged, the attachment is saved to
+ # the database immediately. Otherwise, it'll be saved to the DB when the
+ # record is next saved.
#
# person.avatar.attach(params[:avatar]) # ActionDispatch::Http::UploadedFile object
# person.avatar.attach(params[:signed_blob_id]) # Signed reference to blob from direct upload
# person.avatar.attach(io: File.open("/path/to/face.jpg"), filename: "face.jpg", content_type: "image/jpg")
# person.avatar.attach(avatar_blob) # ActiveStorage::Blob object
def attach(attachable)
- if attached? && dependent == :purge_later
- replace attachable
+ if record.persisted? && !record.changed?
+ record.update(name => attachable)
else
- write_attachment create_attachment_from(attachable)
+ record.public_send("#{name}=", attachable)
end
end
@@ -39,6 +46,14 @@ module ActiveStorage
attachment.present?
end
+ # Deletes the attachment without purging it, leaving its blob in place.
+ def detach
+ if attached?
+ attachment.delete
+ write_attachment nil
+ end
+ end
+
# Directly purges the attachment (i.e. destroys the blob and
# attachment and deletes the file on the service).
def purge
@@ -52,27 +67,11 @@ module ActiveStorage
def purge_later
if attached?
attachment.purge_later
+ write_attachment nil
end
end
private
- def replace(attachable)
- blob.tap do
- transaction do
- destroy_attachment
- write_attachment create_attachment_from(attachable)
- end
- end.purge_later
- end
-
- def destroy_attachment
- attachment.destroy
- end
-
- def create_attachment_from(attachable)
- ActiveStorage::Attachment.create!(record: record, name: name, blob: create_blob_from(attachable))
- end
-
def write_attachment(attachment)
record.public_send("#{name}_attachment=", attachment)
end
diff --git a/activestorage/lib/active_storage/downloader.rb b/activestorage/lib/active_storage/downloader.rb
new file mode 100644
index 0000000000..87be6efb05
--- /dev/null
+++ b/activestorage/lib/active_storage/downloader.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ class Downloader #:nodoc:
+ def initialize(blob, tempdir: nil)
+ @blob = blob
+ @tempdir = tempdir
+ end
+
+ def download_blob_to_tempfile
+ open_tempfile do |file|
+ download_blob_to file
+ verify_integrity_of file
+ yield file
+ end
+ end
+
+ private
+ attr_reader :blob, :tempdir
+
+ def open_tempfile
+ file = Tempfile.open([ "ActiveStorage-#{blob.id}-", blob.filename.extension_with_delimiter ], tempdir)
+
+ begin
+ yield file
+ ensure
+ file.close!
+ end
+ end
+
+ def download_blob_to(file)
+ file.binmode
+ blob.download { |chunk| file.write(chunk) }
+ file.flush
+ file.rewind
+ end
+
+ def verify_integrity_of(file)
+ unless Digest::MD5.file(file).base64digest == blob.checksum
+ raise ActiveStorage::IntegrityError
+ end
+ end
+ end
+end
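The Downloader above backs ActiveStorage::Blob#open, the replacement recommended by the Downloading deprecation below. A usage sketch; blob is an illustrative ActiveStorage::Blob:

    # Streams the blob into a tempfile, verifies its MD5 digest against
    # blob.checksum, yields it, and removes it afterwards.
    blob.open(tempdir: Dir.tmpdir) do |file|
      file.size
    end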
diff --git a/activestorage/lib/active_storage/downloading.rb b/activestorage/lib/active_storage/downloading.rb
index 3dac6b116a..df820bc088 100644
--- a/activestorage/lib/active_storage/downloading.rb
+++ b/activestorage/lib/active_storage/downloading.rb
@@ -1,25 +1,46 @@
# frozen_string_literal: true
+require "tmpdir"
+require "active_support/core_ext/string/filters"
+
module ActiveStorage
module Downloading
+ def self.included(klass)
+ ActiveSupport::Deprecation.warn <<~MESSAGE.squish, caller_locations(2)
+ ActiveStorage::Downloading is deprecated and will be removed in Active Storage 6.1.
+ Use ActiveStorage::Blob#open instead.
+ MESSAGE
+ end
+
private
# Opens a new tempfile in #tempdir and copies blob data into it. Yields the tempfile.
- def download_blob_to_tempfile # :doc:
- Tempfile.open("ActiveStorage", tempdir) do |file|
+ def download_blob_to_tempfile #:doc:
+ open_tempfile_for_blob do |file|
download_blob_to file
yield file
end
end
+ def open_tempfile_for_blob
+ tempfile = Tempfile.open([ "ActiveStorage", blob.filename.extension_with_delimiter ], tempdir)
+
+ begin
+ yield tempfile
+ ensure
+ tempfile.close!
+ end
+ end
+
# Efficiently downloads blob data into the given file.
- def download_blob_to(file) # :doc:
+ def download_blob_to(file) #:doc:
file.binmode
blob.download { |chunk| file.write(chunk) }
+ file.flush
file.rewind
end
# Returns the directory in which tempfiles should be opened. Defaults to +Dir.tmpdir+.
- def tempdir # :doc:
+ def tempdir #:doc:
Dir.tmpdir
end
end
diff --git a/activestorage/lib/active_storage/engine.rb b/activestorage/lib/active_storage/engine.rb
index 6cf6635c4f..9d6a27eabe 100644
--- a/activestorage/lib/active_storage/engine.rb
+++ b/activestorage/lib/active_storage/engine.rb
@@ -3,28 +3,58 @@
require "rails"
require "active_storage"
-require "active_storage/previewer/pdf_previewer"
+require "active_storage/previewer/poppler_pdf_previewer"
+require "active_storage/previewer/mupdf_previewer"
require "active_storage/previewer/video_previewer"
require "active_storage/analyzer/image_analyzer"
require "active_storage/analyzer/video_analyzer"
+require "active_storage/reflection"
+
module ActiveStorage
class Engine < Rails::Engine # :nodoc:
isolate_namespace ActiveStorage
config.active_storage = ActiveSupport::OrderedOptions.new
- config.active_storage.previewers = [ ActiveStorage::Previewer::PDFPreviewer, ActiveStorage::Previewer::VideoPreviewer ]
- config.active_storage.analyzers = [ ActiveStorage::Analyzer::ImageAnalyzer, ActiveStorage::Analyzer::VideoAnalyzer ]
+ config.active_storage.previewers = [ ActiveStorage::Previewer::PopplerPDFPreviewer, ActiveStorage::Previewer::MuPDFPreviewer, ActiveStorage::Previewer::VideoPreviewer ]
+ config.active_storage.analyzers = [ ActiveStorage::Analyzer::ImageAnalyzer, ActiveStorage::Analyzer::VideoAnalyzer ]
+ config.active_storage.paths = ActiveSupport::OrderedOptions.new
+
+ config.active_storage.variable_content_types = %w(
+ image/png
+ image/gif
+ image/jpg
+ image/jpeg
+ image/vnd.adobe.photoshop
+ image/vnd.microsoft.icon
+ )
+
+ config.active_storage.content_types_to_serve_as_binary = %w(
+ text/html
+ text/javascript
+ image/svg+xml
+ application/postscript
+ application/x-shockwave-flash
+ text/xml
+ application/xml
+ application/xhtml+xml
+ )
config.eager_load_namespaces << ActiveStorage
initializer "active_storage.configs" do
config.after_initialize do |app|
- ActiveStorage.logger = app.config.active_storage.logger || Rails.logger
- ActiveStorage.queue = app.config.active_storage.queue
- ActiveStorage.previewers = app.config.active_storage.previewers || []
- ActiveStorage.analyzers = app.config.active_storage.analyzers || []
+ ActiveStorage.logger = app.config.active_storage.logger || Rails.logger
+ ActiveStorage.queue = app.config.active_storage.queue
+ ActiveStorage.variant_processor = app.config.active_storage.variant_processor || :mini_magick
+ ActiveStorage.previewers = app.config.active_storage.previewers || []
+ ActiveStorage.analyzers = app.config.active_storage.analyzers || []
+ ActiveStorage.paths = app.config.active_storage.paths || {}
+
+ ActiveStorage.variable_content_types = app.config.active_storage.variable_content_types || []
+ ActiveStorage.content_types_to_serve_as_binary = app.config.active_storage.content_types_to_serve_as_binary || []
+ ActiveStorage.service_urls_expire_in = app.config.active_storage.service_urls_expire_in || 5.minutes
end
end
@@ -32,7 +62,7 @@ module ActiveStorage
require "active_storage/attached"
ActiveSupport.on_load(:active_record) do
- extend ActiveStorage::Attached::Macros
+ include ActiveStorage::Attached::Model
end
end
@@ -43,7 +73,7 @@ module ActiveStorage
end
initializer "active_storage.services" do
- config.to_prepare do
+ ActiveSupport.on_load(:active_storage_blob) do
if config_choice = Rails.configuration.active_storage.service
configs = Rails.configuration.active_storage.service_configurations ||= begin
config_file = Pathname.new(Rails.root.join("config/storage.yml"))
@@ -68,5 +98,12 @@ module ActiveStorage
end
end
end
+
+ initializer "active_storage.reflection" do
+ ActiveSupport.on_load(:active_record) do
+ include Reflection::ActiveRecordExtensions
+ ActiveRecord::Reflection.singleton_class.prepend(Reflection::ReflectionExtension)
+ end
+ end
end
end
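The previewers and analyzers registered above resolve their external binaries through ActiveStorage.paths. A sketch of pointing them at non-default locations; the paths are illustrative:

    # config/application.rb
    config.active_storage.paths[:ffprobe]  = "/usr/local/bin/ffprobe"
    config.active_storage.paths[:ffmpeg]   = "/usr/local/bin/ffmpeg"
    config.active_storage.paths[:mutool]   = "/usr/local/bin/mutool"
    config.active_storage.paths[:pdftoppm] = "/usr/local/bin/pdftoppm"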
diff --git a/activestorage/lib/active_storage/errors.rb b/activestorage/lib/active_storage/errors.rb
new file mode 100644
index 0000000000..f4bf66a615
--- /dev/null
+++ b/activestorage/lib/active_storage/errors.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ # Generic base class for all Active Storage exceptions.
+ class Error < StandardError; end
+
+ # Raised when ActiveStorage::Blob#variant is called on a blob that isn't variable.
+ # Use ActiveStorage::Blob#variable? to determine whether a blob is variable.
+ class InvariableError < Error; end
+
+ # Raised when ActiveStorage::Blob#preview is called on a blob that isn't previewable.
+ # Use ActiveStorage::Blob#previewable? to determine whether a blob is previewable.
+ class UnpreviewableError < Error; end
+
+ # Raised when ActiveStorage::Blob#representation is called on a blob that isn't representable.
+ # Use ActiveStorage::Blob#representable? to determine whether a blob is representable.
+ class UnrepresentableError < Error; end
+
+ # Raised when uploaded or downloaded data does not match a precomputed checksum.
+ # Indicates that a network error or a software bug caused data corruption.
+ class IntegrityError < Error; end
+end
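A sketch of rescuing the error hierarchy above; the attach call and file path are illustrative:

    begin
      user.avatar.attach(io: File.open("/path/to/face.jpg"), filename: "face.jpg")
    rescue ActiveStorage::IntegrityError
      # The data did not match the precomputed checksum; retry or report here.
    rescue ActiveStorage::Error
      # Catch-all for any other Active Storage failure.
    end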
diff --git a/activestorage/lib/active_storage/gem_version.rb b/activestorage/lib/active_storage/gem_version.rb
index e1d7b3493a..492620731b 100644
--- a/activestorage/lib/active_storage/gem_version.rb
+++ b/activestorage/lib/active_storage/gem_version.rb
@@ -7,8 +7,8 @@ module ActiveStorage
end
module VERSION
- MAJOR = 5
- MINOR = 2
+ MAJOR = 6
+ MINOR = 0
TINY = 0
PRE = "alpha"
diff --git a/activestorage/lib/active_storage/log_subscriber.rb b/activestorage/lib/active_storage/log_subscriber.rb
index 5cbf4bd1a5..6c0b4c30e7 100644
--- a/activestorage/lib/active_storage/log_subscriber.rb
+++ b/activestorage/lib/active_storage/log_subscriber.rb
@@ -14,10 +14,16 @@ module ActiveStorage
info event, color("Downloaded file from key: #{key_in(event)}", BLUE)
end
+ alias_method :service_streaming_download, :service_download
+
def service_delete(event)
info event, color("Deleted file from key: #{key_in(event)}", RED)
end
+ def service_delete_prefixed(event)
+ info event, color("Deleted files by key prefix: #{event.payload[:prefix]}", RED)
+ end
+
def service_exist(event)
debug event, color("Checked if file exists at key: #{key_in(event)} (#{event.payload[:exist] ? "yes" : "no"})", BLUE)
end
diff --git a/activestorage/lib/active_storage/previewer.rb b/activestorage/lib/active_storage/previewer.rb
index ed75bae3b5..95a041fd16 100644
--- a/activestorage/lib/active_storage/previewer.rb
+++ b/activestorage/lib/active_storage/previewer.rb
@@ -1,14 +1,10 @@
# frozen_string_literal: true
-require "active_storage/downloading"
-
module ActiveStorage
# This is an abstract base class for previewers, which generate images from blobs. See
- # ActiveStorage::Previewer::PDFPreviewer and ActiveStorage::Previewer::VideoPreviewer for examples of
- # concrete subclasses.
+ # ActiveStorage::Previewer::MuPDFPreviewer and ActiveStorage::Previewer::VideoPreviewer for
+ # examples of concrete subclasses.
class Previewer
- include Downloading
-
attr_reader :blob
# Implement this method in a concrete subclass. Have it return true when given a blob from which
@@ -28,9 +24,14 @@ module ActiveStorage
end
private
+ # Downloads the blob to a tempfile on disk. Yields the tempfile.
+ def download_blob_to_tempfile(&block) #:doc:
+ blob.open tempdir: tempdir, &block
+ end
+
# Executes a system command, capturing its binary output in a tempfile. Yields the tempfile.
#
- # Use this method to shell out to a system library (e.g. mupdf or ffmpeg) for preview image
+      # Use this method to shell out to a system library (e.g. MuPDF or FFmpeg) for preview image
# generation. The resulting tempfile can be used as the +:io+ value in an attachable Hash:
#
# def preview
@@ -41,18 +42,43 @@ module ActiveStorage
# end
# end
#
- # The output tempfile is opened in the directory returned by ActiveStorage::Downloading#tempdir.
- def draw(*argv) # :doc:
- Tempfile.open("ActiveStorage", tempdir) do |file|
- capture(*argv, to: file)
+ # The output tempfile is opened in the directory returned by #tempdir.
+ def draw(*argv) #:doc:
+ open_tempfile do |file|
+ instrument :preview, key: blob.key do
+ capture(*argv, to: file)
+ end
+
yield file
end
end
+ def open_tempfile
+ tempfile = Tempfile.open("ActiveStorage-", tempdir)
+
+ begin
+ yield tempfile
+ ensure
+ tempfile.close!
+ end
+ end
+
+ def instrument(operation, payload = {}, &block)
+ ActiveSupport::Notifications.instrument "#{operation}.active_storage", payload, &block
+ end
+
def capture(*argv, to:)
to.binmode
- IO.popen(argv) { |out| IO.copy_stream(out, to) }
+ IO.popen(argv, err: File::NULL) { |out| IO.copy_stream(out, to) }
to.rewind
end
+
+ def logger #:doc:
+ ActiveStorage.logger
+ end
+
+ def tempdir #:doc:
+ Dir.tmpdir
+ end
end
end
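The draw path above and the variant transform path now emit preview.active_storage and transform.active_storage notifications. A sketch of subscribing to them with ActiveSupport::Notifications:

    ActiveSupport::Notifications.subscribe("preview.active_storage") do |*args|
      event = ActiveSupport::Notifications::Event.new(*args)
      Rails.logger.info "Previewed key=#{event.payload[:key]} in #{event.duration.round(1)}ms"
    end

    ActiveSupport::Notifications.subscribe("transform.active_storage") do |*args|
      event = ActiveSupport::Notifications::Event.new(*args)
      Rails.logger.info "Transformed variant in #{event.duration.round(1)}ms"
    end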
diff --git a/activestorage/lib/active_storage/previewer/mupdf_previewer.rb b/activestorage/lib/active_storage/previewer/mupdf_previewer.rb
new file mode 100644
index 0000000000..ae02a4889d
--- /dev/null
+++ b/activestorage/lib/active_storage/previewer/mupdf_previewer.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ class Previewer::MuPDFPreviewer < Previewer
+ class << self
+ def accept?(blob)
+ blob.content_type == "application/pdf" && mutool_exists?
+ end
+
+ def mutool_path
+ ActiveStorage.paths[:mutool] || "mutool"
+ end
+
+ def mutool_exists?
+ return @mutool_exists unless @mutool_exists.nil?
+
+ system mutool_path, out: File::NULL, err: File::NULL
+
+ @mutool_exists = $?.exitstatus == 1
+ end
+ end
+
+ def preview
+ download_blob_to_tempfile do |input|
+ draw_first_page_from input do |output|
+ yield io: output, filename: "#{blob.filename.base}.png", content_type: "image/png"
+ end
+ end
+ end
+
+ private
+ def draw_first_page_from(file, &block)
+ draw self.class.mutool_path, "draw", "-F", "png", "-o", "-", file.path, "1", &block
+ end
+ end
+end
diff --git a/activestorage/lib/active_storage/previewer/pdf_previewer.rb b/activestorage/lib/active_storage/previewer/pdf_previewer.rb
deleted file mode 100644
index a2f05c74a6..0000000000
--- a/activestorage/lib/active_storage/previewer/pdf_previewer.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-module ActiveStorage
- class Previewer::PDFPreviewer < Previewer
- def self.accept?(blob)
- blob.content_type == "application/pdf"
- end
-
- def preview
- download_blob_to_tempfile do |input|
- draw "mutool", "draw", "-F", "png", "-o", "-", input.path, "1" do |output|
- yield io: output, filename: "#{blob.filename.base}.png", content_type: "image/png"
- end
- end
- end
- end
-end
diff --git a/activestorage/lib/active_storage/previewer/poppler_pdf_previewer.rb b/activestorage/lib/active_storage/previewer/poppler_pdf_previewer.rb
new file mode 100644
index 0000000000..69eb617d7b
--- /dev/null
+++ b/activestorage/lib/active_storage/previewer/poppler_pdf_previewer.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ class Previewer::PopplerPDFPreviewer < Previewer
+ class << self
+ def accept?(blob)
+ blob.content_type == "application/pdf" && pdftoppm_exists?
+ end
+
+ def pdftoppm_path
+ ActiveStorage.paths[:pdftoppm] || "pdftoppm"
+ end
+
+ def pdftoppm_exists?
+ return @pdftoppm_exists if defined?(@pdftoppm_exists)
+
+ @pdftoppm_exists = system(pdftoppm_path, "-v", out: File::NULL, err: File::NULL)
+ end
+ end
+
+ def preview
+ download_blob_to_tempfile do |input|
+ draw_first_page_from input do |output|
+ yield io: output, filename: "#{blob.filename.base}.png", content_type: "image/png"
+ end
+ end
+ end
+
+ private
+ def draw_first_page_from(file, &block)
+        # Use 72 DPI to match the thumbnail dimensions of the PDF
+ draw self.class.pdftoppm_path, "-singlefile", "-r", "72", "-png", file.path, &block
+ end
+ end
+end
diff --git a/activestorage/lib/active_storage/previewer/video_previewer.rb b/activestorage/lib/active_storage/previewer/video_previewer.rb
index 49f128d142..50e13d202a 100644
--- a/activestorage/lib/active_storage/previewer/video_previewer.rb
+++ b/activestorage/lib/active_storage/previewer/video_previewer.rb
@@ -9,15 +9,18 @@ module ActiveStorage
def preview
download_blob_to_tempfile do |input|
draw_relevant_frame_from input do |output|
- yield io: output, filename: "#{blob.filename.base}.png", content_type: "image/png"
+ yield io: output, filename: "#{blob.filename.base}.jpg", content_type: "image/jpeg"
end
end
end
private
def draw_relevant_frame_from(file, &block)
- draw "ffmpeg", "-i", file.path, "-y", "-vcodec", "png",
- "-vf", "thumbnail", "-vframes", "1", "-f", "image2", "-", &block
+ draw ffmpeg_path, "-i", file.path, "-y", "-vframes", "1", "-f", "image2", "-", &block
+ end
+
+ def ffmpeg_path
+ ActiveStorage.paths[:ffmpeg] || "ffmpeg"
end
end
end
diff --git a/activestorage/lib/active_storage/reflection.rb b/activestorage/lib/active_storage/reflection.rb
new file mode 100644
index 0000000000..ce248c88b5
--- /dev/null
+++ b/activestorage/lib/active_storage/reflection.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ module Reflection
+ # Holds all the metadata about a has_one_attached attachment as it was
+ # specified in the Active Record class.
+ class HasOneAttachedReflection < ActiveRecord::Reflection::MacroReflection #:nodoc:
+ def macro
+ :has_one_attached
+ end
+ end
+
+ # Holds all the metadata about a has_many_attached attachment as it was
+ # specified in the Active Record class.
+ class HasManyAttachedReflection < ActiveRecord::Reflection::MacroReflection #:nodoc:
+ def macro
+ :has_many_attached
+ end
+ end
+
+ module ReflectionExtension # :nodoc:
+ def add_attachment_reflection(model, name, reflection)
+ model.attachment_reflections = model.attachment_reflections.merge(name.to_s => reflection)
+ end
+
+ private
+ def reflection_class_for(macro)
+ case macro
+ when :has_one_attached
+ HasOneAttachedReflection
+ when :has_many_attached
+ HasManyAttachedReflection
+ else
+ super
+ end
+ end
+ end
+
+ module ActiveRecordExtensions
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :attachment_reflections, instance_writer: false, default: {}
+ end
+
+ module ClassMethods
+ # Returns an array of reflection objects for all the attachments in the
+ # class.
+ def reflect_on_all_attachments
+ attachment_reflections.values
+ end
+
+ # Returns the reflection object for the named +attachment+.
+ #
+ # User.reflect_on_attachment(:avatar)
+ # # => the avatar reflection
+ #
+ def reflect_on_attachment(attachment)
+ attachment_reflections[attachment.to_s]
+ end
+ end
+ end
+ end
+end
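A sketch of the reflection API above, assuming a User model with has_one_attached :avatar:

    User.reflect_on_all_attachments.map(&:name)
    # => [:avatar]

    reflection = User.reflect_on_attachment(:avatar)
    reflection.macro               # => :has_one_attached
    reflection.options[:dependent] # => :purge_later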
diff --git a/activestorage/lib/active_storage/service.rb b/activestorage/lib/active_storage/service.rb
index aa150e4d8a..f915518f52 100644
--- a/activestorage/lib/active_storage/service.rb
+++ b/activestorage/lib/active_storage/service.rb
@@ -3,8 +3,6 @@
require "active_storage/log_subscriber"
module ActiveStorage
- class IntegrityError < StandardError; end
-
# Abstract class serving as an interface for concrete services.
#
# The available services are:
@@ -41,8 +39,6 @@ module ActiveStorage
extend ActiveSupport::Autoload
autoload :Configurator
- class_attribute :url_expires_in, default: 5.minutes
-
class << self
# Configure an Active Storage service by name from a set of configurations,
# typically loaded from a YAML file. The Active Storage engine uses this
@@ -73,18 +69,28 @@ module ActiveStorage
raise NotImplementedError
end
+ # Return the partial content in the byte +range+ of the file at the +key+.
+ def download_chunk(key, range)
+ raise NotImplementedError
+ end
+
# Delete the file at the +key+.
def delete(key)
raise NotImplementedError
end
+ # Delete files at keys starting with the +prefix+.
+ def delete_prefixed(prefix)
+ raise NotImplementedError
+ end
+
# Return +true+ if a file exists at the +key+.
def exist?(key)
raise NotImplementedError
end
# Returns a signed, temporary URL for the file at the +key+. The URL will be valid for the amount
- # of seconds specified in +expires_in+. You most also provide the +disposition+ (+:inline+ or +:attachment+),
+ # of seconds specified in +expires_in+. You must also provide the +disposition+ (+:inline+ or +:attachment+),
# +filename+, and +content_type+ that you wish the file to be served with on request.
def url(key, expires_in:, disposition:, filename:, content_type:)
raise NotImplementedError
@@ -92,7 +98,7 @@ module ActiveStorage
# Returns a signed, temporary URL that a direct upload file can be PUT to on the +key+.
# The URL will be valid for the amount of seconds specified in +expires_in+.
- # You most also provide the +content_type+, +content_length+, and +checksum+ of the file
+ # You must also provide the +content_type+, +content_length+, and +checksum+ of the file
# that will be uploaded. All these attributes will be validated by the service upon upload.
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
raise NotImplementedError
@@ -104,10 +110,10 @@ module ActiveStorage
end
private
- def instrument(operation, key, payload = {}, &block)
+ def instrument(operation, payload = {}, &block)
ActiveSupport::Notifications.instrument(
"service_#{operation}.active_storage",
- payload.merge(key: key, service: service_name), &block)
+ payload.merge(service: service_name), &block)
end
def service_name
diff --git a/activestorage/lib/active_storage/service/azure_storage_service.rb b/activestorage/lib/active_storage/service/azure_storage_service.rb
index f3877ad9c9..b26234c722 100644
--- a/activestorage/lib/active_storage/service/azure_storage_service.rb
+++ b/activestorage/lib/active_storage/service/azure_storage_service.rb
@@ -8,20 +8,19 @@ module ActiveStorage
# Wraps the Microsoft Azure Storage Blob Service as an Active Storage service.
# See ActiveStorage::Service for the generic API documentation that applies to all services.
class Service::AzureStorageService < Service
- attr_reader :client, :path, :blobs, :container, :signer
+ attr_reader :client, :blobs, :container, :signer
- def initialize(path:, storage_account_name:, storage_access_key:, container:)
+ def initialize(storage_account_name:, storage_access_key:, container:)
@client = Azure::Storage::Client.create(storage_account_name: storage_account_name, storage_access_key: storage_access_key)
@signer = Azure::Storage::Core::Auth::SharedAccessSignature.new(storage_account_name, storage_access_key)
@blobs = client.blob_client
@container = container
- @path = path
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
begin
- blobs.create_block_blob(container, key, io, content_md5: checksum)
+ blobs.create_block_blob(container, key, IO.try_convert(io) || io, content_md5: checksum)
rescue Azure::Core::Http::HTTPError
raise ActiveStorage::IntegrityError
end
@@ -30,29 +29,52 @@ module ActiveStorage
def download(key, &block)
if block_given?
- instrument :streaming_download, key do
+ instrument :streaming_download, key: key do
stream(key, &block)
end
else
- instrument :download, key do
+ instrument :download, key: key do
_, io = blobs.get_blob(container, key)
io.force_encoding(Encoding::BINARY)
end
end
end
+ def download_chunk(key, range)
+ instrument :download_chunk, key: key, range: range do
+ _, io = blobs.get_blob(container, key, start_range: range.begin, end_range: range.exclude_end? ? range.end - 1 : range.end)
+ io.force_encoding(Encoding::BINARY)
+ end
+ end
+
def delete(key)
- instrument :delete, key do
+ instrument :delete, key: key do
begin
blobs.delete_blob(container, key)
rescue Azure::Core::Http::HTTPError
- false
+ # Ignore files already deleted
+ end
+ end
+ end
+
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ marker = nil
+
+ loop do
+ results = blobs.list_blobs(container, prefix: prefix, marker: marker)
+
+ results.each do |blob|
+ blobs.delete_blob(container, blob.name)
+ end
+
+ break unless marker = results.continuation_token.presence
end
end
end
def exist?(key)
- instrument :exist, key do |payload|
+ instrument :exist, key: key do |payload|
answer = blob_for(key).present?
payload[:exist] = answer
answer
@@ -60,10 +82,10 @@ module ActiveStorage
end
def url(key, expires_in:, filename:, disposition:, content_type:)
- instrument :url, key do |payload|
- base_url = url_for(key)
+ instrument :url, key: key do |payload|
generated_url = signer.signed_uri(
- URI(base_url), false,
+ uri_for(key), false,
+ service: "b",
permissions: "r",
expiry: format_expiry(expires_in),
content_disposition: content_disposition_with(type: disposition, filename: filename),
@@ -77,10 +99,13 @@ module ActiveStorage
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
- base_url = url_for(key)
- generated_url = signer.signed_uri(URI(base_url), false, permissions: "rw",
- expiry: format_expiry(expires_in)).to_s
+ instrument :url, key: key do |payload|
+ generated_url = signer.signed_uri(
+ uri_for(key), false,
+ service: "b",
+ permissions: "rw",
+ expiry: format_expiry(expires_in)
+ ).to_s
payload[:url] = generated_url
@@ -93,8 +118,8 @@ module ActiveStorage
end
private
- def url_for(key)
- "#{path}/#{container}/#{key}"
+ def uri_for(key)
+ blobs.generate_uri("#{container}/#{key}")
end
def blob_for(key)
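
As a usage sketch of the two new operations (download_chunk and delete_prefixed) implemented for this and the other services, called against whichever service the application has configured; the blob key and prefix are hypothetical:

    service = ActiveStorage::Blob.service

    service.download_chunk("some-blob-key", 0..1023)     # first 1,024 bytes of the file
    service.delete_prefixed("variants/some-blob-key/")   # removes every key under the prefix
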
diff --git a/activestorage/lib/active_storage/service/configurator.rb b/activestorage/lib/active_storage/service/configurator.rb
index 39951fd026..fa80c66c3b 100644
--- a/activestorage/lib/active_storage/service/configurator.rb
+++ b/activestorage/lib/active_storage/service/configurator.rb
@@ -26,7 +26,9 @@ module ActiveStorage
def resolve(class_name)
require "active_storage/service/#{class_name.to_s.underscore}_service"
- ActiveStorage::Service.const_get(:"#{class_name}Service")
+ ActiveStorage::Service.const_get(:"#{class_name.camelize}Service")
+ rescue LoadError
+ raise "Missing service adapter for #{class_name.inspect}"
end
end
end
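
A sketch of the name resolution performed by resolve above, using the built-in Disk service as the example; String#underscore and String#camelize come from Active Support:

    class_name = "Disk"

    "active_storage/service/#{class_name.to_s.underscore}_service"  # => "active_storage/service/disk_service" (the file required)
    :"#{class_name.camelize}Service"                                 # => :DiskService (the constant looked up)
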
diff --git a/activestorage/lib/active_storage/service/disk_service.rb b/activestorage/lib/active_storage/service/disk_service.rb
index 52eaba4e7b..9f304b7e01 100644
--- a/activestorage/lib/active_storage/service/disk_service.rb
+++ b/activestorage/lib/active_storage/service/disk_service.rb
@@ -16,7 +16,7 @@ module ActiveStorage
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
IO.copy_stream(io, make_path_for(key))
ensure_integrity_of(key, checksum) if checksum
end
@@ -24,22 +24,31 @@ module ActiveStorage
def download(key)
if block_given?
- instrument :streaming_download, key do
+ instrument :streaming_download, key: key do
File.open(path_for(key), "rb") do |file|
- while data = file.read(64.kilobytes)
+ while data = file.read(5.megabytes)
yield data
end
end
end
else
- instrument :download, key do
+ instrument :download, key: key do
File.binread path_for(key)
end
end
end
+ def download_chunk(key, range)
+ instrument :download_chunk, key: key, range: range do
+ File.open(path_for(key), "rb") do |file|
+ file.seek range.begin
+ file.read range.size
+ end
+ end
+ end
+
def delete(key)
- instrument :delete, key do
+ instrument :delete, key: key do
begin
File.delete path_for(key)
rescue Errno::ENOENT
@@ -48,8 +57,16 @@ module ActiveStorage
end
end
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ Dir.glob(path_for("#{prefix}*")).each do |path|
+ FileUtils.rm_rf(path)
+ end
+ end
+ end
+
def exist?(key)
- instrument :exist, key do |payload|
+ instrument :exist, key: key do |payload|
answer = File.exist? path_for(key)
payload[:exist] = answer
answer
@@ -57,18 +74,17 @@ module ActiveStorage
end
def url(key, expires_in:, filename:, disposition:, content_type:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
verified_key_with_expiration = ActiveStorage.verifier.generate(key, expires_in: expires_in, purpose: :blob_key)
generated_url =
- if defined?(Rails.application)
- Rails.application.routes.url_helpers.rails_disk_service_path \
- verified_key_with_expiration,
- filename: filename, disposition: content_disposition_with(type: disposition, filename: filename), content_type: content_type
- else
- "/rails/active_storage/disk/#{verified_key_with_expiration}/#{filename}?content_type=#{content_type}" \
- "&disposition=#{content_disposition_with(type: disposition, filename: filename)}"
- end
+ url_helpers.rails_disk_service_url(
+ verified_key_with_expiration,
+ host: current_host,
+ filename: filename,
+ disposition: content_disposition_with(type: disposition, filename: filename),
+ content_type: content_type
+ )
payload[:url] = generated_url
@@ -77,7 +93,7 @@ module ActiveStorage
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
verified_token_with_expiration = ActiveStorage.verifier.generate(
{
key: key,
@@ -89,12 +105,7 @@ module ActiveStorage
purpose: :blob_token }
)
- generated_url =
- if defined?(Rails.application)
- Rails.application.routes.url_helpers.update_rails_disk_service_path verified_token_with_expiration
- else
- "/rails/active_storage/disk/#{verified_token_with_expiration}"
- end
+ generated_url = url_helpers.update_rails_disk_service_url(verified_token_with_expiration, host: current_host)
payload[:url] = generated_url
@@ -106,11 +117,11 @@ module ActiveStorage
{ "Content-Type" => content_type }
end
- private
- def path_for(key)
- File.join root, folder_for(key), key
- end
+ def path_for(key) #:nodoc:
+ File.join root, folder_for(key), key
+ end
+ private
def folder_for(key)
[ key[0..1], key[2..3] ].join("/")
end
@@ -125,5 +136,13 @@ module ActiveStorage
raise ActiveStorage::IntegrityError
end
end
+
+ def url_helpers
+ @url_helpers ||= Rails.application.routes.url_helpers
+ end
+
+ def current_host
+ ActiveStorage::Current.host
+ end
end
end
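
Because the disk service now builds URLs with rails_disk_service_url and ActiveStorage::Current.host (see url_helpers and current_host above), generating a URL outside of a request needs a host. A minimal sketch; the host value and the blob are assumed for illustration:

    # blob is an existing ActiveStorage::Blob stored on the disk service.
    ActiveStorage::Current.set(host: "http://localhost:3000") do
      blob.service_url   # built via the url_helpers above, resolved against the given host
    end
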
diff --git a/activestorage/lib/active_storage/service/gcs_service.rb b/activestorage/lib/active_storage/service/gcs_service.rb
index b4ffeeeb8a..eb46973509 100644
--- a/activestorage/lib/active_storage/service/gcs_service.rb
+++ b/activestorage/lib/active_storage/service/gcs_service.rb
@@ -1,45 +1,52 @@
# frozen_string_literal: true
+gem "google-cloud-storage", "~> 1.11"
require "google/cloud/storage"
-require "active_support/core_ext/object/to_query"
module ActiveStorage
# Wraps the Google Cloud Storage as an Active Storage service. See ActiveStorage::Service for the generic API
# documentation that applies to all services.
class Service::GCSService < Service
- attr_reader :client, :bucket
-
- def initialize(project:, keyfile:, bucket:, **options)
- @client = Google::Cloud::Storage.new(project: project, keyfile: keyfile, **options)
- @bucket = @client.bucket(bucket)
+ def initialize(**config)
+ @config = config
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
begin
- bucket.create_file(io, key, md5: checksum)
+ # The official GCS client library doesn't allow us to create a file with no Content-Type metadata.
+ # We need the file we create to have no Content-Type so we can control it via the response-content-type
+ # param in signed URLs. Workaround: let the GCS client create the file with an inferred
+ # Content-Type (usually "application/octet-stream") and then clear it.
+ bucket.create_file(io, key, md5: checksum).update do |file|
+ file.content_type = nil
+ end
rescue Google::Cloud::InvalidArgumentError
raise ActiveStorage::IntegrityError
end
end
end
- # FIXME: Download in chunks when given a block.
- def download(key)
- instrument :download, key do
- io = file_for(key).download
- io.rewind
-
- if block_given?
- yield io.read
- else
- io.read
+ def download(key, &block)
+ if block_given?
+ instrument :streaming_download, key: key do
+ stream(key, &block)
+ end
+ else
+ instrument :download, key: key do
+ file_for(key).download.string
end
end
end
+ def download_chunk(key, range)
+ instrument :download_chunk, key: key, range: range do
+ file_for(key).download(range: range).string
+ end
+ end
+
def delete(key)
- instrument :delete, key do
+ instrument :delete, key: key do
begin
file_for(key).delete
rescue Google::Cloud::NotFoundError
@@ -48,8 +55,20 @@ module ActiveStorage
end
end
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ bucket.files(prefix: prefix).all do |file|
+ begin
+ file.delete
+ rescue Google::Cloud::NotFoundError
+ # Ignore concurrently-deleted files
+ end
+ end
+ end
+ end
+
def exist?(key)
- instrument :exist, key do |payload|
+ instrument :exist, key: key do |payload|
answer = file_for(key).exists?
payload[:exist] = answer
answer
@@ -57,7 +76,7 @@ module ActiveStorage
end
def url(key, expires_in:, filename:, content_type:, disposition:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
generated_url = file_for(key).signed_url expires: expires_in, query: {
"response-content-disposition" => content_disposition_with(type: disposition, filename: filename),
"response-content-type" => content_type
@@ -69,10 +88,9 @@ module ActiveStorage
end
end
- def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
- generated_url = bucket.signed_url key, method: "PUT", expires: expires_in,
- content_type: content_type, content_md5: checksum
+ def url_for_direct_upload(key, expires_in:, checksum:, **)
+ instrument :url, key: key do |payload|
+ generated_url = bucket.signed_url key, method: "PUT", expires: expires_in, content_md5: checksum
payload[:url] = generated_url
@@ -80,13 +98,36 @@ module ActiveStorage
end
end
- def headers_for_direct_upload(key, content_type:, checksum:, **)
- { "Content-Type" => content_type, "Content-MD5" => checksum }
+ def headers_for_direct_upload(key, checksum:, **)
+ { "Content-MD5" => checksum }
end
private
- def file_for(key)
- bucket.file(key, skip_lookup: true)
+ attr_reader :config
+
+ def file_for(key, skip_lookup: true)
+ bucket.file(key, skip_lookup: skip_lookup)
+ end
+
+ # Reads the file for the given key in chunks, yielding each to the block.
+ def stream(key)
+ file = file_for(key, skip_lookup: false)
+
+ chunk_size = 5.megabytes
+ offset = 0
+
+ while offset < file.size
+ yield file.download(range: offset..(offset + chunk_size - 1)).string
+ offset += chunk_size
+ end
+ end
+
+ def bucket
+ @bucket ||= client.bucket(config.fetch(:bucket))
+ end
+
+ def client
+ @client ||= Google::Cloud::Storage.new(config.except(:bucket))
end
end
end
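
For reference, the byte ranges the stream method above requests for a file larger than one chunk; the 12 MB file size is hypothetical, and the final range deliberately over-asks, with the service returning whatever remains:

    chunk_size = 5 * 1024 * 1024
    file_size  = 12 * 1024 * 1024

    ranges = []
    offset = 0
    while offset < file_size
      ranges << (offset..(offset + chunk_size - 1))
      offset += chunk_size
    end

    ranges.length   # => 3; the last range extends past the 12 MB file and the service returns the remainder
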
diff --git a/activestorage/lib/active_storage/service/mirror_service.rb b/activestorage/lib/active_storage/service/mirror_service.rb
index 39e922f7ab..6002ef5a00 100644
--- a/activestorage/lib/active_storage/service/mirror_service.rb
+++ b/activestorage/lib/active_storage/service/mirror_service.rb
@@ -9,7 +9,7 @@ module ActiveStorage
class Service::MirrorService < Service
attr_reader :primary, :mirrors
- delegate :download, :exist?, :url, to: :primary
+ delegate :download, :download_chunk, :exist?, :url, to: :primary
# Stitch together from named services.
def self.build(primary:, mirrors:, configurator:, **options) #:nodoc:
@@ -35,6 +35,11 @@ module ActiveStorage
perform_across_services :delete, key
end
+ # Delete files at keys starting with the +prefix+ on all services.
+ def delete_prefixed(prefix)
+ perform_across_services :delete_prefixed, prefix
+ end
+
private
def each_service(&block)
[ primary, *mirrors ].each(&block)
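
Behavior sketch for the mirror service after this change: reads are delegated to the primary, while destructive operations fan out across the primary and every mirror. The key and prefix are hypothetical, and a mirror service is assumed to be configured:

    mirror = ActiveStorage::Blob.service

    mirror.download("some-blob-key")      # served by the primary only (see the delegate call above)
    mirror.delete_prefixed("variants/")   # performed on the primary and on each mirror
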
diff --git a/activestorage/lib/active_storage/service/s3_service.rb b/activestorage/lib/active_storage/service/s3_service.rb
index 6957119780..0286e7ff21 100644
--- a/activestorage/lib/active_storage/service/s3_service.rb
+++ b/activestorage/lib/active_storage/service/s3_service.rb
@@ -9,15 +9,15 @@ module ActiveStorage
class Service::S3Service < Service
attr_reader :client, :bucket, :upload_options
- def initialize(access_key_id:, secret_access_key:, region:, bucket:, upload: {}, **options)
- @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region, **options)
+ def initialize(bucket:, upload: {}, **options)
+ @client = Aws::S3::Resource.new(**options)
@bucket = @client.bucket(bucket)
@upload_options = upload
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
begin
object_for(key).put(upload_options.merge(body: io, content_md5: checksum))
rescue Aws::S3::Errors::BadDigest
@@ -28,24 +28,36 @@ module ActiveStorage
def download(key, &block)
if block_given?
- instrument :streaming_download, key do
+ instrument :streaming_download, key: key do
stream(key, &block)
end
else
- instrument :download, key do
- object_for(key).get.body.read.force_encoding(Encoding::BINARY)
+ instrument :download, key: key do
+ object_for(key).get.body.string.force_encoding(Encoding::BINARY)
end
end
end
+ def download_chunk(key, range)
+ instrument :download_chunk, key: key, range: range do
+ object_for(key).get(range: "bytes=#{range.begin}-#{range.exclude_end? ? range.end - 1 : range.end}").body.read.force_encoding(Encoding::BINARY)
+ end
+ end
+
def delete(key)
- instrument :delete, key do
+ instrument :delete, key: key do
object_for(key).delete
end
end
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ bucket.objects(prefix: prefix).batch_delete!
+ end
+ end
+
def exist?(key)
- instrument :exist, key do |payload|
+ instrument :exist, key: key do |payload|
answer = object_for(key).exists?
payload[:exist] = answer
answer
@@ -53,7 +65,7 @@ module ActiveStorage
end
def url(key, expires_in:, filename:, disposition:, content_type:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
generated_url = object_for(key).presigned_url :get, expires_in: expires_in.to_i,
response_content_disposition: content_disposition_with(type: disposition, filename: filename),
response_content_type: content_type
@@ -65,7 +77,7 @@ module ActiveStorage
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
generated_url = object_for(key).presigned_url :put, expires_in: expires_in.to_i,
content_type: content_type, content_length: content_length, content_md5: checksum
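
An illustration of the HTTP Range header that download_chunk above builds from a Ruby Range; inclusive and exclusive-ended ranges map to the same inclusive byte span. A stand-alone sketch of the same expression:

    def s3_range_header(range)
      "bytes=#{range.begin}-#{range.exclude_end? ? range.end - 1 : range.end}"
    end

    s3_range_header(5..9)    # => "bytes=5-9"
    s3_range_header(5...10)  # => "bytes=5-9"
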
diff --git a/activestorage/lib/active_storage/transformers/image_processing_transformer.rb b/activestorage/lib/active_storage/transformers/image_processing_transformer.rb
new file mode 100644
index 0000000000..7f8685b72d
--- /dev/null
+++ b/activestorage/lib/active_storage/transformers/image_processing_transformer.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require "image_processing"
+
+module ActiveStorage
+ module Transformers
+ class ImageProcessingTransformer < Transformer
+ private
+ def process(file, format:)
+ processor.
+ source(file).
+ loader(page: 0).
+ convert(format).
+ apply(operations).
+ call
+ end
+
+ def processor
+ ImageProcessing.const_get(ActiveStorage.variant_processor.to_s.camelize)
+ end
+
+ def operations
+ transformations.each_with_object([]) do |(name, argument), list|
+ if name.to_s == "combine_options"
+ ActiveSupport::Deprecation.warn <<~WARNING
+ Active Storage's ImageProcessing transformer doesn't support :combine_options,
+ as it always generates a single ImageMagick command. Passing :combine_options will
+ not be supported in Rails 6.1.
+ WARNING
+
+ list.concat argument.keep_if { |key, value| value.present? }.to_a
+ elsif argument.present?
+ list << [ name, argument ]
+ end
+ end
+ end
+ end
+ end
+end
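
To make the operation mapping above concrete: a transformation hash reduces to a list of [name, argument] pairs with blank arguments dropped, and that list is what apply receives. A sketch with hypothetical transformations (Object#present? comes from Active Support; :combine_options, deprecated above, is omitted):

    transformations = { resize_to_limit: [100, 100], monochrome: true, quality: nil }

    operations = transformations.select { |_name, argument| argument.present? }.to_a
    # => [[:resize_to_limit, [100, 100]], [:monochrome, true]]  (the nil argument is dropped)
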
diff --git a/activestorage/lib/active_storage/transformers/mini_magick_transformer.rb b/activestorage/lib/active_storage/transformers/mini_magick_transformer.rb
new file mode 100644
index 0000000000..e8e99cea9e
--- /dev/null
+++ b/activestorage/lib/active_storage/transformers/mini_magick_transformer.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require "mini_magick"
+
+module ActiveStorage
+ module Transformers
+ class MiniMagickTransformer < Transformer
+ private
+ def process(file, format:)
+ image = MiniMagick::Image.new(file.path, file)
+
+ transformations.each do |name, argument_or_subtransformations|
+ image.mogrify do |command|
+ if name.to_s == "combine_options"
+ argument_or_subtransformations.each do |subtransformation_name, subtransformation_argument|
+ pass_transform_argument(command, subtransformation_name, subtransformation_argument)
+ end
+ else
+ pass_transform_argument(command, name, argument_or_subtransformations)
+ end
+ end
+ end
+
+ image.format(format) if format
+
+ image.tempfile.tap(&:open)
+ end
+
+ def pass_transform_argument(command, method, argument)
+ if argument == true
+ command.public_send(method)
+ elsif argument.present?
+ command.public_send(method, argument)
+ end
+ end
+ end
+ end
+end
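
For contrast with the ImageProcessing transformer, the MiniMagick transformer above still honors :combine_options by passing each subtransformation to a single mogrify invocation, and pass_transform_argument turns a `true` value into a bare flag. A hypothetical transformation hash and the commands it roughly corresponds to:

    transformations = {
      combine_options: { flip: true, resize: "100x100" },  # applied within one mogrify call
      strip: true                                          # bare flag: no argument is passed
    }
    # Roughly equivalent ImageMagick invocations, one mogrify run per top-level entry:
    #   mogrify -flip -resize 100x100 <file>
    #   mogrify -strip <file>
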
diff --git a/activestorage/lib/active_storage/transformers/transformer.rb b/activestorage/lib/active_storage/transformers/transformer.rb
new file mode 100644
index 0000000000..2e21201004
--- /dev/null
+++ b/activestorage/lib/active_storage/transformers/transformer.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module ActiveStorage
+ module Transformers
+ # A Transformer applies a set of transformations to an image.
+ #
+ # The following concrete subclasses are included in Active Storage:
+ #
+ # * ActiveStorage::Transformers::ImageProcessingTransformer:
+ # backed by ImageProcessing, a common interface for MiniMagick and ruby-vips
+ #
+ # * ActiveStorage::Transformers::MiniMagickTransformer:
+ # backed by MiniMagick, a wrapper around the ImageMagick CLI
+ class Transformer
+ attr_reader :transformations
+
+ def initialize(transformations)
+ @transformations = transformations
+ end
+
+ # Applies the transformations to the source image in +file+, producing a target image in the
+ # specified +format+. Yields an open Tempfile containing the target image. Closes and unlinks
+ # the output tempfile after yielding to the given block. Returns the result of the block.
+ def transform(file, format:)
+ output = process(file, format: format)
+
+ begin
+ yield output
+ ensure
+ output.close!
+ end
+ end
+
+ private
+ # Returns an open Tempfile containing a transformed image in the given +format+.
+ # All subclasses implement this method.
+ def process(file, format:) #:doc:
+ raise NotImplementedError
+ end
+ end
+ end
+end
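
A usage sketch for the Transformer API documented above; the transformer subclass, source file, and output path are assumptions for illustration (within Active Storage itself this is driven through variants rather than called directly):

    transformer = ActiveStorage::Transformers::ImageProcessingTransformer.new(resize_to_limit: [100, 100])

    File.open("racecar.jpg", "rb") do |input|
      transformer.transform(input, format: "png") do |output|
        # output is an open Tempfile holding the transformed image; transform
        # closes and unlinks it once this block returns.
        File.binwrite("thumb.png", output.read)
      end
    end
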
diff --git a/activestorage/lib/tasks/activestorage.rake b/activestorage/lib/tasks/activestorage.rake
index ef923e5926..ac254d717f 100644
--- a/activestorage/lib/tasks/activestorage.rake
+++ b/activestorage/lib/tasks/activestorage.rake
@@ -1,8 +1,15 @@
# frozen_string_literal: true
namespace :active_storage do
+ # Prevent migration installation task from showing up twice.
+ Rake::Task["install:migrations"].clear_comments
+
desc "Copy over the migration needed to the application"
task install: :environment do
- Rake::Task["active_storage:install:migrations"].invoke
+ if Rake::Task.task_defined?("active_storage:install:migrations")
+ Rake::Task["active_storage:install:migrations"].invoke
+ else
+ Rake::Task["app:active_storage:install:migrations"].invoke
+ end
end
end
diff --git a/activestorage/package.json b/activestorage/package.json
index 8e6dd1c57f..00876985cf 100644
--- a/activestorage/package.json
+++ b/activestorage/package.json
@@ -1,10 +1,11 @@
{
"name": "activestorage",
- "version": "5.2.0-alpha",
+ "version": "6.0.0-alpha",
"description": "Attach cloud and local files in Rails applications",
"main": "app/assets/javascripts/activestorage.js",
"files": [
- "app/assets/javascripts/*.js"
+ "app/assets/javascripts/*.js",
+ "src/*.js"
],
"homepage": "http://rubyonrails.org/",
"repository": {
@@ -16,18 +17,25 @@
},
"author": "Javan Makhmali <javan@javan.us>",
"license": "MIT",
+ "dependencies": {
+ "spark-md5": "^3.0.0"
+ },
"devDependencies": {
"babel-core": "^6.25.0",
- "babel-loader": "^7.1.1",
+ "babel-plugin-external-helpers": "^6.22.0",
"babel-preset-env": "^1.6.0",
"eslint": "^4.3.0",
"eslint-plugin-import": "^2.7.0",
- "spark-md5": "^3.0.0",
- "webpack": "^3.4.0"
+ "rollup": "^0.58.2",
+ "rollup-plugin-babel": "^3.0.4",
+ "rollup-plugin-commonjs": "^9.1.0",
+ "rollup-plugin-node-resolve": "^3.3.0",
+ "rollup-plugin-uglify": "^3.0.0"
},
"scripts": {
"prebuild": "yarn lint",
- "build": "webpack -p",
- "lint": "eslint app/javascript"
+ "build": "rollup --config rollup.config.js",
+ "lint": "eslint app/javascript",
+ "prepublishOnly": "rm -rf src && cp -R app/javascript/activestorage src"
}
}
diff --git a/activestorage/rollup.config.js b/activestorage/rollup.config.js
new file mode 100644
index 0000000000..1b4f9477ab
--- /dev/null
+++ b/activestorage/rollup.config.js
@@ -0,0 +1,28 @@
+import resolve from "rollup-plugin-node-resolve"
+import commonjs from "rollup-plugin-commonjs"
+import babel from "rollup-plugin-babel"
+import uglify from "rollup-plugin-uglify"
+
+const uglifyOptions = {
+ mangle: false,
+ compress: false,
+ output: {
+ beautify: true,
+ indent_level: 2
+ }
+}
+
+export default {
+ input: "app/javascript/activestorage/index.js",
+ output: {
+ file: "app/assets/javascripts/activestorage.js",
+ format: "umd",
+ name: "ActiveStorage"
+ },
+ plugins: [
+ resolve(),
+ commonjs(),
+ babel(),
+ uglify(uglifyOptions)
+ ]
+}
diff --git a/activestorage/test/analyzer/image_analyzer_test.rb b/activestorage/test/analyzer/image_analyzer_test.rb
index 9087072215..55bb5e7280 100644
--- a/activestorage/test/analyzer/image_analyzer_test.rb
+++ b/activestorage/test/analyzer/image_analyzer_test.rb
@@ -6,11 +6,27 @@ require "database/setup"
require "active_storage/analyzer/image_analyzer"
class ActiveStorage::Analyzer::ImageAnalyzerTest < ActiveSupport::TestCase
- test "analyzing an image" do
+ test "analyzing a JPEG image" do
blob = create_file_blob(filename: "racecar.jpg", content_type: "image/jpeg")
- metadata = blob.tap(&:analyze).metadata
+ metadata = extract_metadata_from(blob)
assert_equal 4104, metadata[:width]
assert_equal 2736, metadata[:height]
end
+
+ test "analyzing a rotated JPEG image" do
+ blob = create_file_blob(filename: "racecar_rotated.jpg", content_type: "image/jpeg")
+ metadata = extract_metadata_from(blob)
+
+ assert_equal 2736, metadata[:width]
+ assert_equal 4104, metadata[:height]
+ end
+
+ test "analyzing an SVG image without an XML declaration" do
+ blob = create_file_blob(filename: "icon.svg", content_type: "image/svg+xml")
+ metadata = extract_metadata_from(blob)
+
+ assert_equal 792, metadata[:width]
+ assert_equal 584, metadata[:height]
+ end
end
diff --git a/activestorage/test/analyzer/video_analyzer_test.rb b/activestorage/test/analyzer/video_analyzer_test.rb
index 4a3c4a8bfc..d30f49315a 100644
--- a/activestorage/test/analyzer/video_analyzer_test.rb
+++ b/activestorage/test/analyzer/video_analyzer_test.rb
@@ -8,28 +8,47 @@ require "active_storage/analyzer/video_analyzer"
class ActiveStorage::Analyzer::VideoAnalyzerTest < ActiveSupport::TestCase
test "analyzing a video" do
blob = create_file_blob(filename: "video.mp4", content_type: "video/mp4")
- metadata = blob.tap(&:analyze).metadata
+ metadata = extract_metadata_from(blob)
assert_equal 640, metadata[:width]
assert_equal 480, metadata[:height]
- assert_equal [4, 3], metadata[:aspect_ratio]
+ assert_equal [4, 3], metadata[:display_aspect_ratio]
assert_equal 5.166648, metadata[:duration]
assert_not_includes metadata, :angle
end
test "analyzing a rotated video" do
blob = create_file_blob(filename: "rotated_video.mp4", content_type: "video/mp4")
- metadata = blob.tap(&:analyze).metadata
+ metadata = extract_metadata_from(blob)
- assert_equal 640, metadata[:width]
- assert_equal 480, metadata[:height]
- assert_equal [4, 3], metadata[:aspect_ratio]
+ assert_equal 480, metadata[:width]
+ assert_equal 640, metadata[:height]
+ assert_equal [4, 3], metadata[:display_aspect_ratio]
assert_equal 5.227975, metadata[:duration]
assert_equal 90, metadata[:angle]
end
+ test "analyzing a video with rectangular samples" do
+ blob = create_file_blob(filename: "video_with_rectangular_samples.mp4", content_type: "video/mp4")
+ metadata = extract_metadata_from(blob)
+
+ assert_equal 1280, metadata[:width]
+ assert_equal 720, metadata[:height]
+ assert_equal [16, 9], metadata[:display_aspect_ratio]
+ end
+
+ test "analyzing a video with an undefined display aspect ratio" do
+ blob = create_file_blob(filename: "video_with_undefined_display_aspect_ratio.mp4", content_type: "video/mp4")
+ metadata = extract_metadata_from(blob)
+
+ assert_equal 640, metadata[:width]
+ assert_equal 480, metadata[:height]
+ assert_nil metadata[:display_aspect_ratio]
+ end
+
test "analyzing a video without a video stream" do
blob = create_file_blob(filename: "video_without_video_stream.mp4", content_type: "video/mp4")
- assert_equal({ "analyzed" => true }, blob.tap(&:analyze).metadata)
+ metadata = extract_metadata_from(blob)
+ assert_equal({ "analyzed" => true, "identified" => true }, metadata)
end
end
diff --git a/activestorage/test/controllers/blobs_controller_test.rb b/activestorage/test/controllers/blobs_controller_test.rb
index 97177e64c2..9c811df895 100644
--- a/activestorage/test/controllers/blobs_controller_test.rb
+++ b/activestorage/test/controllers/blobs_controller_test.rb
@@ -8,6 +8,11 @@ class ActiveStorage::BlobsControllerTest < ActionDispatch::IntegrationTest
@blob = create_file_blob filename: "racecar.jpg"
end
+ test "showing blob with invalid signed ID" do
+ get rails_service_blob_url("invalid", "racecar.jpg")
+ assert_response :not_found
+ end
+
test "showing blob utilizes browser caching" do
get rails_blob_url(@blob)
diff --git a/activestorage/test/controllers/direct_uploads_controller_test.rb b/activestorage/test/controllers/direct_uploads_controller_test.rb
index 888767086c..1b16da17d9 100644
--- a/activestorage/test/controllers/direct_uploads_controller_test.rb
+++ b/activestorage/test/controllers/direct_uploads_controller_test.rb
@@ -27,7 +27,7 @@ if SERVICE_CONFIGURATIONS[:s3] && SERVICE_CONFIGURATIONS[:s3][:access_key_id].pr
assert_equal checksum, details["checksum"]
assert_equal "text/plain", details["content_type"]
assert_match SERVICE_CONFIGURATIONS[:s3][:bucket], details["direct_upload"]["url"]
- assert_match(/s3\.(\S+)?amazonaws\.com/, details["direct_upload"]["url"])
+ assert_match(/s3(-[-a-z0-9]+)?\.(\S+)?amazonaws\.com/, details["direct_upload"]["url"])
assert_equal({ "Content-Type" => "text/plain", "Content-MD5" => checksum }, details["direct_upload"]["headers"])
end
end
@@ -62,7 +62,7 @@ if SERVICE_CONFIGURATIONS[:gcs]
assert_equal checksum, details["checksum"]
assert_equal "text/plain", details["content_type"]
assert_match %r{storage\.googleapis\.com/#{@config[:bucket]}}, details["direct_upload"]["url"]
- assert_equal({ "Content-Type" => "text/plain", "Content-MD5" => checksum }, details["direct_upload"]["headers"])
+ assert_equal({ "Content-MD5" => checksum }, details["direct_upload"]["headers"])
end
end
end
@@ -121,4 +121,27 @@ class ActiveStorage::DiskDirectUploadsControllerTest < ActionDispatch::Integrati
assert_equal({ "Content-Type" => "text/plain" }, details["direct_upload"]["headers"])
end
end
+
+ test "creating new direct upload does not include root in json" do
+ checksum = Digest::MD5.base64digest("Hello")
+
+ set_include_root_in_json(true) do
+ post rails_direct_uploads_url, params: { blob: {
+ filename: "hello.txt", byte_size: 6, checksum: checksum, content_type: "text/plain" } }
+ end
+
+ @response.parsed_body.tap do |details|
+ assert_nil details["blob"]
+ assert_not_nil details["id"]
+ end
+ end
+
+ private
+ def set_include_root_in_json(value)
+ original = ActiveRecord::Base.include_root_in_json
+ ActiveRecord::Base.include_root_in_json = value
+ yield
+ ensure
+ ActiveRecord::Base.include_root_in_json = original
+ end
end
diff --git a/activestorage/test/controllers/disk_controller_test.rb b/activestorage/test/controllers/disk_controller_test.rb
index 940dbf5918..c053052f6f 100644
--- a/activestorage/test/controllers/disk_controller_test.rb
+++ b/activestorage/test/controllers/disk_controller_test.rb
@@ -6,18 +6,29 @@ require "database/setup"
class ActiveStorage::DiskControllerTest < ActionDispatch::IntegrationTest
test "showing blob inline" do
blob = create_blob
-
get blob.service_url
- assert_equal "inline; filename=\"hello.txt\"; filename*=UTF-8''hello.txt", @response.headers["Content-Disposition"]
- assert_equal "text/plain", @response.headers["Content-Type"]
+ assert_response :ok
+ assert_equal "inline; filename=\"hello.txt\"; filename*=UTF-8''hello.txt", response.headers["Content-Disposition"]
+ assert_equal "text/plain", response.headers["Content-Type"]
+ assert_equal "Hello world!", response.body
end
test "showing blob as attachment" do
blob = create_blob
-
get blob.service_url(disposition: :attachment)
- assert_equal "attachment; filename=\"hello.txt\"; filename*=UTF-8''hello.txt", @response.headers["Content-Disposition"]
- assert_equal "text/plain", @response.headers["Content-Type"]
+ assert_response :ok
+ assert_equal "attachment; filename=\"hello.txt\"; filename*=UTF-8''hello.txt", response.headers["Content-Disposition"]
+ assert_equal "text/plain", response.headers["Content-Type"]
+ assert_equal "Hello world!", response.body
+ end
+
+ test "showing blob range inline" do
+ blob = create_blob
+ get blob.service_url, headers: { "Range" => "bytes=5-9" }
+ assert_response :partial_content
+ assert_equal "inline; filename=\"hello.txt\"; filename*=UTF-8''hello.txt", response.headers["Content-Disposition"]
+ assert_equal "text/plain", response.headers["Content-Type"]
+ assert_equal " worl", response.body
end
@@ -56,4 +67,10 @@ class ActiveStorage::DiskControllerTest < ActionDispatch::IntegrationTest
assert_response :unprocessable_entity
assert_not blob.service.exist?(blob.key)
end
+
+ test "directly uploading blob with invalid token" do
+ put update_rails_disk_service_url(encoded_token: "invalid"),
+ params: "Something else entirely!", headers: { "Content-Type" => "text/plain" }
+ assert_response :not_found
+ end
end
diff --git a/activestorage/test/controllers/previews_controller_test.rb b/activestorage/test/controllers/previews_controller_test.rb
deleted file mode 100644
index c3151a710e..0000000000
--- a/activestorage/test/controllers/previews_controller_test.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require "test_helper"
-require "database/setup"
-
-class ActiveStorage::PreviewsControllerTest < ActionDispatch::IntegrationTest
- setup do
- @blob = create_file_blob filename: "report.pdf", content_type: "application/pdf"
- end
-
- test "showing preview inline" do
- get rails_blob_preview_url(
- filename: @blob.filename,
- signed_blob_id: @blob.signed_id,
- variation_key: ActiveStorage::Variation.encode(resize: "100x100"))
-
- assert @blob.preview_image.attached?
- assert_redirected_to(/report\.png\?.*disposition=inline/)
-
- image = read_image(@blob.preview_image.variant(resize: "100x100"))
- assert_equal 77, image.width
- assert_equal 100, image.height
- end
-end
diff --git a/activestorage/test/controllers/representations_controller_test.rb b/activestorage/test/controllers/representations_controller_test.rb
new file mode 100644
index 0000000000..2662cc5283
--- /dev/null
+++ b/activestorage/test/controllers/representations_controller_test.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require "test_helper"
+require "database/setup"
+
+class ActiveStorage::RepresentationsControllerWithVariantsTest < ActionDispatch::IntegrationTest
+ setup do
+ @blob = create_file_blob filename: "racecar.jpg"
+ end
+
+ test "showing variant inline" do
+ get rails_blob_representation_url(
+ filename: @blob.filename,
+ signed_blob_id: @blob.signed_id,
+ variation_key: ActiveStorage::Variation.encode(resize: "100x100"))
+
+ assert_redirected_to(/racecar\.jpg\?.*disposition=inline/)
+
+ image = read_image(@blob.variant(resize: "100x100"))
+ assert_equal 100, image.width
+ assert_equal 67, image.height
+ end
+
+ test "showing variant with invalid signed blob ID" do
+ get rails_blob_representation_url(
+ filename: @blob.filename,
+ signed_blob_id: "invalid",
+ variation_key: ActiveStorage::Variation.encode(resize: "100x100"))
+
+ assert_response :not_found
+ end
+end
+
+class ActiveStorage::RepresentationsControllerWithPreviewsTest < ActionDispatch::IntegrationTest
+ setup do
+ @blob = create_file_blob filename: "report.pdf", content_type: "application/pdf"
+ end
+
+ test "showing preview inline" do
+ get rails_blob_representation_url(
+ filename: @blob.filename,
+ signed_blob_id: @blob.signed_id,
+ variation_key: ActiveStorage::Variation.encode(resize: "100x100"))
+
+ assert_predicate @blob.preview_image, :attached?
+ assert_redirected_to(/report\.png\?.*disposition=inline/)
+
+ image = read_image(@blob.preview_image.variant(resize: "100x100"))
+ assert_equal 77, image.width
+ assert_equal 100, image.height
+ end
+
+ test "showing preview with invalid signed blob ID" do
+ get rails_blob_representation_url(
+ filename: @blob.filename,
+ signed_blob_id: "invalid",
+ variation_key: ActiveStorage::Variation.encode(resize: "100x100"))
+
+ assert_response :not_found
+ end
+end
diff --git a/activestorage/test/controllers/variants_controller_test.rb b/activestorage/test/controllers/variants_controller_test.rb
deleted file mode 100644
index 6c70d73786..0000000000
--- a/activestorage/test/controllers/variants_controller_test.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require "test_helper"
-require "database/setup"
-
-class ActiveStorage::VariantsControllerTest < ActionDispatch::IntegrationTest
- setup do
- @blob = create_file_blob filename: "racecar.jpg"
- end
-
- test "showing variant inline" do
- get rails_blob_variation_url(
- filename: @blob.filename,
- signed_blob_id: @blob.signed_id,
- variation_key: ActiveStorage::Variation.encode(resize: "100x100"))
-
- assert_redirected_to(/racecar\.jpg\?.*disposition=inline/)
-
- image = read_image(@blob.variant(resize: "100x100"))
- assert_equal 100, image.width
- assert_equal 67, image.height
- end
-end
diff --git a/activestorage/test/database/setup.rb b/activestorage/test/database/setup.rb
index 705650a25d..daeeb5695b 100644
--- a/activestorage/test/database/setup.rb
+++ b/activestorage/test/database/setup.rb
@@ -3,5 +3,5 @@
require_relative "create_users_migration"
ActiveRecord::Base.establish_connection(adapter: "sqlite3", database: ":memory:")
-ActiveRecord::Migrator.migrate File.expand_path("../../db/migrate", __dir__)
+ActiveRecord::Base.connection.migration_context.migrate
ActiveStorageCreateUsers.migrate(:up)
diff --git a/activestorage/test/dummy/config/application.rb b/activestorage/test/dummy/config/application.rb
index 7ee6625bb5..bd14ac0b1a 100644
--- a/activestorage/test/dummy/config/application.rb
+++ b/activestorage/test/dummy/config/application.rb
@@ -10,16 +10,12 @@ require "action_controller/railtie"
require "action_view/railtie"
require "sprockets/railtie"
require "active_storage/engine"
-#require "action_mailer/railtie"
-#require "rails/test_unit/railtie"
-#require "action_cable/engine"
-
Bundler.require(*Rails.groups)
module Dummy
class Application < Rails::Application
- config.load_defaults 5.1
+ config.load_defaults 5.2
config.active_storage.service = :local
end
diff --git a/activestorage/test/dummy/config/environments/test.rb b/activestorage/test/dummy/config/environments/test.rb
index ce0889e8ae..74a802d98c 100644
--- a/activestorage/test/dummy/config/environments/test.rb
+++ b/activestorage/test/dummy/config/environments/test.rb
@@ -30,6 +30,9 @@ Rails.application.configure do
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
+ # Disable request forgery protection in test environment.
+ config.action_controller.allow_forgery_protection = false
+
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
end
diff --git a/activestorage/test/dummy/config/secrets.yml b/activestorage/test/dummy/config/secrets.yml
index 77d1fc383a..18ada4405e 100644
--- a/activestorage/test/dummy/config/secrets.yml
+++ b/activestorage/test/dummy/config/secrets.yml
@@ -25,7 +25,7 @@ test:
# Do not keep production secrets in the unencrypted secrets file.
# Instead, either read values from the environment.
-# Or, use `bin/rails secrets:setup` to configure encrypted secrets
+# Or, use `rails secrets:setup` to configure encrypted secrets
# and move the `production:` environment over there.
production:
diff --git a/activestorage/test/fixtures/files/favicon.ico b/activestorage/test/fixtures/files/favicon.ico
new file mode 100644
index 0000000000..87192a8a07
--- /dev/null
+++ b/activestorage/test/fixtures/files/favicon.ico
Binary files differ
diff --git a/activestorage/test/fixtures/files/icon.psd b/activestorage/test/fixtures/files/icon.psd
new file mode 100644
index 0000000000..631fceeaab
--- /dev/null
+++ b/activestorage/test/fixtures/files/icon.psd
Binary files differ
diff --git a/activestorage/test/fixtures/files/icon.svg b/activestorage/test/fixtures/files/icon.svg
new file mode 100644
index 0000000000..6cfb0e241e
--- /dev/null
+++ b/activestorage/test/fixtures/files/icon.svg
@@ -0,0 +1,13 @@
+<!-- The XML declaration is intentionally omitted. -->
+<svg width="792px" height="584px" viewBox="0 0 792 584" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+ <path d="M9.51802657,28.724593 C9.51802657,18.2155955 18.1343454,9.60822622 28.6542694,9.60822622 L763.245541,9.60822622 C773.765465,9.60822622 782.381784,18.2155955 782.381784,28.724593 L782.381784,584 L792,584 L792,28.724593 C792,12.911054 779.075522,0 763.245541,0 L28.7544592,0 C12.9244782,0 0,12.911054 0,28.724593 L0,584 L9.61821632,584 C9.51802657,584 9.51802657,28.724593 9.51802657,28.724593 L9.51802657,28.724593 Z" id="Shape" opacity="0.3" fill="#CCCCCC"></path>
+ <circle id="Oval" fill="#FFCC33" cx="119.1" cy="147.2" r="33"></circle>
+ <circle id="Oval" fill="#3399FF" cx="119.1" cy="281.1" r="33"></circle>
+ <circle id="Oval" fill="#FF3333" cx="676.1" cy="376.8" r="33"></circle>
+ <circle id="Oval" fill="#FFCC33" cx="119.1" cy="477.2" r="33"></circle>
+ <rect id="Rectangle-path" opacity="0.75" fill="#CCCCCC" x="176.5" y="130.5" width="442.1" height="33.5"></rect>
+ <rect id="Rectangle-path" opacity="0.75" fill="#CCCCCC" x="176.5" y="183.1" width="442.1" height="33.5"></rect>
+ <rect id="Rectangle-path" opacity="0.75" fill="#CCCCCC" x="176.5" y="265.8" width="442.1" height="33.5"></rect>
+ <rect id="Rectangle-path" opacity="0.75" fill="#CCCCCC" x="176.5" y="363.4" width="442.1" height="33.5"></rect>
+ <rect id="Rectangle-path" opacity="0.75" fill="#CCCCCC" x="176.5" y="465.3" width="442.1" height="33.5"></rect>
+</svg>
diff --git a/activestorage/test/fixtures/files/image.gif b/activestorage/test/fixtures/files/image.gif
new file mode 100644
index 0000000000..90c05f671c
--- /dev/null
+++ b/activestorage/test/fixtures/files/image.gif
Binary files differ
diff --git a/activestorage/test/fixtures/files/racecar_rotated.jpg b/activestorage/test/fixtures/files/racecar_rotated.jpg
new file mode 100644
index 0000000000..89e6d54f98
--- /dev/null
+++ b/activestorage/test/fixtures/files/racecar_rotated.jpg
Binary files differ
diff --git a/activestorage/test/fixtures/files/video_with_rectangular_samples.mp4 b/activestorage/test/fixtures/files/video_with_rectangular_samples.mp4
new file mode 100644
index 0000000000..12b04afc87
--- /dev/null
+++ b/activestorage/test/fixtures/files/video_with_rectangular_samples.mp4
Binary files differ
diff --git a/activestorage/test/fixtures/files/video_with_undefined_display_aspect_ratio.mp4 b/activestorage/test/fixtures/files/video_with_undefined_display_aspect_ratio.mp4
new file mode 100644
index 0000000000..eb354e756f
--- /dev/null
+++ b/activestorage/test/fixtures/files/video_with_undefined_display_aspect_ratio.mp4
Binary files differ
diff --git a/activestorage/test/jobs/purge_job_test.rb b/activestorage/test/jobs/purge_job_test.rb
new file mode 100644
index 0000000000..251022a96f
--- /dev/null
+++ b/activestorage/test/jobs/purge_job_test.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require "test_helper"
+require "database/setup"
+
+class ActiveStorage::PurgeJobTest < ActiveJob::TestCase
+ setup { @blob = create_blob }
+
+ test "purges" do
+ assert_difference -> { ActiveStorage::Blob.count }, -1 do
+ ActiveStorage::PurgeJob.perform_now @blob
+ end
+
+ assert_not ActiveStorage::Blob.exists?(@blob.id)
+ assert_not ActiveStorage::Blob.service.exist?(@blob.key)
+ end
+
+ test "ignores missing blob" do
+ @blob.purge
+
+ perform_enqueued_jobs do
+ assert_nothing_raised do
+ ActiveStorage::PurgeJob.perform_later @blob
+ end
+ end
+ end
+end
diff --git a/activestorage/test/models/attached/many_test.rb b/activestorage/test/models/attached/many_test.rb
new file mode 100644
index 0000000000..3b563b3fc8
--- /dev/null
+++ b/activestorage/test/models/attached/many_test.rb
@@ -0,0 +1,596 @@
+# frozen_string_literal: true
+
+require "test_helper"
+require "database/setup"
+
+class ActiveStorage::ManyAttachedTest < ActiveSupport::TestCase
+ include ActiveJob::TestHelper
+
+ setup do
+ @user = User.create!(name: "Josh")
+ end
+
+ teardown { ActiveStorage::Blob.all.each(&:delete) }
+
+ test "attaching existing blobs to an existing record" do
+ @user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg")
+ assert_equal "funky.jpg", @user.highlights.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ end
+
+ test "attaching existing blobs from signed IDs to an existing record" do
+ @user.highlights.attach create_blob(filename: "funky.jpg").signed_id, create_blob(filename: "town.jpg").signed_id
+ assert_equal "funky.jpg", @user.highlights.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ end
+
+ test "attaching new blobs from Hashes to an existing record" do
+ @user.highlights.attach(
+ { io: StringIO.new("STUFF"), filename: "funky.jpg", content_type: "image/jpg" },
+ { io: StringIO.new("THINGS"), filename: "town.jpg", content_type: "image/jpeg" })
+
+ assert_equal "funky.jpg", @user.highlights.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ end
+
+ test "attaching new blobs from uploaded files to an existing record" do
+ @user.highlights.attach fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4")
+ assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
+ assert_equal "video.mp4", @user.highlights.second.filename.to_s
+ end
+
+ test "attaching existing blobs to an existing, changed record" do
+ @user.name = "Tina"
+ assert @user.changed?
+
+ @user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg")
+ assert_equal "funky.jpg", @user.highlights.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ assert_not @user.highlights.first.persisted?
+ assert_not @user.highlights.second.persisted?
+ assert @user.will_save_change_to_name?
+
+ @user.save!
+ assert_equal "funky.jpg", @user.highlights.reload.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ end
+
+ test "attaching existing blobs from signed IDs to an existing, changed record" do
+ @user.name = "Tina"
+ assert @user.changed?
+
+ @user.highlights.attach create_blob(filename: "funky.jpg").signed_id, create_blob(filename: "town.jpg").signed_id
+ assert_equal "funky.jpg", @user.highlights.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ assert_not @user.highlights.first.persisted?
+ assert_not @user.highlights.second.persisted?
+ assert @user.will_save_change_to_name?
+
+ @user.save!
+ assert_equal "funky.jpg", @user.highlights.reload.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ end
+
+ test "attaching new blobs from Hashes to an existing, changed record" do
+ @user.name = "Tina"
+ assert @user.changed?
+
+ @user.highlights.attach(
+ { io: StringIO.new("STUFF"), filename: "funky.jpg", content_type: "image/jpg" },
+ { io: StringIO.new("THINGS"), filename: "town.jpg", content_type: "image/jpeg" })
+
+ assert_equal "funky.jpg", @user.highlights.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ assert_not @user.highlights.first.persisted?
+ assert_not @user.highlights.second.persisted?
+ assert @user.will_save_change_to_name?
+
+ @user.save!
+ assert_equal "funky.jpg", @user.highlights.reload.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ end
+
+ test "attaching new blobs from uploaded files to an existing, changed record" do
+ @user.name = "Tina"
+ assert @user.changed?
+
+ @user.highlights.attach fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4")
+ assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
+ assert_equal "video.mp4", @user.highlights.second.filename.to_s
+ assert_not @user.highlights.first.persisted?
+ assert_not @user.highlights.second.persisted?
+ assert @user.will_save_change_to_name?
+
+ @user.save!
+ assert_equal "racecar.jpg", @user.highlights.reload.first.filename.to_s
+ assert_equal "video.mp4", @user.highlights.second.filename.to_s
+ end
+
+ test "attaching existing blobs to an existing record one at a time" do
+ @user.highlights.attach create_blob(filename: "funky.jpg")
+ @user.highlights.attach create_blob(filename: "town.jpg")
+ assert_equal "funky.jpg", @user.highlights.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+
+ @user.reload
+ assert_equal "funky.jpg", @user.highlights.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ end
+
+ test "updating an existing record to attach existing blobs" do
+ @user.update! highlights: [ create_file_blob(filename: "racecar.jpg"), create_file_blob(filename: "video.mp4") ]
+ assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
+ assert_equal "video.mp4", @user.highlights.second.filename.to_s
+ end
+
+ test "updating an existing record to attach existing blobs from signed IDs" do
+ @user.update! highlights: [ create_blob(filename: "funky.jpg").signed_id, create_blob(filename: "town.jpg").signed_id ]
+ assert_equal "funky.jpg", @user.highlights.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ end
+
+ test "successfully updating an existing record to attach new blobs from uploaded files" do
+ @user.highlights = [ fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4") ]
+ assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
+ assert_equal "video.mp4", @user.highlights.second.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(@user.highlights.first.key)
+ assert_not ActiveStorage::Blob.service.exist?(@user.highlights.second.key)
+
+ @user.save!
+ assert ActiveStorage::Blob.service.exist?(@user.highlights.first.key)
+ assert ActiveStorage::Blob.service.exist?(@user.highlights.second.key)
+ end
+
+ test "unsuccessfully updating an existing record to attach new blobs from uploaded files" do
+ assert_not @user.update(name: "", highlights: [ fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4") ])
+ assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
+ assert_equal "video.mp4", @user.highlights.second.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(@user.highlights.first.key)
+ assert_not ActiveStorage::Blob.service.exist?(@user.highlights.second.key)
+ end
+
+ test "replacing existing, dependent attachments on an existing record via assign and attach" do
+ [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |old_blobs|
+ @user.highlights.attach old_blobs
+
+ @user.highlights = []
+ assert_not @user.highlights.attached?
+
+ perform_enqueued_jobs do
+ @user.highlights.attach create_blob(filename: "whenever.jpg"), create_blob(filename: "wherever.jpg")
+ end
+
+ assert_equal "whenever.jpg", @user.highlights.first.filename.to_s
+ assert_equal "wherever.jpg", @user.highlights.second.filename.to_s
+ assert_not ActiveStorage::Blob.exists?(old_blobs.first.id)
+ assert_not ActiveStorage::Blob.exists?(old_blobs.second.id)
+ assert_not ActiveStorage::Blob.service.exist?(old_blobs.first.key)
+ assert_not ActiveStorage::Blob.service.exist?(old_blobs.second.key)
+ end
+ end
+
+ test "replacing existing, independent attachments on an existing record via assign and attach" do
+ @user.vlogs.attach create_blob(filename: "funky.mp4"), create_blob(filename: "town.mp4")
+
+ @user.vlogs = []
+ assert_not @user.vlogs.attached?
+
+ assert_no_enqueued_jobs only: ActiveStorage::PurgeJob do
+ @user.vlogs.attach create_blob(filename: "whenever.mp4"), create_blob(filename: "wherever.mp4")
+ end
+
+ assert_equal "whenever.mp4", @user.vlogs.first.filename.to_s
+ assert_equal "wherever.mp4", @user.vlogs.second.filename.to_s
+ end
+
+ test "successfully updating an existing record to replace existing, dependent attachments" do
+ [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |old_blobs|
+ @user.highlights.attach old_blobs
+
+ perform_enqueued_jobs do
+ @user.update! highlights: [ create_blob(filename: "whenever.jpg"), create_blob(filename: "wherever.jpg") ]
+ end
+
+ assert_equal "whenever.jpg", @user.highlights.first.filename.to_s
+ assert_equal "wherever.jpg", @user.highlights.second.filename.to_s
+ assert_not ActiveStorage::Blob.exists?(old_blobs.first.id)
+ assert_not ActiveStorage::Blob.exists?(old_blobs.second.id)
+ assert_not ActiveStorage::Blob.service.exist?(old_blobs.first.key)
+ assert_not ActiveStorage::Blob.service.exist?(old_blobs.second.key)
+ end
+ end
+
+ test "successfully updating an existing record to replace existing, independent attachments" do
+ @user.vlogs.attach create_blob(filename: "funky.mp4"), create_blob(filename: "town.mp4")
+
+ assert_no_enqueued_jobs only: ActiveStorage::PurgeJob do
+ @user.update! vlogs: [ create_blob(filename: "whenever.mp4"), create_blob(filename: "wherever.mp4") ]
+ end
+
+ assert_equal "whenever.mp4", @user.vlogs.first.filename.to_s
+ assert_equal "wherever.mp4", @user.vlogs.second.filename.to_s
+ end
+
+ test "unsuccessfully updating an existing record to replace existing attachments" do
+ @user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg")
+
+ assert_no_enqueued_jobs do
+ assert_not @user.update(name: "", highlights: [ fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4") ])
+ end
+
+ assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
+ assert_equal "video.mp4", @user.highlights.second.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(@user.highlights.first.key)
+ assert_not ActiveStorage::Blob.service.exist?(@user.highlights.second.key)
+ end
+
+ test "updating an existing record to attach one new blob and one previously-attached blob" do
+ [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
+ @user.highlights.attach blobs.first
+
+ perform_enqueued_jobs do
+ assert_no_changes -> { @user.highlights_attachments.first.id } do
+ @user.update! highlights: blobs
+ end
+ end
+
+ assert_equal "funky.jpg", @user.highlights.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+ assert ActiveStorage::Blob.service.exist?(@user.highlights.first.key)
+ end
+ end
+
+ test "updating an existing record to remove dependent attachments" do
+ [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
+ @user.highlights.attach blobs
+
+ assert_enqueued_with job: ActiveStorage::PurgeJob, args: [ blobs.first ] do
+ assert_enqueued_with job: ActiveStorage::PurgeJob, args: [ blobs.second ] do
+ @user.update! highlights: []
+ end
+ end
+
+ assert_not @user.highlights.attached?
+ end
+ end
+
+ test "updating an existing record to remove independent attachments" do
+ [ create_blob(filename: "funky.mp4"), create_blob(filename: "town.mp4") ].tap do |blobs|
+ @user.vlogs.attach blobs
+
+ assert_no_enqueued_jobs only: ActiveStorage::PurgeJob do
+ @user.update! vlogs: []
+ end
+
+ assert_not @user.vlogs.attached?
+ end
+ end
+
+ test "analyzing a new blob from an uploaded file after attaching it to an existing record" do
+ perform_enqueued_jobs do
+ @user.highlights.attach fixture_file_upload("racecar.jpg")
+ end
+
+ assert @user.highlights.reload.first.analyzed?
+ assert_equal 4104, @user.highlights.first.metadata[:width]
+ assert_equal 2736, @user.highlights.first.metadata[:height]
+ end
+
+ test "analyzing a new blob from an uploaded file after attaching it to an existing record via update" do
+ perform_enqueued_jobs do
+ @user.update! highlights: [ fixture_file_upload("racecar.jpg") ]
+ end
+
+ assert @user.highlights.reload.first.analyzed?
+ assert_equal 4104, @user.highlights.first.metadata[:width]
+ assert_equal 2736, @user.highlights.first.metadata[:height]
+ end
+
+ test "analyzing a directly-uploaded blob after attaching it to an existing record" do
+ perform_enqueued_jobs do
+ @user.highlights.attach directly_upload_file_blob(filename: "racecar.jpg")
+ end
+
+ assert @user.highlights.reload.first.analyzed?
+ assert_equal 4104, @user.highlights.first.metadata[:width]
+ assert_equal 2736, @user.highlights.first.metadata[:height]
+ end
+
+ test "analyzing a directly-uploaded blob after attaching it to an existing record via update" do
+ perform_enqueued_jobs do
+ @user.update! highlights: [ directly_upload_file_blob(filename: "racecar.jpg") ]
+ end
+
+ assert @user.highlights.reload.first.analyzed?
+ assert_equal 4104, @user.highlights.first.metadata[:width]
+ assert_equal 2736, @user.highlights.first.metadata[:height]
+ end
+
+ test "attaching existing blobs to a new record" do
+ User.new(name: "Jason").tap do |user|
+ user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg")
+ assert user.new_record?
+ assert_equal "funky.jpg", user.highlights.first.filename.to_s
+ assert_equal "town.jpg", user.highlights.second.filename.to_s
+
+ user.save!
+ assert_equal "funky.jpg", user.highlights.first.filename.to_s
+ assert_equal "town.jpg", user.highlights.second.filename.to_s
+ end
+ end
+
+ test "attaching an existing blob from a signed ID to a new record" do
+ User.new(name: "Jason").tap do |user|
+ user.avatar.attach create_blob(filename: "funky.jpg").signed_id
+ assert user.new_record?
+ assert_equal "funky.jpg", user.avatar.filename.to_s
+
+ user.save!
+ assert_equal "funky.jpg", user.reload.avatar.filename.to_s
+ end
+ end
+
+ test "attaching new blobs from Hashes to a new record" do
+ User.new(name: "Jason").tap do |user|
+ user.highlights.attach(
+ { io: StringIO.new("STUFF"), filename: "funky.jpg", content_type: "image/jpg" },
+ { io: StringIO.new("THINGS"), filename: "town.jpg", content_type: "image/jpg" })
+
+ assert user.new_record?
+ assert user.highlights.first.new_record?
+ assert user.highlights.second.new_record?
+ assert user.highlights.first.blob.new_record?
+ assert user.highlights.second.blob.new_record?
+ assert_equal "funky.jpg", user.highlights.first.filename.to_s
+ assert_equal "town.jpg", user.highlights.second.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(user.highlights.first.key)
+ assert_not ActiveStorage::Blob.service.exist?(user.highlights.second.key)
+
+ user.save!
+ assert user.highlights.first.persisted?
+ assert user.highlights.second.persisted?
+ assert user.highlights.first.blob.persisted?
+ assert user.highlights.second.blob.persisted?
+ assert_equal "funky.jpg", user.reload.highlights.first.filename.to_s
+ assert_equal "town.jpg", user.highlights.second.filename.to_s
+ assert ActiveStorage::Blob.service.exist?(user.highlights.first.key)
+ assert ActiveStorage::Blob.service.exist?(user.highlights.second.key)
+ end
+ end
+
+ test "attaching new blobs from uploaded files to a new record" do
+ User.new(name: "Jason").tap do |user|
+ user.highlights.attach fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4")
+ assert user.new_record?
+ assert user.highlights.first.new_record?
+ assert user.highlights.second.new_record?
+ assert user.highlights.first.blob.new_record?
+ assert user.highlights.second.blob.new_record?
+ assert_equal "racecar.jpg", user.highlights.first.filename.to_s
+ assert_equal "video.mp4", user.highlights.second.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(user.highlights.first.key)
+ assert_not ActiveStorage::Blob.service.exist?(user.highlights.second.key)
+
+ user.save!
+ assert user.highlights.first.persisted?
+ assert user.highlights.second.persisted?
+ assert user.highlights.first.blob.persisted?
+ assert user.highlights.second.blob.persisted?
+ assert_equal "racecar.jpg", user.reload.highlights.first.filename.to_s
+ assert_equal "video.mp4", user.highlights.second.filename.to_s
+ assert ActiveStorage::Blob.service.exist?(user.highlights.first.key)
+ assert ActiveStorage::Blob.service.exist?(user.highlights.second.key)
+ end
+ end
+
+ test "creating a record with existing blobs attached" do
+ user = User.create!(name: "Jason", highlights: [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ])
+ assert_equal "funky.jpg", user.reload.highlights.first.filename.to_s
+ assert_equal "town.jpg", user.reload.highlights.second.filename.to_s
+ end
+
+ test "creating a record with an existing blob from signed IDs attached" do
+ user = User.create!(name: "Jason", highlights: [
+ create_blob(filename: "funky.jpg").signed_id, create_blob(filename: "town.jpg").signed_id ])
+ assert_equal "funky.jpg", user.reload.highlights.first.filename.to_s
+ assert_equal "town.jpg", user.reload.highlights.second.filename.to_s
+ end
+
+ test "creating a record with new blobs from uploaded files attached" do
+ User.new(name: "Jason", highlights: [ fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4") ]).tap do |user|
+ assert user.new_record?
+ assert user.highlights.first.new_record?
+ assert user.highlights.second.new_record?
+ assert user.highlights.first.blob.new_record?
+ assert user.highlights.second.blob.new_record?
+ assert_equal "racecar.jpg", user.highlights.first.filename.to_s
+ assert_equal "video.mp4", user.highlights.second.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(user.highlights.first.key)
+ assert_not ActiveStorage::Blob.service.exist?(user.highlights.second.key)
+
+ user.save!
+ assert_equal "racecar.jpg", user.highlights.first.filename.to_s
+ assert_equal "video.mp4", user.highlights.second.filename.to_s
+ end
+ end
+
+ test "creating a record with an unexpected object attached" do
+ error = assert_raises(ArgumentError) { User.create!(name: "Jason", highlights: :foo) }
+ assert_equal "Could not find or build blob: expected attachable, got :foo", error.message
+ end
+
+ test "analyzing a new blob from an uploaded file after attaching it to a new record" do
+ perform_enqueued_jobs do
+ user = User.create!(name: "Jason", highlights: [ fixture_file_upload("racecar.jpg") ])
+ assert user.highlights.reload.first.analyzed?
+ assert_equal 4104, user.highlights.first.metadata[:width]
+ assert_equal 2736, user.highlights.first.metadata[:height]
+ end
+ end
+
+ test "analyzing a directly-uploaded blob after attaching it to a new record" do
+ perform_enqueued_jobs do
+ user = User.create!(name: "Jason", highlights: [ directly_upload_file_blob(filename: "racecar.jpg") ])
+ assert user.highlights.reload.first.analyzed?
+ assert_equal 4104, user.highlights.first.metadata[:width]
+ assert_equal 2736, user.highlights.first.metadata[:height]
+ end
+ end
+
+ test "detaching" do
+ [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
+ @user.highlights.attach blobs
+ assert @user.highlights.attached?
+
+ perform_enqueued_jobs do
+ @user.highlights.detach
+ end
+
+ assert_not @user.highlights.attached?
+ assert ActiveStorage::Blob.exists?(blobs.first.id)
+ assert ActiveStorage::Blob.exists?(blobs.second.id)
+ assert ActiveStorage::Blob.service.exist?(blobs.first.key)
+ assert ActiveStorage::Blob.service.exist?(blobs.second.key)
+ end
+ end
+
+ test "purging" do
+ [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
+ @user.highlights.attach blobs
+ assert @user.highlights.attached?
+
+ @user.highlights.purge
+ assert_not @user.highlights.attached?
+ assert_not ActiveStorage::Blob.exists?(blobs.first.id)
+ assert_not ActiveStorage::Blob.exists?(blobs.second.id)
+ assert_not ActiveStorage::Blob.service.exist?(blobs.first.key)
+ assert_not ActiveStorage::Blob.service.exist?(blobs.second.key)
+ end
+ end
+
+ test "purging attachment with shared blobs" do
+ [
+ create_blob(filename: "funky.jpg"),
+ create_blob(filename: "town.jpg"),
+ create_blob(filename: "worm.jpg")
+ ].tap do |blobs|
+ @user.highlights.attach blobs
+ assert @user.highlights.attached?
+
+ another_user = User.create!(name: "John")
+ shared_blobs = [blobs.second, blobs.third]
+ another_user.highlights.attach shared_blobs
+ assert another_user.highlights.attached?
+
+ @user.highlights.purge
+ assert_not @user.highlights.attached?
+
+ assert_not ActiveStorage::Blob.exists?(blobs.first.id)
+ assert ActiveStorage::Blob.exists?(blobs.second.id)
+ assert ActiveStorage::Blob.exists?(blobs.third.id)
+
+ assert_not ActiveStorage::Blob.service.exist?(blobs.first.key)
+ assert ActiveStorage::Blob.service.exist?(blobs.second.key)
+ assert ActiveStorage::Blob.service.exist?(blobs.third.key)
+ end
+ end
+
+ test "purging later" do
+ [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
+ @user.highlights.attach blobs
+ assert @user.highlights.attached?
+
+ perform_enqueued_jobs do
+ @user.highlights.purge_later
+ end
+
+ assert_not @user.highlights.attached?
+ assert_not ActiveStorage::Blob.exists?(blobs.first.id)
+ assert_not ActiveStorage::Blob.exists?(blobs.second.id)
+ assert_not ActiveStorage::Blob.service.exist?(blobs.first.key)
+ assert_not ActiveStorage::Blob.service.exist?(blobs.second.key)
+ end
+ end
+
+ test "purging attachment later with shared blobs" do
+ [
+ create_blob(filename: "funky.jpg"),
+ create_blob(filename: "town.jpg"),
+ create_blob(filename: "worm.jpg")
+ ].tap do |blobs|
+ @user.highlights.attach blobs
+ assert @user.highlights.attached?
+
+ another_user = User.create!(name: "John")
+ shared_blobs = [blobs.second, blobs.third]
+ another_user.highlights.attach shared_blobs
+ assert another_user.highlights.attached?
+
+ perform_enqueued_jobs do
+ @user.highlights.purge_later
+ end
+
+ assert_not @user.highlights.attached?
+ assert_not ActiveStorage::Blob.exists?(blobs.first.id)
+ assert ActiveStorage::Blob.exists?(blobs.second.id)
+ assert ActiveStorage::Blob.exists?(blobs.third.id)
+
+ assert_not ActiveStorage::Blob.service.exist?(blobs.first.key)
+ assert ActiveStorage::Blob.service.exist?(blobs.second.key)
+ assert ActiveStorage::Blob.service.exist?(blobs.third.key)
+ end
+ end
+
+ test "purging dependent attachment later on destroy" do
+ [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
+ @user.highlights.attach blobs
+
+ perform_enqueued_jobs do
+ @user.destroy!
+ end
+
+ assert_not ActiveStorage::Blob.exists?(blobs.first.id)
+ assert_not ActiveStorage::Blob.exists?(blobs.second.id)
+ assert_not ActiveStorage::Blob.service.exist?(blobs.first.key)
+ assert_not ActiveStorage::Blob.service.exist?(blobs.second.key)
+ end
+ end
+
+ test "not purging independent attachment on destroy" do
+ [ create_blob(filename: "funky.mp4"), create_blob(filename: "town.mp4") ].tap do |blobs|
+ @user.vlogs.attach blobs
+
+ assert_no_enqueued_jobs do
+ @user.destroy!
+ end
+ end
+ end
+
+ test "clearing change on reload" do
+ @user.highlights = [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ]
+ assert @user.highlights.attached?
+
+ @user.reload
+ assert_not @user.highlights.attached?
+ end
+
+ test "overriding attached reader" do
+ @user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg")
+
+ assert_equal "funky.jpg", @user.highlights.first.filename.to_s
+ assert_equal "town.jpg", @user.highlights.second.filename.to_s
+
+ begin
+ User.class_eval do
+ def highlights
+ super.reverse
+ end
+ end
+
+ assert_equal "town.jpg", @user.highlights.first.filename.to_s
+ assert_equal "funky.jpg", @user.highlights.second.filename.to_s
+ ensure
+ User.send(:remove_method, :highlights)
+ end
+ end
+end
diff --git a/activestorage/test/models/attached/one_test.rb b/activestorage/test/models/attached/one_test.rb
new file mode 100644
index 0000000000..561c3e9d23
--- /dev/null
+++ b/activestorage/test/models/attached/one_test.rb
@@ -0,0 +1,513 @@
+# frozen_string_literal: true
+
+require "test_helper"
+require "database/setup"
+
+class ActiveStorage::OneAttachedTest < ActiveSupport::TestCase
+ include ActiveJob::TestHelper
+
+ setup do
+ @user = User.create!(name: "Josh")
+ end
+
+ teardown { ActiveStorage::Blob.all.each(&:delete) }
+
+ test "attaching an existing blob to an existing record" do
+ @user.avatar.attach create_blob(filename: "funky.jpg")
+ assert_equal "funky.jpg", @user.avatar.filename.to_s
+ end
+
+ test "attaching an existing blob from a signed ID to an existing record" do
+ @user.avatar.attach create_blob(filename: "funky.jpg").signed_id
+ assert_equal "funky.jpg", @user.avatar.filename.to_s
+ end
+
+ test "attaching a new blob from a Hash to an existing record" do
+ @user.avatar.attach io: StringIO.new("STUFF"), filename: "town.jpg", content_type: "image/jpg"
+ assert_equal "town.jpg", @user.avatar.filename.to_s
+ end
+
+ test "attaching a new blob from an uploaded file to an existing record" do
+ @user.avatar.attach fixture_file_upload("racecar.jpg")
+ assert_equal "racecar.jpg", @user.avatar.filename.to_s
+ end
+
+ test "attaching an existing blob to an existing, changed record" do
+ @user.name = "Tina"
+ assert @user.changed?
+
+ @user.avatar.attach create_blob(filename: "funky.jpg")
+ assert_equal "funky.jpg", @user.avatar.filename.to_s
+ assert_not @user.avatar.persisted?
+ assert @user.will_save_change_to_name?
+
+ @user.save!
+ assert_equal "funky.jpg", @user.reload.avatar.filename.to_s
+ end
+
+ test "attaching an existing blob from a signed ID to an existing, changed record" do
+ @user.name = "Tina"
+ assert @user.changed?
+
+ @user.avatar.attach create_blob(filename: "funky.jpg").signed_id
+ assert_equal "funky.jpg", @user.avatar.filename.to_s
+ assert_not @user.avatar.persisted?
+ assert @user.will_save_change_to_name?
+
+ @user.save!
+ assert_equal "funky.jpg", @user.reload.avatar.filename.to_s
+ end
+
+ test "attaching a new blob from a Hash to an existing, changed record" do
+ @user.name = "Tina"
+ assert @user.changed?
+
+ @user.avatar.attach io: StringIO.new("STUFF"), filename: "town.jpg", content_type: "image/jpg"
+ assert_equal "town.jpg", @user.avatar.filename.to_s
+ assert_not @user.avatar.persisted?
+ assert @user.will_save_change_to_name?
+
+ @user.save!
+ assert_equal "town.jpg", @user.reload.avatar.filename.to_s
+ end
+
+ test "attaching a new blob from an uploaded file to an existing, changed record" do
+ @user.name = "Tina"
+ assert @user.changed?
+
+ @user.avatar.attach fixture_file_upload("racecar.jpg")
+ assert_equal "racecar.jpg", @user.avatar.filename.to_s
+ assert_not @user.avatar.persisted?
+ assert @user.will_save_change_to_name?
+
+ @user.save!
+ assert_equal "racecar.jpg", @user.reload.avatar.filename.to_s
+ end
+
+ test "updating an existing record to attach an existing blob" do
+ @user.update! avatar: create_blob(filename: "funky.jpg")
+ assert_equal "funky.jpg", @user.avatar.filename.to_s
+ end
+
+ test "updating an existing record to attach an existing blob from a signed ID" do
+ @user.update! avatar: create_blob(filename: "funky.jpg").signed_id
+ assert_equal "funky.jpg", @user.avatar.filename.to_s
+ end
+
+ test "successfully updating an existing record to attach a new blob from an uploaded file" do
+ @user.avatar = fixture_file_upload("racecar.jpg")
+ assert_equal "racecar.jpg", @user.avatar.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(@user.avatar.key)
+
+ @user.save!
+ assert ActiveStorage::Blob.service.exist?(@user.avatar.key)
+ end
+
+ test "unsuccessfully updating an existing record to attach a new blob from an uploaded file" do
+ assert_not @user.update(name: "", avatar: fixture_file_upload("racecar.jpg"))
+ assert_equal "racecar.jpg", @user.avatar.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(@user.avatar.key)
+ end
+
+ test "successfully replacing an existing, dependent attachment on an existing record" do
+ create_blob(filename: "funky.jpg").tap do |old_blob|
+ @user.avatar.attach old_blob
+
+ perform_enqueued_jobs do
+ @user.avatar.attach create_blob(filename: "town.jpg")
+ end
+
+ assert_equal "town.jpg", @user.avatar.filename.to_s
+ assert_not ActiveStorage::Blob.exists?(old_blob.id)
+ assert_not ActiveStorage::Blob.service.exist?(old_blob.key)
+ end
+ end
+
+ test "replacing an existing, independent attachment on an existing record" do
+ @user.cover_photo.attach create_blob(filename: "funky.jpg")
+
+ assert_no_enqueued_jobs only: ActiveStorage::PurgeJob do
+ @user.cover_photo.attach create_blob(filename: "town.jpg")
+ end
+
+ assert_equal "town.jpg", @user.cover_photo.filename.to_s
+ end
+
+ test "replacing an attached blob on an existing record with itself" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.avatar.attach blob
+
+ assert_no_changes -> { @user.reload.avatar_attachment.id } do
+ assert_no_enqueued_jobs do
+ @user.avatar.attach blob
+ end
+ end
+
+ assert_equal "funky.jpg", @user.avatar.filename.to_s
+ assert ActiveStorage::Blob.service.exist?(@user.avatar.key)
+ end
+ end
+
+ test "successfully updating an existing record to replace an existing, dependent attachment" do
+ create_blob(filename: "funky.jpg").tap do |old_blob|
+ @user.avatar.attach old_blob
+
+ perform_enqueued_jobs do
+ @user.update! avatar: create_blob(filename: "town.jpg")
+ end
+
+ assert_equal "town.jpg", @user.avatar.filename.to_s
+ assert_not ActiveStorage::Blob.exists?(old_blob.id)
+ assert_not ActiveStorage::Blob.service.exist?(old_blob.key)
+ end
+ end
+
+ test "successfully updating an existing record to replace an existing, independent attachment" do
+ @user.cover_photo.attach create_blob(filename: "funky.jpg")
+
+ assert_no_enqueued_jobs only: ActiveStorage::PurgeJob do
+ @user.update! cover_photo: create_blob(filename: "town.jpg")
+ end
+
+ assert_equal "town.jpg", @user.cover_photo.filename.to_s
+ end
+
+ test "unsuccessfully updating an existing record to replace an existing attachment" do
+ @user.avatar.attach create_blob(filename: "funky.jpg")
+
+ assert_no_enqueued_jobs do
+ assert_not @user.update(name: "", avatar: fixture_file_upload("racecar.jpg"))
+ end
+
+ assert_equal "racecar.jpg", @user.avatar.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(@user.avatar.key)
+ end
+
+ test "updating an existing record to replace an attached blob with itself" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.avatar.attach blob
+
+ assert_no_enqueued_jobs do
+ assert_no_changes -> { @user.reload.avatar_attachment.id } do
+ @user.update! avatar: blob
+ end
+ end
+ end
+ end
+
+ test "removing a dependent attachment from an existing record" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.avatar.attach blob
+
+ assert_enqueued_with job: ActiveStorage::PurgeJob, args: [ blob ] do
+ @user.avatar.attach nil
+ end
+
+ assert_not @user.avatar.attached?
+ end
+ end
+
+ test "removing an independent attachment from an existing record" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.cover_photo.attach blob
+
+ assert_no_enqueued_jobs only: ActiveStorage::PurgeJob do
+ @user.cover_photo.attach nil
+ end
+
+ assert_not @user.cover_photo.attached?
+ end
+ end
+
+ test "updating an existing record to remove a dependent attachment" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.avatar.attach blob
+
+ assert_enqueued_with job: ActiveStorage::PurgeJob, args: [ blob ] do
+ @user.update! avatar: nil
+ end
+
+ assert_not @user.avatar.attached?
+ end
+ end
+
+ test "updating an existing record to remove an independent attachment" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.cover_photo.attach blob
+
+ assert_no_enqueued_jobs only: ActiveStorage::PurgeJob do
+ @user.update! cover_photo: nil
+ end
+
+ assert_not @user.cover_photo.attached?
+ end
+ end
+
+ test "analyzing a new blob from an uploaded file after attaching it to an existing record" do
+ perform_enqueued_jobs do
+ @user.avatar.attach fixture_file_upload("racecar.jpg")
+ end
+
+ assert @user.avatar.reload.analyzed?
+ assert_equal 4104, @user.avatar.metadata[:width]
+ assert_equal 2736, @user.avatar.metadata[:height]
+ end
+
+ test "analyzing a new blob from an uploaded file after attaching it to an existing record via update" do
+ perform_enqueued_jobs do
+ @user.update! avatar: fixture_file_upload("racecar.jpg")
+ end
+
+ assert @user.avatar.reload.analyzed?
+ assert_equal 4104, @user.avatar.metadata[:width]
+ assert_equal 2736, @user.avatar.metadata[:height]
+ end
+
+ test "analyzing a directly-uploaded blob after attaching it to an existing record" do
+ perform_enqueued_jobs do
+ @user.avatar.attach directly_upload_file_blob(filename: "racecar.jpg")
+ end
+
+ assert @user.avatar.reload.analyzed?
+ assert_equal 4104, @user.avatar.metadata[:width]
+ assert_equal 2736, @user.avatar.metadata[:height]
+ end
+
+ test "analyzing a directly-uploaded blob after attaching it to an existing record via updates" do
+ perform_enqueued_jobs do
+ @user.update! avatar: directly_upload_file_blob(filename: "racecar.jpg")
+ end
+
+ assert @user.avatar.reload.analyzed?
+ assert_equal 4104, @user.avatar.metadata[:width]
+ assert_equal 2736, @user.avatar.metadata[:height]
+ end
+
+ test "attaching an existing blob to a new record" do
+ User.new(name: "Jason").tap do |user|
+ user.avatar.attach create_blob(filename: "funky.jpg")
+ assert user.new_record?
+ assert_equal "funky.jpg", user.avatar.filename.to_s
+
+ user.save!
+ assert_equal "funky.jpg", user.reload.avatar.filename.to_s
+ end
+ end
+
+ test "attaching an existing blob from a signed ID to a new record" do
+ User.new(name: "Jason").tap do |user|
+ user.avatar.attach create_blob(filename: "funky.jpg").signed_id
+ assert user.new_record?
+ assert_equal "funky.jpg", user.avatar.filename.to_s
+
+ user.save!
+ assert_equal "funky.jpg", user.reload.avatar.filename.to_s
+ end
+ end
+
+ test "attaching a new blob from a Hash to a new record" do
+ User.new(name: "Jason").tap do |user|
+ user.avatar.attach io: StringIO.new("STUFF"), filename: "town.jpg", content_type: "image/jpg"
+ assert user.new_record?
+ assert user.avatar.attachment.new_record?
+ assert user.avatar.blob.new_record?
+ assert_equal "town.jpg", user.avatar.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(user.avatar.key)
+
+ user.save!
+ assert user.avatar.attachment.persisted?
+ assert user.avatar.blob.persisted?
+ assert_equal "town.jpg", user.reload.avatar.filename.to_s
+ assert ActiveStorage::Blob.service.exist?(user.avatar.key)
+ end
+ end
+
+ test "attaching a new blob from an uploaded file to a new record" do
+ User.new(name: "Jason").tap do |user|
+ user.avatar.attach fixture_file_upload("racecar.jpg")
+ assert user.new_record?
+ assert user.avatar.attachment.new_record?
+ assert user.avatar.blob.new_record?
+ assert_equal "racecar.jpg", user.avatar.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(user.avatar.key)
+
+ user.save!
+ assert user.avatar.attachment.persisted?
+ assert user.avatar.blob.persisted?
+ assert_equal "racecar.jpg", user.reload.avatar.filename.to_s
+ assert ActiveStorage::Blob.service.exist?(user.avatar.key)
+ end
+ end
+
+ test "creating a record with an existing blob attached" do
+ user = User.create!(name: "Jason", avatar: create_blob(filename: "funky.jpg"))
+ assert_equal "funky.jpg", user.reload.avatar.filename.to_s
+ end
+
+ test "creating a record with an existing blob from a signed ID attached" do
+ user = User.create!(name: "Jason", avatar: create_blob(filename: "funky.jpg").signed_id)
+ assert_equal "funky.jpg", user.reload.avatar.filename.to_s
+ end
+
+ test "creating a record with a new blob from an uploaded file attached" do
+ User.new(name: "Jason", avatar: fixture_file_upload("racecar.jpg")).tap do |user|
+ assert user.new_record?
+ assert user.avatar.attachment.new_record?
+ assert user.avatar.blob.new_record?
+ assert_equal "racecar.jpg", user.avatar.filename.to_s
+ assert_not ActiveStorage::Blob.service.exist?(user.avatar.key)
+
+ user.save!
+ assert_equal "racecar.jpg", user.reload.avatar.filename.to_s
+ end
+ end
+
+ test "creating a record with an unexpected object attached" do
+ error = assert_raises(ArgumentError) { User.create!(name: "Jason", avatar: :foo) }
+ assert_equal "Could not find or build blob: expected attachable, got :foo", error.message
+ end
+
+ test "analyzing a new blob from an uploaded file after attaching it to a new record" do
+ perform_enqueued_jobs do
+ user = User.create!(name: "Jason", avatar: fixture_file_upload("racecar.jpg"))
+ assert user.avatar.reload.analyzed?
+ assert_equal 4104, user.avatar.metadata[:width]
+ assert_equal 2736, user.avatar.metadata[:height]
+ end
+ end
+
+ test "analyzing a directly-uploaded blob after attaching it to a new record" do
+ perform_enqueued_jobs do
+ user = User.create!(name: "Jason", avatar: directly_upload_file_blob(filename: "racecar.jpg"))
+ assert user.avatar.reload.analyzed?
+ assert_equal 4104, user.avatar.metadata[:width]
+ assert_equal 2736, user.avatar.metadata[:height]
+ end
+ end
+
+ test "detaching" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.avatar.attach blob
+ assert @user.avatar.attached?
+
+ perform_enqueued_jobs do
+ @user.avatar.detach
+ end
+
+ assert_not @user.avatar.attached?
+ assert ActiveStorage::Blob.exists?(blob.id)
+ assert ActiveStorage::Blob.service.exist?(blob.key)
+ end
+ end
+
+ test "purging" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.avatar.attach blob
+ assert @user.avatar.attached?
+
+ @user.avatar.purge
+ assert_not @user.avatar.attached?
+ assert_not ActiveStorage::Blob.exists?(blob.id)
+ assert_not ActiveStorage::Blob.service.exist?(blob.key)
+ end
+ end
+
+ test "purging an attachment with a shared blob" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.avatar.attach blob
+ assert @user.avatar.attached?
+
+ another_user = User.create!(name: "John")
+ another_user.avatar.attach blob
+ assert another_user.avatar.attached?
+
+ @user.avatar.purge
+ assert_not @user.avatar.attached?
+ assert ActiveStorage::Blob.exists?(blob.id)
+ assert ActiveStorage::Blob.service.exist?(blob.key)
+ end
+ end
+
+ test "purging later" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.avatar.attach blob
+ assert @user.avatar.attached?
+
+ perform_enqueued_jobs do
+ @user.avatar.purge_later
+ end
+
+ assert_not @user.avatar.attached?
+ assert_not ActiveStorage::Blob.exists?(blob.id)
+ assert_not ActiveStorage::Blob.service.exist?(blob.key)
+ end
+ end
+
+ test "purging an attachment later with shared blob" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.avatar.attach blob
+ assert @user.avatar.attached?
+
+ another_user = User.create!(name: "John")
+ another_user.avatar.attach blob
+ assert another_user.avatar.attached?
+
+ perform_enqueued_jobs do
+ @user.avatar.purge_later
+ end
+
+ assert_not @user.avatar.attached?
+ assert ActiveStorage::Blob.exists?(blob.id)
+ assert ActiveStorage::Blob.service.exist?(blob.key)
+ end
+ end
+
+ test "purging dependent attachment later on destroy" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.avatar.attach blob
+
+ perform_enqueued_jobs do
+ @user.destroy!
+ end
+
+ assert_not ActiveStorage::Blob.exists?(blob.id)
+ assert_not ActiveStorage::Blob.service.exist?(blob.key)
+ end
+ end
+
+ test "not purging independent attachment on destroy" do
+ create_blob(filename: "funky.jpg").tap do |blob|
+ @user.cover_photo.attach blob
+
+ assert_no_enqueued_jobs do
+ @user.destroy!
+ end
+ end
+ end
+
+ test "clearing change on reload" do
+ @user.avatar = create_blob(filename: "funky.jpg")
+ assert @user.avatar.attached?
+
+ @user.reload
+ assert_not @user.avatar.attached?
+ end
+
+ test "overriding attached reader" do
+ @user.avatar.attach create_blob(filename: "funky.jpg")
+
+ assert_equal "funky.jpg", @user.avatar.filename.to_s
+
+ begin
+ User.class_eval do
+ def avatar
+ super.filename.to_s.reverse
+ end
+ end
+
+ assert_equal "gpj.yknuf", @user.avatar
+ ensure
+ User.send(:remove_method, :avatar)
+ end
+ end
+end
diff --git a/activestorage/test/models/attachments_test.rb b/activestorage/test/models/attachments_test.rb
deleted file mode 100644
index 47f2bd7911..0000000000
--- a/activestorage/test/models/attachments_test.rb
+++ /dev/null
@@ -1,220 +0,0 @@
-# frozen_string_literal: true
-
-require "test_helper"
-require "database/setup"
-
-class ActiveStorage::AttachmentsTest < ActiveSupport::TestCase
- include ActiveJob::TestHelper
-
- setup { @user = User.create!(name: "DHH") }
-
- teardown { ActiveStorage::Blob.all.each(&:purge) }
-
- test "attach existing blob" do
- @user.avatar.attach create_blob(filename: "funky.jpg")
- assert_equal "funky.jpg", @user.avatar.filename.to_s
- end
-
- test "attach existing blob from a signed ID" do
- @user.avatar.attach create_blob(filename: "funky.jpg").signed_id
- assert_equal "funky.jpg", @user.avatar.filename.to_s
- end
-
- test "attach new blob from a Hash" do
- @user.avatar.attach io: StringIO.new("STUFF"), filename: "town.jpg", content_type: "image/jpg"
- assert_equal "town.jpg", @user.avatar.filename.to_s
- end
-
- test "attach new blob from an UploadedFile" do
- file = file_fixture "racecar.jpg"
- @user.avatar.attach Rack::Test::UploadedFile.new file
- assert_equal "racecar.jpg", @user.avatar.filename.to_s
- end
-
- test "replace attached blob" do
- @user.avatar.attach create_blob(filename: "funky.jpg")
-
- perform_enqueued_jobs do
- assert_no_difference -> { ActiveStorage::Blob.count } do
- @user.avatar.attach create_blob(filename: "town.jpg")
- end
- end
-
- assert_equal "town.jpg", @user.avatar.filename.to_s
- end
-
- test "replace attached blob unsuccessfully" do
- @user.avatar.attach create_blob(filename: "funky.jpg")
-
- perform_enqueued_jobs do
- assert_raises do
- @user.avatar.attach nil
- end
- end
-
- assert_equal "funky.jpg", @user.reload.avatar.filename.to_s
- assert ActiveStorage::Blob.service.exist?(@user.avatar.key)
- end
-
- test "access underlying associations of new blob" do
- @user.avatar.attach create_blob(filename: "funky.jpg")
- assert_equal @user, @user.avatar_attachment.record
- assert_equal @user.avatar_attachment.blob, @user.avatar_blob
- assert_equal "funky.jpg", @user.avatar_attachment.blob.filename.to_s
- end
-
- test "analyze newly-attached blob" do
- perform_enqueued_jobs do
- @user.avatar.attach create_file_blob
- end
-
- assert_equal 4104, @user.avatar.reload.metadata[:width]
- assert_equal 2736, @user.avatar.metadata[:height]
- end
-
- test "analyze attached blob only once" do
- blob = create_file_blob
-
- perform_enqueued_jobs do
- @user.avatar.attach blob
- end
-
- assert blob.reload.analyzed?
-
- @user.avatar.attachment.destroy
-
- assert_no_enqueued_jobs do
- @user.reload.avatar.attach blob
- end
- end
-
- test "purge attached blob" do
- @user.avatar.attach create_blob(filename: "funky.jpg")
- avatar_key = @user.avatar.key
-
- @user.avatar.purge
- assert_not @user.avatar.attached?
- assert_not ActiveStorage::Blob.service.exist?(avatar_key)
- end
-
- test "purge attached blob later when the record is destroyed" do
- @user.avatar.attach create_blob(filename: "funky.jpg")
- avatar_key = @user.avatar.key
-
- perform_enqueued_jobs do
- @user.destroy
-
- assert_nil ActiveStorage::Blob.find_by(key: avatar_key)
- assert_not ActiveStorage::Blob.service.exist?(avatar_key)
- end
- end
-
- test "find with attached blob" do
- records = %w[alice bob].map do |name|
- User.create!(name: name).tap do |user|
- user.avatar.attach create_blob(filename: "#{name}.jpg")
- end
- end
-
- users = User.where(id: records.map(&:id)).with_attached_avatar.all
-
- assert_equal "alice.jpg", users.first.avatar.filename.to_s
- assert_equal "bob.jpg", users.second.avatar.filename.to_s
- end
-
-
- test "attach existing blobs" do
- @user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "wonky.jpg")
-
- assert_equal "funky.jpg", @user.highlights.first.filename.to_s
- assert_equal "wonky.jpg", @user.highlights.second.filename.to_s
- end
-
- test "attach new blobs" do
- @user.highlights.attach(
- { io: StringIO.new("STUFF"), filename: "town.jpg", content_type: "image/jpg" },
- { io: StringIO.new("IT"), filename: "country.jpg", content_type: "image/jpg" })
-
- assert_equal "town.jpg", @user.highlights.first.filename.to_s
- assert_equal "country.jpg", @user.highlights.second.filename.to_s
- end
-
- test "find attached blobs" do
- @user.highlights.attach(
- { io: StringIO.new("STUFF"), filename: "town.jpg", content_type: "image/jpg" },
- { io: StringIO.new("IT"), filename: "country.jpg", content_type: "image/jpg" })
-
- highlights = User.where(id: @user.id).with_attached_highlights.first.highlights
-
- assert_equal "town.jpg", highlights.first.filename.to_s
- assert_equal "country.jpg", highlights.second.filename.to_s
- end
-
- test "access underlying associations of new blobs" do
- @user.highlights.attach(
- { io: StringIO.new("STUFF"), filename: "town.jpg", content_type: "image/jpg" },
- { io: StringIO.new("IT"), filename: "country.jpg", content_type: "image/jpg" })
-
- assert_equal @user, @user.highlights_attachments.first.record
- assert_equal @user.highlights_attachments.collect(&:blob).sort, @user.highlights_blobs.sort
- assert_equal "town.jpg", @user.highlights_attachments.first.blob.filename.to_s
- end
-
- test "analyze newly-attached blobs" do
- perform_enqueued_jobs do
- @user.highlights.attach(
- create_file_blob(filename: "racecar.jpg", content_type: "image/jpeg"),
- create_file_blob(filename: "video.mp4", content_type: "video/mp4"))
- end
-
- assert_equal 4104, @user.highlights.first.metadata[:width]
- assert_equal 2736, @user.highlights.first.metadata[:height]
-
- assert_equal 640, @user.highlights.second.metadata[:width]
- assert_equal 480, @user.highlights.second.metadata[:height]
- end
-
- test "analyze attached blobs only once" do
- blobs = [
- create_file_blob(filename: "racecar.jpg", content_type: "image/jpeg"),
- create_file_blob(filename: "video.mp4", content_type: "video/mp4")
- ]
-
- perform_enqueued_jobs do
- @user.highlights.attach(blobs)
- end
-
- assert blobs.each(&:reload).all?(&:analyzed?)
-
- @user.highlights.attachments.destroy_all
-
- assert_no_enqueued_jobs do
- @user.highlights.attach(blobs)
- end
- end
-
- test "purge attached blobs" do
- @user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "wonky.jpg")
- highlight_keys = @user.highlights.collect(&:key)
-
- @user.highlights.purge
- assert_not @user.highlights.attached?
- assert_not ActiveStorage::Blob.service.exist?(highlight_keys.first)
- assert_not ActiveStorage::Blob.service.exist?(highlight_keys.second)
- end
-
- test "purge attached blobs later when the record is destroyed" do
- @user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "wonky.jpg")
- highlight_keys = @user.highlights.collect(&:key)
-
- perform_enqueued_jobs do
- @user.destroy
-
- assert_nil ActiveStorage::Blob.find_by(key: highlight_keys.first)
- assert_not ActiveStorage::Blob.service.exist?(highlight_keys.first)
-
- assert_nil ActiveStorage::Blob.find_by(key: highlight_keys.second)
- assert_not ActiveStorage::Blob.service.exist?(highlight_keys.second)
- end
- end
-end
diff --git a/activestorage/test/models/blob_test.rb b/activestorage/test/models/blob_test.rb
index 6e815997ba..88c106a08b 100644
--- a/activestorage/test/models/blob_test.rb
+++ b/activestorage/test/models/blob_test.rb
@@ -2,8 +2,22 @@
require "test_helper"
require "database/setup"
+require "active_support/testing/method_call_assertions"
class ActiveStorage::BlobTest < ActiveSupport::TestCase
+ include ActiveSupport::Testing::MethodCallAssertions
+
+ test "unattached scope" do
+ [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
+ User.create! name: "DHH", avatar: blobs.first
+ assert_includes ActiveStorage::Blob.unattached, blobs.second
+ assert_not_includes ActiveStorage::Blob.unattached, blobs.first
+
+ User.create! name: "Jason", avatar: blobs.second
+ assert_not_includes ActiveStorage::Blob.unattached, blobs.second
+ end
+ end
+
test "create after upload sets byte size and checksum" do
data = "Hello world!"
blob = create_blob data: data
@@ -13,14 +27,46 @@ class ActiveStorage::BlobTest < ActiveSupport::TestCase
assert_equal Digest::MD5.base64digest(data), blob.checksum
end
+ test "create after upload extracts content type from data" do
+ blob = create_file_blob content_type: "application/octet-stream"
+ assert_equal "image/jpeg", blob.content_type
+ end
+
+ test "create after upload extracts content type from filename" do
+ blob = create_blob content_type: "application/octet-stream"
+ assert_equal "text/plain", blob.content_type
+ end
+
+ test "create after upload extracts content_type from io when no content_type given and identify: false" do
+ blob = create_blob content_type: nil, identify: false
+ assert_equal "text/plain", blob.content_type
+ end
+
+ test "create after upload uses content_type when identify: false" do
+ blob = create_blob data: "Article,dates,analysis\n1, 2, 3", filename: "table.csv", content_type: "text/csv", identify: false
+ assert_equal "text/csv", blob.content_type
+ end
+
+ test "image?" do
+ blob = create_file_blob filename: "racecar.jpg"
+ assert_predicate blob, :image?
+ assert_not_predicate blob, :audio?
+ end
+
+ test "video?" do
+ blob = create_file_blob(filename: "video.mp4", content_type: "video/mp4")
+ assert_predicate blob, :video?
+ assert_not_predicate blob, :audio?
+ end
+
test "text?" do
blob = create_blob data: "Hello world!"
- assert blob.text?
- assert_not blob.audio?
+ assert_predicate blob, :text?
+ assert_not_predicate blob, :audio?
end
test "download yields chunks" do
- blob = create_blob data: "a" * 75.kilobytes
+ blob = create_blob data: "a" * 5.0625.megabytes
chunks = []
blob.download do |chunk|
@@ -28,8 +74,42 @@ class ActiveStorage::BlobTest < ActiveSupport::TestCase
end
assert_equal 2, chunks.size
- assert_equal "a" * 64.kilobytes, chunks.first
- assert_equal "a" * 11.kilobytes, chunks.second
+ assert_equal "a" * 5.megabytes, chunks.first
+ assert_equal "a" * 64.kilobytes, chunks.second
+ end
+
+ test "open with integrity" do
+ create_file_blob(filename: "racecar.jpg").tap do |blob|
+ blob.open do |file|
+ assert file.binmode?
+ assert_equal 0, file.pos
+ assert File.basename(file.path).starts_with?("ActiveStorage-#{blob.id}-")
+ assert file.path.ends_with?(".jpg")
+ assert_equal file_fixture("racecar.jpg").binread, file.read, "Expected downloaded file to match fixture file"
+ end
+ end
+ end
+
+ test "open without integrity" do
+ create_blob(data: "Hello, world!").tap do |blob|
+ blob.update! checksum: Digest::MD5.base64digest("Goodbye, world!")
+
+ assert_raises ActiveStorage::IntegrityError do
+ blob.open { |file| flunk "Expected integrity check to fail" }
+ end
+ end
+ end
+
+ test "open in a custom tempdir" do
+ tempdir = Dir.mktmpdir
+
+ create_file_blob(filename: "racecar.jpg").open(tempdir: tempdir) do |file|
+ assert file.binmode?
+ assert_equal 0, file.pos
+ assert_match(/\.jpg\z/, file.path)
+ assert file.path.starts_with?(tempdir)
+ assert_equal file_fixture("racecar.jpg").binread, file.read, "Expected downloaded file to match fixture file"
+ end
end
test "urls expiring in 5 minutes" do
@@ -41,16 +121,71 @@ class ActiveStorage::BlobTest < ActiveSupport::TestCase
end
end
- test "purge removes from external service" do
+ test "urls force attachment as content disposition for content types served as binary" do
+ blob = create_blob(content_type: "text/html")
+
+ freeze_time do
+ assert_equal expected_url_for(blob, disposition: :attachment), blob.service_url
+ assert_equal expected_url_for(blob, disposition: :attachment), blob.service_url(disposition: :inline)
+ end
+ end
+
+ test "urls allow for custom filename" do
+ blob = create_blob(filename: "original.txt")
+ new_filename = ActiveStorage::Filename.new("new.txt")
+
+ freeze_time do
+ assert_equal expected_url_for(blob), blob.service_url
+ assert_equal expected_url_for(blob, filename: new_filename), blob.service_url(filename: new_filename)
+ assert_equal expected_url_for(blob, filename: new_filename), blob.service_url(filename: "new.txt")
+ assert_equal expected_url_for(blob, filename: blob.filename), blob.service_url(filename: nil)
+ end
+ end
+
+ test "urls allow for custom options" do
+ blob = create_blob(filename: "original.txt")
+
+ arguments = [
+ blob.key,
+ expires_in: ActiveStorage.service_urls_expire_in,
+ disposition: :inline,
+ content_type: blob.content_type,
+ filename: blob.filename,
+ thumb_size: "300x300",
+ thumb_mode: "crop"
+ ]
+ assert_called_with(blob.service, :url, arguments) do
+ blob.service_url(thumb_size: "300x300", thumb_mode: "crop")
+ end
+ end
+
+ test "purge deletes file from external service" do
blob = create_blob
blob.purge
assert_not ActiveStorage::Blob.service.exist?(blob.key)
end
+ test "purge deletes variants from external service" do
+ blob = create_file_blob
+ variant = blob.variant(resize: "100>").processed
+
+ blob.purge
+ assert_not ActiveStorage::Blob.service.exist?(variant.key)
+ end
+
+ test "purge does nothing when attachments exist" do
+ create_blob.tap do |blob|
+ User.create! name: "DHH", avatar: blob
+ assert_no_difference(-> { ActiveStorage::Blob.count }) { blob.purge }
+ assert ActiveStorage::Blob.service.exist?(blob.key)
+ end
+ end
+
private
- def expected_url_for(blob, disposition: :inline)
- query_string = { content_type: blob.content_type, disposition: "#{disposition}; #{blob.filename.parameters}" }.to_param
- "/rails/active_storage/disk/#{ActiveStorage.verifier.generate(blob.key, expires_in: 5.minutes, purpose: :blob_key)}/#{blob.filename}?#{query_string}"
+ def expected_url_for(blob, disposition: :inline, filename: nil)
+ filename ||= blob.filename
+ query_string = { content_type: blob.content_type, disposition: "#{disposition}; #{filename.parameters}" }.to_param
+ "https://example.com/rails/active_storage/disk/#{ActiveStorage.verifier.generate(blob.key, expires_in: 5.minutes, purpose: :blob_key)}/#{filename}?#{query_string}"
end
end
diff --git a/activestorage/test/models/presence_validation_test.rb b/activestorage/test/models/presence_validation_test.rb
new file mode 100644
index 0000000000..13ba3c900d
--- /dev/null
+++ b/activestorage/test/models/presence_validation_test.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require "test_helper"
+require "database/setup"
+
+class ActiveStorage::PresenceValidationTest < ActiveSupport::TestCase
+ class Admin < User; end
+
+ teardown do
+ Admin.clear_validators!
+ end
+
+ test "validates_presence_of has_one_attached" do
+ Admin.validates_presence_of :avatar
+ a = Admin.new(name: "DHH")
+ assert_predicate a, :invalid?
+
+ a.avatar.attach create_blob(filename: "funky.jpg")
+ assert_predicate a, :valid?
+ end
+
+ test "validates_presence_of has_many_attached" do
+ Admin.validates_presence_of :highlights
+ a = Admin.new(name: "DHH")
+ assert_predicate a, :invalid?
+
+ a.highlights.attach create_blob(filename: "funky.jpg")
+ assert_predicate a, :valid?
+ end
+end
diff --git a/activestorage/test/models/preview_test.rb b/activestorage/test/models/preview_test.rb
index bcd8442f4b..e7ae399fb7 100644
--- a/activestorage/test/models/preview_test.rb
+++ b/activestorage/test/models/preview_test.rb
@@ -8,7 +8,7 @@ class ActiveStorage::PreviewTest < ActiveSupport::TestCase
blob = create_file_blob(filename: "report.pdf", content_type: "application/pdf")
preview = blob.preview(resize: "640x280").processed
- assert preview.image.attached?
+ assert_predicate preview.image, :attached?
assert_equal "report.png", preview.image.filename.to_s
assert_equal "image/png", preview.image.content_type
@@ -21,9 +21,9 @@ class ActiveStorage::PreviewTest < ActiveSupport::TestCase
blob = create_file_blob(filename: "video.mp4", content_type: "video/mp4")
preview = blob.preview(resize: "640x280").processed
- assert preview.image.attached?
- assert_equal "video.png", preview.image.filename.to_s
- assert_equal "image/png", preview.image.content_type
+ assert_predicate preview.image, :attached?
+ assert_equal "video.jpg", preview.image.filename.to_s
+ assert_equal "image/jpeg", preview.image.content_type
image = read_image(preview.image)
assert_equal 640, image.width
@@ -33,7 +33,7 @@ class ActiveStorage::PreviewTest < ActiveSupport::TestCase
test "previewing an unpreviewable blob" do
blob = create_file_blob
- assert_raises ActiveStorage::Blob::UnpreviewableError do
+ assert_raises ActiveStorage::UnpreviewableError do
blob.preview resize: "640x280"
end
end
diff --git a/activestorage/test/models/reflection_test.rb b/activestorage/test/models/reflection_test.rb
new file mode 100644
index 0000000000..98606b0617
--- /dev/null
+++ b/activestorage/test/models/reflection_test.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require "test_helper"
+
+class ActiveStorage::ReflectionTest < ActiveSupport::TestCase
+ test "reflecting on a singular attachment" do
+ reflection = User.reflect_on_attachment(:avatar)
+ assert_equal User, reflection.active_record
+ assert_equal :avatar, reflection.name
+ assert_equal :has_one_attached, reflection.macro
+ assert_equal :purge_later, reflection.options[:dependent]
+ end
+
+ test "reflection on a singular attachment with the same name as an attachment on another model" do
+ reflection = Group.reflect_on_attachment(:avatar)
+ assert_equal Group, reflection.active_record
+ end
+
+ test "reflecting on a collection attachment" do
+ reflection = User.reflect_on_attachment(:highlights)
+ assert_equal User, reflection.active_record
+ assert_equal :highlights, reflection.name
+ assert_equal :has_many_attached, reflection.macro
+ assert_equal :purge_later, reflection.options[:dependent]
+ end
+
+ test "reflecting on all attachments" do
+ reflections = User.reflect_on_all_attachments.sort_by(&:name)
+ assert_equal [ User ], reflections.collect(&:active_record).uniq
+ assert_equal %i[ avatar cover_photo highlights vlogs ], reflections.collect(&:name)
+ assert_equal %i[ has_one_attached has_one_attached has_many_attached has_many_attached ], reflections.collect(&:macro)
+ assert_equal [ :purge_later, false, :purge_later, false ], reflections.collect { |reflection| reflection.options[:dependent] }
+ end
+end
diff --git a/activestorage/test/models/representation_test.rb b/activestorage/test/models/representation_test.rb
index 29fe61aee4..2a06b31c77 100644
--- a/activestorage/test/models/representation_test.rb
+++ b/activestorage/test/models/representation_test.rb
@@ -34,7 +34,7 @@ class ActiveStorage::RepresentationTest < ActiveSupport::TestCase
test "representing an unrepresentable blob" do
blob = create_blob
- assert_raises ActiveStorage::Blob::UnrepresentableError do
+ assert_raises ActiveStorage::UnrepresentableError do
blob.representation resize: "100x100"
end
end
diff --git a/activestorage/test/models/variant_test.rb b/activestorage/test/models/variant_test.rb
index b7d20ab55a..6577f1cd9f 100644
--- a/activestorage/test/models/variant_test.rb
+++ b/activestorage/test/models/variant_test.rb
@@ -4,31 +4,174 @@ require "test_helper"
require "database/setup"
class ActiveStorage::VariantTest < ActiveSupport::TestCase
- setup do
- @blob = create_file_blob filename: "racecar.jpg"
+ test "resized variation of JPEG blob" do
+ blob = create_file_blob(filename: "racecar.jpg")
+ variant = blob.variant(resize: "100x100").processed
+ assert_match(/racecar\.jpg/, variant.service_url)
+
+ image = read_image(variant)
+ assert_equal 100, image.width
+ assert_equal 67, image.height
end
- test "resized variation" do
- variant = @blob.variant(resize: "100x100").processed
+ test "resized and monochrome variation of JPEG blob" do
+ blob = create_file_blob(filename: "racecar.jpg")
+ variant = blob.variant(resize: "100x100", monochrome: true).processed
assert_match(/racecar\.jpg/, variant.service_url)
image = read_image(variant)
assert_equal 100, image.width
assert_equal 67, image.height
+ assert_match(/Gray/, image.colorspace)
end
- test "resized and monochrome variation" do
- variant = @blob.variant(resize: "100x100", monochrome: true).processed
+ test "monochrome with default variant_processor" do
+ begin
+ ActiveStorage.variant_processor = nil
+
+ blob = create_file_blob(filename: "racecar.jpg")
+ variant = blob.variant(monochrome: true).processed
+ image = read_image(variant)
+ assert_match(/Gray/, image.colorspace)
+ ensure
+ ActiveStorage.variant_processor = :mini_magick
+ end
+ end
+
+ test "disabled variation of JPEG blob" do
+ blob = create_file_blob(filename: "racecar.jpg")
+ variant = blob.variant(resize: "100x100", monochrome: false).processed
assert_match(/racecar\.jpg/, variant.service_url)
image = read_image(variant)
assert_equal 100, image.width
assert_equal 67, image.height
- assert_match(/Gray/, image.colorspace)
+ assert_match(/RGB/, image.colorspace)
+ end
+
+ test "disabled variation of JPEG blob with :combine_options" do
+ blob = create_file_blob(filename: "racecar.jpg")
+ variant = ActiveSupport::Deprecation.silence do
+ blob.variant(combine_options: {
+ resize: "100x100",
+ monochrome: false
+ }).processed
+ end
+ assert_match(/racecar\.jpg/, variant.service_url)
+
+ image = read_image(variant)
+ assert_equal 100, image.width
+ assert_equal 67, image.height
+ assert_match(/RGB/, image.colorspace)
+ end
+
+ test "disabled variation using :combine_options" do
+ begin
+ ActiveStorage.variant_processor = nil
+ blob = create_file_blob(filename: "racecar.jpg")
+ variant = ActiveSupport::Deprecation.silence do
+ blob.variant(combine_options: {
+ crop: "100x100+0+0",
+ monochrome: false
+ }).processed
+ end
+ assert_match(/racecar\.jpg/, variant.service_url)
+
+ image = read_image(variant)
+ assert_equal 100, image.width
+ assert_equal 100, image.height
+ assert_match(/RGB/, image.colorspace)
+ ensure
+ ActiveStorage.variant_processor = :mini_magick
+ end
+ end
+
+ test "center-weighted crop of JPEG blob using :combine_options" do
+ begin
+ ActiveStorage.variant_processor = nil
+ blob = create_file_blob(filename: "racecar.jpg")
+ variant = ActiveSupport::Deprecation.silence do
+ blob.variant(combine_options: {
+ gravity: "center",
+ resize: "100x100^",
+ crop: "100x100+0+0",
+ }).processed
+ end
+ assert_match(/racecar\.jpg/, variant.service_url)
+
+ image = read_image(variant)
+ assert_equal 100, image.width
+ assert_equal 100, image.height
+ ensure
+ ActiveStorage.variant_processor = :mini_magick
+ end
+ end
+
+ test "center-weighted crop of JPEG blob using :resize_to_fill" do
+ blob = create_file_blob(filename: "racecar.jpg")
+ variant = blob.variant(resize_to_fill: [100, 100]).processed
+ assert_match(/racecar\.jpg/, variant.service_url)
+
+ image = read_image(variant)
+ assert_equal 100, image.width
+ assert_equal 100, image.height
+ end
+
+ test "resized variation of PSD blob" do
+ blob = create_file_blob(filename: "icon.psd", content_type: "image/vnd.adobe.photoshop")
+ variant = blob.variant(resize: "20x20").processed
+ assert_match(/icon\.png/, variant.service_url)
+
+ image = read_image(variant)
+ assert_equal "PNG", image.type
+ assert_equal 20, image.width
+ assert_equal 20, image.height
+ end
+
+ test "resized variation of ICO blob" do
+ blob = create_file_blob(filename: "favicon.ico", content_type: "image/vnd.microsoft.icon")
+ variant = blob.variant(resize: "20x20").processed
+ assert_match(/icon\.png/, variant.service_url)
+
+ image = read_image(variant)
+ assert_equal "PNG", image.type
+ assert_equal 20, image.width
+ assert_equal 20, image.height
+ end
+
+ test "optimized variation of GIF blob" do
+ blob = create_file_blob(filename: "image.gif", content_type: "image/gif")
+
+ assert_nothing_raised do
+ blob.variant(layers: "Optimize").processed
+ end
+ end
+
+ test "variation of invariable blob" do
+ assert_raises ActiveStorage::InvariableError do
+ create_file_blob(filename: "report.pdf", content_type: "application/pdf").variant(resize: "100x100")
+ end
end
test "service_url doesn't grow in length despite long variant options" do
- variant = @blob.variant(font: "a" * 10_000).processed
- assert_operator variant.service_url.length, :<, 500
+ blob = create_file_blob(filename: "racecar.jpg")
+ variant = blob.variant(font: "a" * 10_000).processed
+ assert_operator variant.service_url.length, :<, 525
+ end
+
+ test "works for vips processor" do
+ begin
+ ActiveStorage.variant_processor = :vips
+ blob = create_file_blob(filename: "racecar.jpg")
+ variant = blob.variant(thumbnail_image: 100).processed
+
+ image = read_image(variant)
+ assert_equal 100, image.width
+ assert_equal 67, image.height
+ rescue LoadError
+ # libvips not installed
+ ensure
+ ActiveStorage.variant_processor = :mini_magick
+ end
end
end
diff --git a/activestorage/test/previewer/pdf_previewer_test.rb b/activestorage/test/previewer/mupdf_previewer_test.rb
index fe32f39be4..6c2db6fcbf 100644
--- a/activestorage/test/previewer/pdf_previewer_test.rb
+++ b/activestorage/test/previewer/mupdf_previewer_test.rb
@@ -3,15 +3,15 @@
require "test_helper"
require "database/setup"
-require "active_storage/previewer/pdf_previewer"
+require "active_storage/previewer/mupdf_previewer"
-class ActiveStorage::Previewer::PDFPreviewerTest < ActiveSupport::TestCase
+class ActiveStorage::Previewer::MuPDFPreviewerTest < ActiveSupport::TestCase
setup do
@blob = create_file_blob(filename: "report.pdf", content_type: "application/pdf")
end
test "previewing a PDF document" do
- ActiveStorage::Previewer::PDFPreviewer.new(@blob).preview do |attachable|
+ ActiveStorage::Previewer::MuPDFPreviewer.new(@blob).preview do |attachable|
assert_equal "image/png", attachable[:content_type]
assert_equal "report.png", attachable[:filename]
diff --git a/activestorage/test/previewer/poppler_pdf_previewer_test.rb b/activestorage/test/previewer/poppler_pdf_previewer_test.rb
new file mode 100644
index 0000000000..2b41c8b642
--- /dev/null
+++ b/activestorage/test/previewer/poppler_pdf_previewer_test.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require "test_helper"
+require "database/setup"
+
+require "active_storage/previewer/poppler_pdf_previewer"
+
+class ActiveStorage::Previewer::PopplerPDFPreviewerTest < ActiveSupport::TestCase
+ setup do
+ @blob = create_file_blob(filename: "report.pdf", content_type: "application/pdf")
+ end
+
+ test "previewing a PDF document" do
+ ActiveStorage::Previewer::PopplerPDFPreviewer.new(@blob).preview do |attachable|
+ assert_equal "image/png", attachable[:content_type]
+ assert_equal "report.png", attachable[:filename]
+
+ image = MiniMagick::Image.read(attachable[:io])
+ assert_equal 612, image.width
+ assert_equal 792, image.height
+ end
+ end
+end
diff --git a/activestorage/test/previewer/video_previewer_test.rb b/activestorage/test/previewer/video_previewer_test.rb
index dba9b0d7e2..9dc350205b 100644
--- a/activestorage/test/previewer/video_previewer_test.rb
+++ b/activestorage/test/previewer/video_previewer_test.rb
@@ -12,12 +12,13 @@ class ActiveStorage::Previewer::VideoPreviewerTest < ActiveSupport::TestCase
test "previewing an MP4 video" do
ActiveStorage::Previewer::VideoPreviewer.new(@blob).preview do |attachable|
- assert_equal "image/png", attachable[:content_type]
- assert_equal "video.png", attachable[:filename]
+ assert_equal "image/jpeg", attachable[:content_type]
+ assert_equal "video.jpg", attachable[:filename]
image = MiniMagick::Image.read(attachable[:io])
assert_equal 640, image.width
assert_equal 480, image.height
+ assert_equal "image/jpeg", image.mime_type
end
end
end
diff --git a/activestorage/test/service/azure_storage_service_test.rb b/activestorage/test/service/azure_storage_service_test.rb
index be31bbe858..09c2e7f99c 100644
--- a/activestorage/test/service/azure_storage_service_test.rb
+++ b/activestorage/test/service/azure_storage_service_test.rb
@@ -10,12 +10,29 @@ if SERVICE_CONFIGURATIONS[:azure]
include ActiveStorage::Service::SharedServiceTests
test "signed URL generation" do
- url = @service.url(FIXTURE_KEY, expires_in: 5.minutes,
+ url = @service.url(@key, expires_in: 5.minutes,
disposition: :inline, filename: ActiveStorage::Filename.new("avatar.png"), content_type: "image/png")
assert_match(/(\S+)&rscd=inline%3B\+filename%3D%22avatar\.png%22%3B\+filename\*%3DUTF-8%27%27avatar\.png&rsct=image%2Fpng/, url)
assert_match SERVICE_CONFIGURATIONS[:azure][:container], url
end
+
+ test "uploading a tempfile" do
+ begin
+ key = SecureRandom.base58(24)
+ data = "Something else entirely!"
+
+ Tempfile.open do |file|
+ file.write(data)
+ file.rewind
+ @service.upload(key, file)
+ end
+
+ assert_equal data, @service.download(key)
+ ensure
+ @service.delete(key)
+ end
+ end
end
else
puts "Skipping Azure Storage Service tests because no Azure configuration was supplied"
diff --git a/activestorage/test/service/configurations.example.yml b/activestorage/test/service/configurations.example.yml
index 56ed37be5d..a63aa33302 100644
--- a/activestorage/test/service/configurations.example.yml
+++ b/activestorage/test/service/configurations.example.yml
@@ -7,7 +7,7 @@
#
# gcs:
# service: GCS
-# keyfile: {
+# credentials: {
# type: "service_account",
# project_id: "",
# private_key_id: "",
@@ -24,7 +24,6 @@
#
# azure:
# service: AzureStorage
-# path: ""
# storage_account_name: ""
# storage_access_key: ""
# container: ""
diff --git a/activestorage/test/service/configurations.yml.enc b/activestorage/test/service/configurations.yml.enc
index df11aac161..648924a562 100644
--- a/activestorage/test/service/configurations.yml.enc
+++ b/activestorage/test/service/configurations.yml.enc
Binary files differ
diff --git a/activestorage/test/service/configurator_test.rb b/activestorage/test/service/configurator_test.rb
index a2fd035e02..3ef9cf9fb6 100644
--- a/activestorage/test/service/configurator_test.rb
+++ b/activestorage/test/service/configurator_test.rb
@@ -6,6 +6,13 @@ class ActiveStorage::Service::ConfiguratorTest < ActiveSupport::TestCase
test "builds correct service instance based on service name" do
service = ActiveStorage::Service::Configurator.build(:foo, foo: { service: "Disk", root: "path" })
assert_instance_of ActiveStorage::Service::DiskService, service
+ assert_equal "path", service.root
+ end
+
+ test "builds correct service instance based on lowercase service name" do
+ service = ActiveStorage::Service::Configurator.build(:foo, foo: { service: "disk", root: "path" })
+ assert_instance_of ActiveStorage::Service::DiskService, service
+ assert_equal "path", service.root
end
test "raises error when passing non-existent service name" do
diff --git a/activestorage/test/service/disk_service_test.rb b/activestorage/test/service/disk_service_test.rb
index 4a6361b920..a0218bff1c 100644
--- a/activestorage/test/service/disk_service_test.rb
+++ b/activestorage/test/service/disk_service_test.rb
@@ -8,7 +8,11 @@ class ActiveStorage::Service::DiskServiceTest < ActiveSupport::TestCase
include ActiveStorage::Service::SharedServiceTests
test "url generation" do
- assert_match(/rails\/active_storage\/disk\/.*\/avatar\.png\?content_type=image%2Fpng&disposition=inline/,
- @service.url(FIXTURE_KEY, expires_in: 5.minutes, disposition: :inline, filename: ActiveStorage::Filename.new("avatar.png"), content_type: "image/png"))
+ assert_match(/^https:\/\/example.com\/rails\/active_storage\/disk\/.*\/avatar\.png\?content_type=image%2Fpng&disposition=inline/,
+ @service.url(@key, expires_in: 5.minutes, disposition: :inline, filename: ActiveStorage::Filename.new("avatar.png"), content_type: "image/png"))
+ end
+
+ test "headers_for_direct_upload generation" do
+ assert_equal({ "Content-Type" => "application/json" }, @service.headers_for_direct_upload(@key, content_type: "application/json"))
end
end
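As a usage sketch (assumptions: a Disk service configured under a made-up :local name, and ActiveStorage::Current.host set the way the updated test helper sets it), the behavior these two tests pin down looks roughly like this:

require "tmpdir"

ActiveStorage::Current.host = "https://example.com"

service = ActiveStorage::Service.configure(
  :local, local: { service: "Disk", root: Dir.mktmpdir("disk_service_sketch") }
)

service.url("some-key", expires_in: 5.minutes, disposition: :inline,
  filename: ActiveStorage::Filename.new("avatar.png"), content_type: "image/png")
# => "https://example.com/rails/active_storage/disk/...?content_type=image%2Fpng&disposition=inline"

service.headers_for_direct_upload("some-key", content_type: "application/json")
# => { "Content-Type" => "application/json" }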
diff --git a/activestorage/test/service/gcs_service_test.rb b/activestorage/test/service/gcs_service_test.rb
index 5566c664a9..2ba2f8b346 100644
--- a/activestorage/test/service/gcs_service_test.rb
+++ b/activestorage/test/service/gcs_service_test.rb
@@ -19,7 +19,7 @@ if SERVICE_CONFIGURATIONS[:gcs]
uri = URI.parse url
request = Net::HTTP::Put.new uri.request_uri
request.body = data
- request.add_field "Content-Type", "text/plain"
+ request.add_field "Content-Type", ""
request.add_field "Content-MD5", checksum
Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
http.request request
@@ -32,12 +32,21 @@ if SERVICE_CONFIGURATIONS[:gcs]
end
test "signed URL generation" do
- freeze_time do
- url = SERVICE.bucket.signed_url(FIXTURE_KEY, expires: 120) +
- "&response-content-disposition=inline%3B+filename%3D%22test.txt%22%3B+filename%2A%3DUTF-8%27%27test.txt" +
- "&response-content-type=text%2Fplain"
+ assert_match(/storage\.googleapis\.com\/.*response-content-disposition=inline.*test\.txt.*response-content-type=text%2Fplain/,
+ @service.url(@key, expires_in: 2.minutes, disposition: :inline, filename: ActiveStorage::Filename.new("test.txt"), content_type: "text/plain"))
+ end
+
+ test "signed URL response headers" do
+ begin
+ key = SecureRandom.base58(24)
+ data = "Something else entirely!"
+ @service.upload(key, StringIO.new(data), checksum: Digest::MD5.base64digest(data))
- assert_equal url, @service.url(FIXTURE_KEY, expires_in: 2.minutes, disposition: :inline, filename: ActiveStorage::Filename.new("test.txt"), content_type: "text/plain")
+ url = @service.url(key, expires_in: 2.minutes, disposition: :inline, filename: ActiveStorage::Filename.new("test.txt"), content_type: "text/plain")
+ response = Net::HTTP.get_response(URI(url))
+ assert_equal "text/plain", response.content_type
+ ensure
+ @service.delete key
end
end
end
diff --git a/activestorage/test/service/mirror_service_test.rb b/activestorage/test/service/mirror_service_test.rb
index 92101b1282..bb502dde60 100644
--- a/activestorage/test/service/mirror_service_test.rb
+++ b/activestorage/test/service/mirror_service_test.rb
@@ -10,8 +10,8 @@ class ActiveStorage::Service::MirrorServiceTest < ActiveSupport::TestCase
end.to_h
config = mirror_config.merge \
- mirror: { service: "Mirror", primary: "primary", mirrors: mirror_config.keys },
- primary: { service: "Disk", root: Dir.mktmpdir("active_storage_tests_primary") }
+ mirror: { service: "Mirror", primary: "primary", mirrors: mirror_config.keys },
+ primary: { service: "Disk", root: Dir.mktmpdir("active_storage_tests_primary") }
SERVICE = ActiveStorage::Service.configure :mirror, config
@@ -19,11 +19,16 @@ class ActiveStorage::Service::MirrorServiceTest < ActiveSupport::TestCase
test "uploading to all services" do
begin
- data = "Something else entirely!"
- key = upload(data, to: @service)
+ key = SecureRandom.base58(24)
+ data = "Something else entirely!"
+ io = StringIO.new(data)
+ checksum = Digest::MD5.base64digest(data)
- assert_equal data, SERVICE.primary.download(key)
- SERVICE.mirrors.each do |mirror|
+ @service.upload key, io.tap(&:read), checksum: checksum
+ assert_predicate io, :eof?
+
+ assert_equal data, @service.primary.download(key)
+ @service.mirrors.each do |mirror|
assert_equal data, mirror.download(key)
end
ensure
@@ -32,17 +37,21 @@ class ActiveStorage::Service::MirrorServiceTest < ActiveSupport::TestCase
end
test "downloading from primary service" do
- data = "Something else entirely!"
- key = upload(data, to: SERVICE.primary)
+ key = SecureRandom.base58(24)
+ data = "Something else entirely!"
+ checksum = Digest::MD5.base64digest(data)
+
+ @service.primary.upload key, StringIO.new(data), checksum: checksum
assert_equal data, @service.download(key)
end
test "deleting from all services" do
- @service.delete FIXTURE_KEY
- assert_not SERVICE.primary.exist?(FIXTURE_KEY)
+ @service.delete @key
+
+ assert_not SERVICE.primary.exist?(@key)
SERVICE.mirrors.each do |mirror|
- assert_not mirror.exist?(FIXTURE_KEY)
+ assert_not mirror.exist?(@key)
end
end
@@ -50,17 +59,8 @@ class ActiveStorage::Service::MirrorServiceTest < ActiveSupport::TestCase
filename = ActiveStorage::Filename.new("test.txt")
freeze_time do
- assert_equal SERVICE.primary.url(FIXTURE_KEY, expires_in: 2.minutes, disposition: :inline, filename: filename, content_type: "text/plain"),
- @service.url(FIXTURE_KEY, expires_in: 2.minutes, disposition: :inline, filename: filename, content_type: "text/plain")
+ assert_equal @service.primary.url(@key, expires_in: 2.minutes, disposition: :inline, filename: filename, content_type: "text/plain"),
+ @service.url(@key, expires_in: 2.minutes, disposition: :inline, filename: filename, content_type: "text/plain")
end
end
-
- private
- def upload(data, to:)
- SecureRandom.base58(24).tap do |key|
- io = StringIO.new(data).tap(&:read)
- @service.upload key, io, checksum: Digest::MD5.base64digest(data)
- assert io.eof?
- end
- end
end
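A configuration sketch (not from this patch; the service names and roots are made up) of the Mirror service API the rewritten tests exercise, i.e. uploading once and finding the blob on the primary and every mirror:

require "tmpdir"
require "stringio"
require "digest"
require "active_support/core_ext/securerandom"

config = {
  mirror:  { service: "Mirror", primary: "primary", mirrors: [ "copy" ] },
  primary: { service: "Disk", root: Dir.mktmpdir("mirror_sketch_primary") },
  copy:    { service: "Disk", root: Dir.mktmpdir("mirror_sketch_copy") }
}

service = ActiveStorage::Service.configure(:mirror, config)

key  = SecureRandom.base58(24)
data = "Something else entirely!"
service.upload(key, StringIO.new(data), checksum: Digest::MD5.base64digest(data))

service.primary.download(key)               # => data
service.mirrors.all? { |m| m.exist?(key) }  # => true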
diff --git a/activestorage/test/service/s3_service_test.rb b/activestorage/test/service/s3_service_test.rb
index c3818422aa..4bfcda017f 100644
--- a/activestorage/test/service/s3_service_test.rb
+++ b/activestorage/test/service/s3_service_test.rb
@@ -3,7 +3,7 @@
require "service/shared_service_tests"
require "net/http"
-if SERVICE_CONFIGURATIONS[:s3] && SERVICE_CONFIGURATIONS[:s3][:access_key_id].present?
+if SERVICE_CONFIGURATIONS[:s3]
class ActiveStorage::Service::S3ServiceTest < ActiveSupport::TestCase
SERVICE = ActiveStorage::Service.configure(:s3, SERVICE_CONFIGURATIONS)
@@ -32,10 +32,10 @@ if SERVICE_CONFIGURATIONS[:s3] && SERVICE_CONFIGURATIONS[:s3][:access_key_id].pr
end
test "signed URL generation" do
- url = @service.url(FIXTURE_KEY, expires_in: 5.minutes,
+ url = @service.url(@key, expires_in: 5.minutes,
disposition: :inline, filename: ActiveStorage::Filename.new("avatar.png"), content_type: "image/png")
- assert_match(/s3\.(\S+)?amazonaws.com.*response-content-disposition=inline.*avatar\.png.*response-content-type=image%2Fpng/, url)
+ assert_match(/s3(-[-a-z0-9]+)?\.(\S+)?amazonaws.com.*response-content-disposition=inline.*avatar\.png.*response-content-type=image%2Fpng/, url)
assert_match SERVICE_CONFIGURATIONS[:s3][:bucket], url
end
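A quick illustrative check (not part of the patch; the bucket URLs are invented) of what the loosened endpoint pattern accepts, covering both the global and the region-specific S3 hosts:

pattern = /s3(-[-a-z0-9]+)?\.(\S+)?amazonaws.com/

pattern.match?("https://my-bucket.s3.amazonaws.com/avatar.png")            # => true
pattern.match?("https://my-bucket.s3-eu-west-1.amazonaws.com/avatar.png")  # => true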
diff --git a/activestorage/test/service/shared_service_tests.rb b/activestorage/test/service/shared_service_tests.rb
index ade91ab89a..30cfca4e36 100644
--- a/activestorage/test/service/shared_service_tests.rb
+++ b/activestorage/test/service/shared_service_tests.rb
@@ -6,17 +6,17 @@ require "active_support/core_ext/securerandom"
module ActiveStorage::Service::SharedServiceTests
extend ActiveSupport::Concern
- FIXTURE_KEY = SecureRandom.base58(24)
FIXTURE_DATA = "\211PNG\r\n\032\n\000\000\000\rIHDR\000\000\000\020\000\000\000\020\001\003\000\000\000%=m\"\000\000\000\006PLTE\000\000\000\377\377\377\245\331\237\335\000\000\0003IDATx\234c\370\377\237\341\377_\206\377\237\031\016\2603\334?\314p\1772\303\315\315\f7\215\031\356\024\203\320\275\317\f\367\201R\314\f\017\300\350\377\177\000Q\206\027(\316]\233P\000\000\000\000IEND\256B`\202".dup.force_encoding(Encoding::BINARY)
included do
setup do
+ @key = SecureRandom.base58(24)
@service = self.class.const_get(:SERVICE)
- @service.upload FIXTURE_KEY, StringIO.new(FIXTURE_DATA)
+ @service.upload @key, StringIO.new(FIXTURE_DATA)
end
teardown do
- @service.delete FIXTURE_KEY
+ @service.delete @key
end
test "uploading with integrity" do
@@ -47,27 +47,40 @@ module ActiveStorage::Service::SharedServiceTests
end
test "downloading" do
- assert_equal FIXTURE_DATA, @service.download(FIXTURE_KEY)
+ assert_equal FIXTURE_DATA, @service.download(@key)
end
test "downloading in chunks" do
- chunks = []
+ key = SecureRandom.base58(24)
+ expected_chunks = [ "a" * 5.megabytes, "b" ]
+ actual_chunks = []
- @service.download(FIXTURE_KEY) do |chunk|
- chunks << chunk
+ begin
+ @service.upload key, StringIO.new(expected_chunks.join)
+
+ @service.download key do |chunk|
+ actual_chunks << chunk
+ end
+
+ assert_equal expected_chunks, actual_chunks, "Downloaded chunks did not match uploaded data"
+ ensure
+ @service.delete key
end
+ end
- assert_equal [ FIXTURE_DATA ], chunks
+ test "downloading partially" do
+ assert_equal "\x10\x00\x00", @service.download_chunk(@key, 19..21)
+ assert_equal "\x10\x00\x00", @service.download_chunk(@key, 19...22)
end
test "existing" do
- assert @service.exist?(FIXTURE_KEY)
- assert_not @service.exist?(FIXTURE_KEY + "nonsense")
+ assert @service.exist?(@key)
+ assert_not @service.exist?(@key + "nonsense")
end
test "deleting" do
- @service.delete FIXTURE_KEY
- assert_not @service.exist?(FIXTURE_KEY)
+ @service.delete @key
+ assert_not @service.exist?(@key)
end
test "deleting nonexistent key" do
@@ -75,5 +88,22 @@ module ActiveStorage::Service::SharedServiceTests
@service.delete SecureRandom.base58(24)
end
end
+
+ test "deleting by prefix" do
+ begin
+ @service.upload("a/a/a", StringIO.new(FIXTURE_DATA))
+ @service.upload("a/a/b", StringIO.new(FIXTURE_DATA))
+ @service.upload("a/b/a", StringIO.new(FIXTURE_DATA))
+
+ @service.delete_prefixed("a/a/")
+ assert_not @service.exist?("a/a/a")
+ assert_not @service.exist?("a/a/b")
+ assert @service.exist?("a/b/a")
+ ensure
+ @service.delete("a/a/a")
+ @service.delete("a/a/b")
+ @service.delete("a/b/a")
+ end
+ end
end
end
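For reference, a small sketch (assuming an already-configured service in `service`, e.g. a Disk service) of the partial-download and prefix-deletion APIs these shared tests now cover:

service.upload("a/a/a", StringIO.new("Hello world!"))

service.download_chunk("a/a/a", 0..4)   # => "Hello"  (inclusive byte range)
service.download_chunk("a/a/a", 0...5)  # => "Hello"  (exclusive byte range)

chunks = []
service.download("a/a/a") { |chunk| chunks << chunk }  # streamed download

service.delete_prefixed("a/a/")  # removes every key under the prefix
service.exist?("a/a/a")          # => false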
diff --git a/activestorage/test/template/image_tag_test.rb b/activestorage/test/template/image_tag_test.rb
index 80f183e0e3..f0b166c225 100644
--- a/activestorage/test/template/image_tag_test.rb
+++ b/activestorage/test/template/image_tag_test.rb
@@ -33,7 +33,7 @@ class ActiveStorage::ImageTagTest < ActionView::TestCase
test "error when attachment's empty" do
@user = User.create!(name: "DHH")
- assert_not @user.avatar.attached?
+ assert_not_predicate @user.avatar, :attached?
assert_raises(ArgumentError) { image_tag(@user.avatar) }
end
diff --git a/activestorage/test/test_helper.rb b/activestorage/test/test_helper.rb
index 38408cdad3..7b7926ac79 100644
--- a/activestorage/test/test_helper.rb
+++ b/activestorage/test/test_helper.rb
@@ -1,12 +1,13 @@
# frozen_string_literal: true
+ENV["RAILS_ENV"] ||= "test"
require_relative "dummy/config/environment.rb"
require "bundler/setup"
require "active_support"
require "active_support/test_case"
require "active_support/testing/autorun"
-require "mini_magick"
+require "image_processing/mini_magick"
begin
require "byebug"
@@ -40,22 +41,48 @@ ActiveStorage.verifier = ActiveSupport::MessageVerifier.new("Testing")
class ActiveSupport::TestCase
self.file_fixture_path = File.expand_path("fixtures/files", __dir__)
+ setup do
+ ActiveStorage::Current.host = "https://example.com"
+ end
+
+ teardown do
+ ActiveStorage::Current.reset
+ end
+
private
- def create_blob(data: "Hello world!", filename: "hello.txt", content_type: "text/plain")
- ActiveStorage::Blob.create_after_upload! io: StringIO.new(data), filename: filename, content_type: content_type
+ def create_blob(data: "Hello world!", filename: "hello.txt", content_type: "text/plain", identify: true)
+ ActiveStorage::Blob.create_after_upload! io: StringIO.new(data), filename: filename, content_type: content_type, identify: identify
end
- def create_file_blob(filename: "racecar.jpg", content_type: "image/jpeg")
- ActiveStorage::Blob.create_after_upload! io: file_fixture(filename).open, filename: filename, content_type: content_type
+ def create_file_blob(filename: "racecar.jpg", content_type: "image/jpeg", metadata: nil)
+ ActiveStorage::Blob.create_after_upload! io: file_fixture(filename).open, filename: filename, content_type: content_type, metadata: metadata
end
def create_blob_before_direct_upload(filename: "hello.txt", byte_size:, checksum:, content_type: "text/plain")
ActiveStorage::Blob.create_before_direct_upload! filename: filename, byte_size: byte_size, checksum: checksum, content_type: content_type
end
+ def directly_upload_file_blob(filename: "racecar.jpg", content_type: "image/jpeg")
+ file = file_fixture(filename)
+ byte_size = file.size
+ checksum = Digest::MD5.file(file).base64digest
+
+ create_blob_before_direct_upload(filename: filename, byte_size: byte_size, checksum: checksum, content_type: content_type).tap do |blob|
+ ActiveStorage::Blob.service.upload(blob.key, file.open)
+ end
+ end
+
def read_image(blob_or_variant)
MiniMagick::Image.open blob_or_variant.service.send(:path_for, blob_or_variant.key)
end
+
+ def extract_metadata_from(blob)
+ blob.tap(&:analyze).metadata
+ end
+
+ def fixture_file_upload(filename)
+ Rack::Test::UploadedFile.new file_fixture(filename).to_s
+ end
end
require "global_id"
@@ -63,6 +90,15 @@ GlobalID.app = "ActiveStorageExampleApp"
ActiveRecord::Base.send :include, GlobalID::Identification
class User < ActiveRecord::Base
+ validates :name, presence: true
+
has_one_attached :avatar
+ has_one_attached :cover_photo, dependent: false
+
has_many_attached :highlights
+ has_many_attached :vlogs, dependent: false
+end
+
+class Group < ActiveRecord::Base
+ has_one_attached :avatar
end
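As a hypothetical usage sketch (the test class and assertions below are invented for illustration, not part of the patch), the new `directly_upload_file_blob` and `extract_metadata_from` helpers might be used like so:

class DirectUploadAttachmentTest < ActiveSupport::TestCase
  test "attaching a directly uploaded blob and analyzing it" do
    blob = directly_upload_file_blob(filename: "racecar.jpg")
    user = User.create!(name: "DHH")

    user.avatar.attach(blob)
    assert_predicate user.avatar, :attached?

    assert_not_empty extract_metadata_from(blob)
  end
end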
diff --git a/activestorage/webpack.config.js b/activestorage/webpack.config.js
deleted file mode 100644
index 92c4530e7f..0000000000
--- a/activestorage/webpack.config.js
+++ /dev/null
@@ -1,27 +0,0 @@
-const webpack = require("webpack")
-const path = require("path")
-
-module.exports = {
- entry: {
- "activestorage": path.resolve(__dirname, "app/javascript/activestorage/index.js"),
- },
-
- output: {
- filename: "[name].js",
- path: path.resolve(__dirname, "app/assets/javascripts"),
- library: "ActiveStorage",
- libraryTarget: "umd"
- },
-
- module: {
- rules: [
- {
- test: /\.js$/,
- exclude: /node_modules/,
- use: {
- loader: "babel-loader"
- }
- }
- ]
- }
-}
diff --git a/activestorage/yarn.lock b/activestorage/yarn.lock
index dd09577445..44eae3c5b1 100644
--- a/activestorage/yarn.lock
+++ b/activestorage/yarn.lock
@@ -2,15 +2,13 @@
# yarn lockfile v1
-abbrev@1:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.0.tgz#d0554c2256636e2f56e7c2e5ad183f859428d81f"
+"@types/estree@0.0.38":
+ version "0.0.38"
+ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.38.tgz#c1be40aa933723c608820a99a373a16d215a1ca2"
-acorn-dynamic-import@^2.0.0:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/acorn-dynamic-import/-/acorn-dynamic-import-2.0.2.tgz#c752bd210bef679501b6c6cb7fc84f8f47158cc4"
- dependencies:
- acorn "^4.0.3"
+"@types/node@*":
+ version "9.6.6"
+ resolved "https://registry.yarnpkg.com/@types/node/-/node-9.6.6.tgz#439b91f9caf3983cad2eef1e11f6bedcbf9431d2"
acorn-jsx@^3.0.0:
version "3.0.1"
@@ -22,11 +20,7 @@ acorn@^3.0.4:
version "3.3.0"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a"
-acorn@^4.0.3:
- version "4.0.13"
- resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787"
-
-acorn@^5.0.0, acorn@^5.0.1:
+acorn@^5.0.1:
version "5.1.1"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.1.1.tgz#53fe161111f912ab999ee887a90a0bc52822fd75"
@@ -34,18 +28,14 @@ ajv-keywords@^1.0.0:
version "1.5.1"
resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-1.5.1.tgz#314dd0a4b3368fad3dfcdc54ede6171b886daf3c"
-ajv-keywords@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-2.1.0.tgz#a296e17f7bfae7c1ce4f7e0de53d29cb32162df0"
-
-ajv@^4.7.0, ajv@^4.9.1:
+ajv@^4.7.0:
version "4.11.8"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536"
dependencies:
co "^4.6.0"
json-stable-stringify "^1.0.1"
-ajv@^5.1.5, ajv@^5.2.0:
+ajv@^5.2.0:
version "5.2.2"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.2.2.tgz#47c68d69e86f5d953103b0074a9430dc63da5e39"
dependencies:
@@ -54,14 +44,6 @@ ajv@^5.1.5, ajv@^5.2.0:
json-schema-traverse "^0.3.0"
json-stable-stringify "^1.0.1"
-align-text@^0.1.1, align-text@^0.1.3:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117"
- dependencies:
- kind-of "^3.0.2"
- longest "^1.0.1"
- repeat-string "^1.5.2"
-
ansi-escapes@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-2.0.0.tgz#5bae52be424878dd9783e8910e3fc2922e83c81b"
@@ -84,24 +66,6 @@ ansi-styles@^3.1.0:
dependencies:
color-convert "^1.9.0"
-anymatch@^1.3.0:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.0.tgz#a3e52fa39168c825ff57b0248126ce5a8ff95507"
- dependencies:
- arrify "^1.0.0"
- micromatch "^2.1.5"
-
-aproba@^1.0.3:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.1.2.tgz#45c6629094de4e96f693ef7eab74ae079c240fc1"
-
-are-we-there-yet@~1.1.2:
- version "1.1.4"
- resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.4.tgz#bb5dca382bb94f05e15194373d16fd3ba1ca110d"
- dependencies:
- delegates "^1.0.0"
- readable-stream "^2.0.6"
-
argparse@^1.0.7:
version "1.0.9"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.9.tgz#73d83bc263f86e97f8cc4f6bae1b0e90a7d22c86"
@@ -136,54 +100,6 @@ arrify@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d"
-asn1.js@^4.0.0:
- version "4.9.1"
- resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.9.1.tgz#48ba240b45a9280e94748990ba597d216617fd40"
- dependencies:
- bn.js "^4.0.0"
- inherits "^2.0.1"
- minimalistic-assert "^1.0.0"
-
-asn1@~0.2.3:
- version "0.2.3"
- resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.3.tgz#dac8787713c9966849fc8180777ebe9c1ddf3b86"
-
-assert-plus@1.0.0, assert-plus@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525"
-
-assert-plus@^0.2.0:
- version "0.2.0"
- resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234"
-
-assert@^1.1.1:
- version "1.4.1"
- resolved "https://registry.yarnpkg.com/assert/-/assert-1.4.1.tgz#99912d591836b5a6f5b345c0f07eefc08fc65d91"
- dependencies:
- util "0.10.3"
-
-async-each@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d"
-
-async@^2.1.2:
- version "2.5.0"
- resolved "https://registry.yarnpkg.com/async/-/async-2.5.0.tgz#843190fd6b7357a0b9e1c956edddd5ec8462b54d"
- dependencies:
- lodash "^4.14.0"
-
-asynckit@^0.4.0:
- version "0.4.0"
- resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
-
-aws-sign2@~0.6.0:
- version "0.6.0"
- resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f"
-
-aws4@^1.2.1:
- version "1.6.0"
- resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e"
-
babel-code-frame@^6.22.0:
version "6.22.0"
resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.22.0.tgz#027620bee567a88c32561574e7fd0801d33118e4"
@@ -330,14 +246,6 @@ babel-helpers@^6.24.1:
babel-runtime "^6.22.0"
babel-template "^6.24.1"
-babel-loader@^7.1.1:
- version "7.1.1"
- resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-7.1.1.tgz#b87134c8b12e3e4c2a94e0546085bc680a2b8488"
- dependencies:
- find-cache-dir "^1.0.0"
- loader-utils "^1.0.2"
- mkdirp "^0.5.1"
-
babel-messages@^6.23.0:
version "6.23.0"
resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e"
@@ -350,6 +258,12 @@ babel-plugin-check-es2015-constants@^6.22.0:
dependencies:
babel-runtime "^6.22.0"
+babel-plugin-external-helpers@^6.22.0:
+ version "6.22.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-external-helpers/-/babel-plugin-external-helpers-6.22.0.tgz#2285f48b02bd5dede85175caf8c62e86adccefa1"
+ dependencies:
+ babel-runtime "^6.22.0"
+
babel-plugin-syntax-async-functions@^6.8.0:
version "6.13.0"
resolved "https://registry.yarnpkg.com/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz#cad9cad1191b5ad634bf30ae0872391e0647be95"
@@ -654,40 +568,6 @@ balanced-match@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
-base64-js@^1.0.2:
- version "1.2.1"
- resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.2.1.tgz#a91947da1f4a516ea38e5b4ec0ec3773675e0886"
-
-bcrypt-pbkdf@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d"
- dependencies:
- tweetnacl "^0.14.3"
-
-big.js@^3.1.3:
- version "3.1.3"
- resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.1.3.tgz#4cada2193652eb3ca9ec8e55c9015669c9806978"
-
-binary-extensions@^1.0.0:
- version "1.9.0"
- resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.9.0.tgz#66506c16ce6f4d6928a5b3cd6a33ca41e941e37b"
-
-block-stream@*:
- version "0.0.9"
- resolved "https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a"
- dependencies:
- inherits "~2.0.0"
-
-bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0:
- version "4.11.7"
- resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.7.tgz#ddb048e50d9482790094c13eb3fcfc833ce7ab46"
-
-boom@2.x.x:
- version "2.10.1"
- resolved "https://registry.yarnpkg.com/boom/-/boom-2.10.1.tgz#39c8918ceff5799f83f9492a848f625add0c766f"
- dependencies:
- hoek "2.x.x"
-
brace-expansion@^1.1.7:
version "1.1.8"
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.8.tgz#c07b211c7c952ec1f8efd51a77ef0d1d3990a292"
@@ -703,61 +583,6 @@ braces@^1.8.2:
preserve "^0.2.0"
repeat-element "^1.1.2"
-brorand@^1.0.1:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f"
-
-browserify-aes@^1.0.0, browserify-aes@^1.0.4:
- version "1.0.6"
- resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.0.6.tgz#5e7725dbdef1fd5930d4ebab48567ce451c48a0a"
- dependencies:
- buffer-xor "^1.0.2"
- cipher-base "^1.0.0"
- create-hash "^1.1.0"
- evp_bytestokey "^1.0.0"
- inherits "^2.0.1"
-
-browserify-cipher@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.0.tgz#9988244874bf5ed4e28da95666dcd66ac8fc363a"
- dependencies:
- browserify-aes "^1.0.4"
- browserify-des "^1.0.0"
- evp_bytestokey "^1.0.0"
-
-browserify-des@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.0.tgz#daa277717470922ed2fe18594118a175439721dd"
- dependencies:
- cipher-base "^1.0.1"
- des.js "^1.0.0"
- inherits "^2.0.1"
-
-browserify-rsa@^4.0.0:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524"
- dependencies:
- bn.js "^4.1.0"
- randombytes "^2.0.1"
-
-browserify-sign@^4.0.0:
- version "4.0.4"
- resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298"
- dependencies:
- bn.js "^4.1.1"
- browserify-rsa "^4.0.0"
- create-hash "^1.1.0"
- create-hmac "^1.1.2"
- elliptic "^6.0.0"
- inherits "^2.0.1"
- parse-asn1 "^5.0.0"
-
-browserify-zlib@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.1.4.tgz#bb35f8a519f600e0fa6b8485241c979d0141fb2d"
- dependencies:
- pako "~0.2.0"
-
browserslist@^2.1.2:
version "2.2.2"
resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-2.2.2.tgz#e9b4618b8a01c193f9786beea09f6fd10dbe31c3"
@@ -765,25 +590,13 @@ browserslist@^2.1.2:
caniuse-lite "^1.0.30000704"
electron-to-chromium "^1.3.16"
-buffer-xor@^1.0.2:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9"
-
-buffer@^4.3.0:
- version "4.9.1"
- resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.1.tgz#6d1bb601b07a4efced97094132093027c95bc298"
- dependencies:
- base64-js "^1.0.2"
- ieee754 "^1.1.4"
- isarray "^1.0.0"
-
builtin-modules@^1.0.0, builtin-modules@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f"
-builtin-status-codes@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8"
+builtin-modules@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-2.0.0.tgz#60b7ef5ae6546bd7deefa74b08b62a43a232648e"
caller-path@^0.1.0:
version "0.1.0"
@@ -795,29 +608,10 @@ callsites@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/callsites/-/callsites-0.2.0.tgz#afab96262910a7f33c19a5775825c69f34e350ca"
-camelcase@^1.0.2:
- version "1.2.1"
- resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39"
-
-camelcase@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd"
-
caniuse-lite@^1.0.30000704:
version "1.0.30000706"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000706.tgz#bc59abc41ba7d4a3634dda95befded6114e1f24e"
-caseless@~0.12.0:
- version "0.12.0"
- resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc"
-
-center-align@^0.1.1:
- version "0.1.3"
- resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad"
- dependencies:
- align-text "^0.1.3"
- lazy-cache "^1.0.3"
-
chalk@^1.1.0, chalk@^1.1.1, chalk@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98"
@@ -836,28 +630,6 @@ chalk@^2.0.0:
escape-string-regexp "^1.0.5"
supports-color "^4.0.0"
-chokidar@^1.7.0:
- version "1.7.0"
- resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468"
- dependencies:
- anymatch "^1.3.0"
- async-each "^1.0.0"
- glob-parent "^2.0.0"
- inherits "^2.0.1"
- is-binary-path "^1.0.0"
- is-glob "^2.0.0"
- path-is-absolute "^1.0.0"
- readdirp "^2.0.0"
- optionalDependencies:
- fsevents "^1.0.0"
-
-cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3:
- version "1.0.4"
- resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de"
- dependencies:
- inherits "^2.0.1"
- safe-buffer "^5.0.1"
-
circular-json@^0.3.1:
version "0.3.3"
resolved "https://registry.yarnpkg.com/circular-json/-/circular-json-0.3.3.tgz#815c99ea84f6809529d2f45791bdf82711352d66"
@@ -872,30 +644,10 @@ cli-width@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.1.0.tgz#b234ca209b29ef66fc518d9b98d5847b00edf00a"
-cliui@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1"
- dependencies:
- center-align "^0.1.1"
- right-align "^0.1.1"
- wordwrap "0.0.2"
-
-cliui@^3.2.0:
- version "3.2.0"
- resolved "https://registry.yarnpkg.com/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d"
- dependencies:
- string-width "^1.0.1"
- strip-ansi "^3.0.1"
- wrap-ansi "^2.0.0"
-
co@^4.6.0:
version "4.6.0"
resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184"
-code-point-at@^1.0.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77"
-
color-convert@^1.9.0:
version "1.9.0"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.0.tgz#1accf97dd739b983bf994d56fec8f95853641b7a"
@@ -906,15 +658,9 @@ color-name@^1.1.1:
version "1.1.3"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
-combined-stream@^1.0.5, combined-stream@~1.0.5:
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.5.tgz#938370a57b4a51dea2c77c15d5c5fdf895164009"
- dependencies:
- delayed-stream "~1.0.0"
-
-commondir@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b"
+commander@~2.13.0:
+ version "2.13.0"
+ resolved "https://registry.yarnpkg.com/commander/-/commander-2.13.0.tgz#6964bca67685df7c1f1430c584f07d7597885b9c"
concat-map@0.0.1:
version "0.0.1"
@@ -928,20 +674,6 @@ concat-stream@^1.6.0:
readable-stream "^2.2.2"
typedarray "^0.0.6"
-console-browserify@^1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.1.0.tgz#f0241c45730a9fc6323b206dbf38edc741d0bb10"
- dependencies:
- date-now "^0.1.4"
-
-console-control-strings@^1.0.0, console-control-strings@~1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e"
-
-constants-browserify@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75"
-
contains-path@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a"
@@ -958,34 +690,7 @@ core-util-is@~1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
-create-ecdh@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.0.tgz#888c723596cdf7612f6498233eebd7a35301737d"
- dependencies:
- bn.js "^4.1.0"
- elliptic "^6.0.0"
-
-create-hash@^1.1.0, create-hash@^1.1.1, create-hash@^1.1.2:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.1.3.tgz#606042ac8b9262750f483caddab0f5819172d8fd"
- dependencies:
- cipher-base "^1.0.1"
- inherits "^2.0.1"
- ripemd160 "^2.0.0"
- sha.js "^2.4.0"
-
-create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4:
- version "1.1.6"
- resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.6.tgz#acb9e221a4e17bdb076e90657c42b93e3726cf06"
- dependencies:
- cipher-base "^1.0.3"
- create-hash "^1.1.0"
- inherits "^2.0.1"
- ripemd160 "^2.0.0"
- safe-buffer "^5.0.1"
- sha.js "^2.4.8"
-
-cross-spawn@^5.0.1, cross-spawn@^5.1.0:
+cross-spawn@^5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449"
dependencies:
@@ -993,57 +698,12 @@ cross-spawn@^5.0.1, cross-spawn@^5.1.0:
shebang-command "^1.2.0"
which "^1.2.9"
-cryptiles@2.x.x:
- version "2.0.5"
- resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8"
- dependencies:
- boom "2.x.x"
-
-crypto-browserify@^3.11.0:
- version "3.11.1"
- resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.11.1.tgz#948945efc6757a400d6e5e5af47194d10064279f"
- dependencies:
- browserify-cipher "^1.0.0"
- browserify-sign "^4.0.0"
- create-ecdh "^4.0.0"
- create-hash "^1.1.0"
- create-hmac "^1.1.0"
- diffie-hellman "^5.0.0"
- inherits "^2.0.1"
- pbkdf2 "^3.0.3"
- public-encrypt "^4.0.0"
- randombytes "^2.0.0"
-
-d@1:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/d/-/d-1.0.0.tgz#754bb5bfe55451da69a58b94d45f4c5b0462d58f"
- dependencies:
- es5-ext "^0.10.9"
-
-dashdash@^1.12.0:
- version "1.14.1"
- resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0"
- dependencies:
- assert-plus "^1.0.0"
-
-date-now@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/date-now/-/date-now-0.1.4.tgz#eaf439fd4d4848ad74e5cc7dbef200672b9e345b"
-
debug@^2.1.1, debug@^2.2.0, debug@^2.6.8:
version "2.6.8"
resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.8.tgz#e731531ca2ede27d188222427da17821d68ff4fc"
dependencies:
ms "2.0.0"
-decamelize@^1.0.0, decamelize@^1.1.1:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
-
-deep-extend@~0.4.0:
- version "0.4.2"
- resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f"
-
deep-is@~0.1.3:
version "0.1.3"
resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34"
@@ -1060,35 +720,12 @@ del@^2.0.2:
pinkie-promise "^2.0.0"
rimraf "^2.2.8"
-delayed-stream@~1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
-
-delegates@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a"
-
-des.js@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.0.tgz#c074d2e2aa6a8a9a07dbd61f9a15c2cd83ec8ecc"
- dependencies:
- inherits "^2.0.1"
- minimalistic-assert "^1.0.0"
-
detect-indent@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208"
dependencies:
repeating "^2.0.0"
-diffie-hellman@^5.0.0:
- version "5.0.2"
- resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.2.tgz#b5835739270cfe26acf632099fded2a07f209e5e"
- dependencies:
- bn.js "^4.1.0"
- miller-rabin "^4.0.0"
- randombytes "^2.0.0"
-
doctrine@1.5.0:
version "1.5.0"
resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-1.5.0.tgz#379dce730f6166f76cefa4e6707a159b02c5a6fa"
@@ -1103,128 +740,20 @@ doctrine@^2.0.0:
esutils "^2.0.2"
isarray "^1.0.0"
-domain-browser@^1.1.1:
- version "1.1.7"
- resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.1.7.tgz#867aa4b093faa05f1de08c06f4d7b21fdf8698bc"
-
-ecc-jsbn@~0.1.1:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz#0fc73a9ed5f0d53c38193398523ef7e543777505"
- dependencies:
- jsbn "~0.1.0"
-
electron-to-chromium@^1.3.16:
version "1.3.16"
resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.16.tgz#d0e026735754770901ae301a21664cba45d92f7d"
-elliptic@^6.0.0:
- version "6.4.0"
- resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.4.0.tgz#cac9af8762c85836187003c8dfe193e5e2eae5df"
- dependencies:
- bn.js "^4.4.0"
- brorand "^1.0.1"
- hash.js "^1.0.0"
- hmac-drbg "^1.0.0"
- inherits "^2.0.1"
- minimalistic-assert "^1.0.0"
- minimalistic-crypto-utils "^1.0.0"
-
-emojis-list@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389"
-
-enhanced-resolve@^3.4.0:
- version "3.4.1"
- resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-3.4.1.tgz#0421e339fd71419b3da13d129b3979040230476e"
- dependencies:
- graceful-fs "^4.1.2"
- memory-fs "^0.4.0"
- object-assign "^4.0.1"
- tapable "^0.2.7"
-
-errno@^0.1.3:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.4.tgz#b896e23a9e5e8ba33871fc996abd3635fc9a1c7d"
- dependencies:
- prr "~0.0.0"
-
error-ex@^1.2.0:
version "1.3.1"
resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.1.tgz#f855a86ce61adc4e8621c3cda21e7a7612c3a8dc"
dependencies:
is-arrayish "^0.2.1"
-es5-ext@^0.10.14, es5-ext@^0.10.9, es5-ext@~0.10.14:
- version "0.10.24"
- resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.24.tgz#a55877c9924bc0c8d9bd3c2cbe17495ac1709b14"
- dependencies:
- es6-iterator "2"
- es6-symbol "~3.1"
-
-es6-iterator@2, es6-iterator@^2.0.1, es6-iterator@~2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.1.tgz#8e319c9f0453bf575d374940a655920e59ca5512"
- dependencies:
- d "1"
- es5-ext "^0.10.14"
- es6-symbol "^3.1"
-
-es6-map@^0.1.3:
- version "0.1.5"
- resolved "https://registry.yarnpkg.com/es6-map/-/es6-map-0.1.5.tgz#9136e0503dcc06a301690f0bb14ff4e364e949f0"
- dependencies:
- d "1"
- es5-ext "~0.10.14"
- es6-iterator "~2.0.1"
- es6-set "~0.1.5"
- es6-symbol "~3.1.1"
- event-emitter "~0.3.5"
-
-es6-set@~0.1.5:
- version "0.1.5"
- resolved "https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.5.tgz#d2b3ec5d4d800ced818db538d28974db0a73ccb1"
- dependencies:
- d "1"
- es5-ext "~0.10.14"
- es6-iterator "~2.0.1"
- es6-symbol "3.1.1"
- event-emitter "~0.3.5"
-
-es6-symbol@3.1.1, es6-symbol@^3.1, es6-symbol@^3.1.1, es6-symbol@~3.1, es6-symbol@~3.1.1:
- version "3.1.1"
- resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.1.tgz#bf00ef4fdab6ba1b46ecb7b629b4c7ed5715cc77"
- dependencies:
- d "1"
- es5-ext "~0.10.14"
-
-es6-weak-map@^2.0.1:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.2.tgz#5e3ab32251ffd1538a1f8e5ffa1357772f92d96f"
- dependencies:
- d "1"
- es5-ext "^0.10.14"
- es6-iterator "^2.0.1"
- es6-symbol "^3.1.1"
-
escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
-escope@^3.6.0:
- version "3.6.0"
- resolved "https://registry.yarnpkg.com/escope/-/escope-3.6.0.tgz#e01975e812781a163a6dadfdd80398dc64c889c3"
- dependencies:
- es6-map "^0.1.3"
- es6-weak-map "^2.0.1"
- esrecurse "^4.1.0"
- estraverse "^4.1.1"
-
-eslint-config-airbnb-base@^11.3.1:
- version "11.3.1"
- resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-11.3.1.tgz#c0ab108c9beed503cb999e4c60f4ef98eda0ed30"
- dependencies:
- eslint-restricted-globals "^0.1.1"
-
eslint-import-resolver-node@^0.3.1:
version "0.3.1"
resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.1.tgz#4422574cde66a9a7b099938ee4d508a199e0e3cc"
@@ -1254,10 +783,6 @@ eslint-plugin-import@^2.7.0:
minimatch "^3.0.3"
read-pkg-up "^2.0.0"
-eslint-restricted-globals@^0.1.1:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/eslint-restricted-globals/-/eslint-restricted-globals-0.1.1.tgz#35f0d5cbc64c2e3ed62e93b4b1a7af05ba7ed4d7"
-
eslint-scope@^3.7.1:
version "3.7.1"
resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-3.7.1.tgz#3d63c3edfda02e06e01a452ad88caacc7cdcb6e8"
@@ -1334,38 +859,21 @@ estraverse@^4.0.0, estraverse@^4.1.0, estraverse@^4.1.1, estraverse@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13"
-esutils@^2.0.2:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b"
-
-event-emitter@~0.3.5:
- version "0.3.5"
- resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39"
- dependencies:
- d "1"
- es5-ext "~0.10.14"
+estree-walker@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.2.1.tgz#bdafe8095383d8414d5dc2ecf4c9173b6db9412e"
-events@^1.0.0:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924"
+estree-walker@^0.3.0:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.3.1.tgz#e6b1a51cf7292524e7237c312e5fe6660c1ce1aa"
-evp_bytestokey@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.0.tgz#497b66ad9fef65cd7c08a6180824ba1476b66e53"
- dependencies:
- create-hash "^1.1.1"
+estree-walker@^0.5.1:
+ version "0.5.1"
+ resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.5.1.tgz#64fc375053abc6f57d73e9bd2f004644ad3c5854"
-execa@^0.7.0:
- version "0.7.0"
- resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777"
- dependencies:
- cross-spawn "^5.0.1"
- get-stream "^3.0.0"
- is-stream "^1.1.0"
- npm-run-path "^2.0.0"
- p-finally "^1.0.0"
- signal-exit "^3.0.0"
- strip-eof "^1.0.0"
+esutils@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b"
expand-brackets@^0.1.4:
version "0.1.5"
@@ -1379,10 +887,6 @@ expand-range@^1.8.1:
dependencies:
fill-range "^2.1.0"
-extend@~3.0.0:
- version "3.0.1"
- resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444"
-
external-editor@^2.0.4:
version "2.0.4"
resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.0.4.tgz#1ed9199da9cbfe2ef2f7a31b2fde8b0d12368972"
@@ -1397,10 +901,6 @@ extglob@^0.3.1:
dependencies:
is-extglob "^1.0.0"
-extsprintf@1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.0.2.tgz#e1080e0658e300b06294990cc70e1502235fd550"
-
fast-deep-equal@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz#96256a3bc975595eb36d82e9929d060d893439ff"
@@ -1436,14 +936,6 @@ fill-range@^2.1.0:
repeat-element "^1.1.2"
repeat-string "^1.5.2"
-find-cache-dir@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-1.0.0.tgz#9288e3e9e3cc3748717d39eade17cf71fc30ee6f"
- dependencies:
- commondir "^1.0.1"
- make-dir "^1.0.0"
- pkg-dir "^2.0.0"
-
find-up@^1.0.0:
version "1.1.2"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f"
@@ -1451,7 +943,7 @@ find-up@^1.0.0:
path-exists "^2.0.0"
pinkie-promise "^2.0.0"
-find-up@^2.0.0, find-up@^2.1.0:
+find-up@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
dependencies:
@@ -1476,46 +968,10 @@ for-own@^0.1.4:
dependencies:
for-in "^1.0.1"
-forever-agent@~0.6.1:
- version "0.6.1"
- resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
-
-form-data@~2.1.1:
- version "2.1.4"
- resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.1.4.tgz#33c183acf193276ecaa98143a69e94bfee1750d1"
- dependencies:
- asynckit "^0.4.0"
- combined-stream "^1.0.5"
- mime-types "^2.1.12"
-
fs.realpath@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
-fsevents@^1.0.0:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.1.2.tgz#3282b713fb3ad80ede0e9fcf4611b5aa6fc033f4"
- dependencies:
- nan "^2.3.0"
- node-pre-gyp "^0.6.36"
-
-fstream-ignore@^1.0.5:
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/fstream-ignore/-/fstream-ignore-1.0.5.tgz#9c31dae34767018fe1d249b24dada67d092da105"
- dependencies:
- fstream "^1.0.0"
- inherits "2"
- minimatch "^3.0.0"
-
-fstream@^1.0.0, fstream@^1.0.10, fstream@^1.0.2:
- version "1.0.11"
- resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.11.tgz#5c1fb1f117477114f0632a0eb4b71b3cb0fd3171"
- dependencies:
- graceful-fs "^4.1.2"
- inherits "~2.0.0"
- mkdirp ">=0.5 0"
- rimraf "2"
-
function-bind@^1.0.2:
version "1.1.0"
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.0.tgz#16176714c801798e4e8f2cf7f7529467bb4a5771"
@@ -1524,33 +980,6 @@ functional-red-black-tree@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327"
-gauge@~2.7.3:
- version "2.7.4"
- resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7"
- dependencies:
- aproba "^1.0.3"
- console-control-strings "^1.0.0"
- has-unicode "^2.0.0"
- object-assign "^4.1.0"
- signal-exit "^3.0.0"
- string-width "^1.0.1"
- strip-ansi "^3.0.1"
- wide-align "^1.1.0"
-
-get-caller-file@^1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.2.tgz#f702e63127e7e231c160a80c1554acb70d5047e5"
-
-get-stream@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14"
-
-getpass@^0.1.1:
- version "0.1.7"
- resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa"
- dependencies:
- assert-plus "^1.0.0"
-
glob-base@^0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4"
@@ -1594,17 +1023,6 @@ graceful-fs@^4.1.2:
version "4.1.11"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658"
-har-schema@^1.0.5:
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-1.0.5.tgz#d263135f43307c02c602afc8fe95970c0151369e"
-
-har-validator@~4.2.1:
- version "4.2.1"
- resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-4.2.1.tgz#33481d0f1bbff600dd203d75812a6a5fba002e2a"
- dependencies:
- ajv "^4.9.1"
- har-schema "^1.0.5"
-
has-ansi@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91"
@@ -1615,50 +1033,12 @@ has-flag@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51"
-has-unicode@^2.0.0:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9"
-
has@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/has/-/has-1.0.1.tgz#8461733f538b0837c9361e39a9ab9e9704dc2f28"
dependencies:
function-bind "^1.0.2"
-hash-base@^2.0.0:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-2.0.2.tgz#66ea1d856db4e8a5470cadf6fce23ae5244ef2e1"
- dependencies:
- inherits "^2.0.1"
-
-hash.js@^1.0.0, hash.js@^1.0.3:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.3.tgz#340dedbe6290187151c1ea1d777a3448935df846"
- dependencies:
- inherits "^2.0.3"
- minimalistic-assert "^1.0.0"
-
-hawk@~3.1.3:
- version "3.1.3"
- resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4"
- dependencies:
- boom "2.x.x"
- cryptiles "2.x.x"
- hoek "2.x.x"
- sntp "1.x.x"
-
-hmac-drbg@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1"
- dependencies:
- hash.js "^1.0.3"
- minimalistic-assert "^1.0.0"
- minimalistic-crypto-utils "^1.0.1"
-
-hoek@2.x.x:
- version "2.16.3"
- resolved "https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed"
-
home-or-tmp@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8"
@@ -1670,26 +1050,10 @@ hosted-git-info@^2.1.4:
version "2.5.0"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.5.0.tgz#6d60e34b3abbc8313062c3b798ef8d901a07af3c"
-http-signature@~1.1.0:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.1.1.tgz#df72e267066cd0ac67fb76adf8e134a8fbcf91bf"
- dependencies:
- assert-plus "^0.2.0"
- jsprim "^1.2.2"
- sshpk "^1.7.0"
-
-https-browserify@0.0.1:
- version "0.0.1"
- resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-0.0.1.tgz#3f91365cabe60b77ed0ebba24b454e3e09d95a82"
-
iconv-lite@^0.4.17:
version "0.4.18"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.18.tgz#23d8656b16aae6742ac29732ea8f0336a4789cf2"
-ieee754@^1.1.4:
- version "1.1.8"
- resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.8.tgz#be33d40ac10ef1926701f6f08a2d86fbfd1ad3e4"
-
ignore@^3.3.3:
version "3.3.3"
resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.3.tgz#432352e57accd87ab3110e82d3fea0e47812156d"
@@ -1698,10 +1062,6 @@ imurmurhash@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
-indexof@0.0.1:
- version "0.0.1"
- resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d"
-
inflight@^1.0.4:
version "1.0.6"
resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
@@ -1709,18 +1069,10 @@ inflight@^1.0.4:
once "^1.3.0"
wrappy "1"
-inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.1, inherits@~2.0.3:
+inherits@2, inherits@^2.0.3, inherits@~2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
-inherits@2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1"
-
-ini@~1.3.0:
- version "1.3.4"
- resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e"
-
inquirer@^3.0.6:
version "3.2.1"
resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-3.2.1.tgz#06ceb0f540f45ca548c17d6840959878265fa175"
@@ -1740,30 +1092,16 @@ inquirer@^3.0.6:
strip-ansi "^4.0.0"
through "^2.3.6"
-interpret@^1.0.0:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.0.3.tgz#cbc35c62eeee73f19ab7b10a801511401afc0f90"
-
invariant@^2.2.0, invariant@^2.2.2:
version "2.2.2"
resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.2.tgz#9e1f56ac0acdb6bf303306f338be3b204ae60360"
dependencies:
loose-envify "^1.0.0"
-invert-kv@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6"
-
is-arrayish@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
-is-binary-path@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898"
- dependencies:
- binary-extensions "^1.0.0"
-
is-buffer@^1.1.5:
version "1.1.5"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.5.tgz#1f3b26ef613b214b88cbca23cc6c01d87961eecc"
@@ -1798,12 +1136,6 @@ is-finite@^1.0.0:
dependencies:
number-is-nan "^1.0.0"
-is-fullwidth-code-point@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb"
- dependencies:
- number-is-nan "^1.0.0"
-
is-fullwidth-code-point@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
@@ -1814,6 +1146,10 @@ is-glob@^2.0.0, is-glob@^2.0.1:
dependencies:
is-extglob "^1.0.0"
+is-module@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591"
+
is-number@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f"
@@ -1860,14 +1196,6 @@ is-resolvable@^1.0.0:
dependencies:
tryit "^1.0.1"
-is-stream@^1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
-
-is-typedarray@~1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
-
isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
@@ -1882,10 +1210,6 @@ isobject@^2.0.0:
dependencies:
isarray "1.0.0"
-isstream@~0.1.2:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a"
-
js-tokens@^3.0.0:
version "3.0.2"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b"
@@ -1897,10 +1221,6 @@ js-yaml@^3.8.4:
argparse "^1.0.7"
esprima "^4.0.0"
-jsbn@~0.1.0:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513"
-
jschardet@^1.4.2:
version "1.5.0"
resolved "https://registry.yarnpkg.com/jschardet/-/jschardet-1.5.0.tgz#a61f310306a5a71188e1b1acd08add3cfbb08b1e"
@@ -1913,29 +1233,17 @@ jsesc@~0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d"
-json-loader@^0.5.4:
- version "0.5.7"
- resolved "https://registry.yarnpkg.com/json-loader/-/json-loader-0.5.7.tgz#dca14a70235ff82f0ac9a3abeb60d337a365185d"
-
json-schema-traverse@^0.3.0:
version "0.3.1"
resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340"
-json-schema@0.2.3:
- version "0.2.3"
- resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13"
-
json-stable-stringify@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af"
dependencies:
jsonify "~0.0.0"
-json-stringify-safe@~5.0.1:
- version "5.0.1"
- resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
-
-json5@^0.5.0, json5@^0.5.1:
+json5@^0.5.0:
version "0.5.1"
resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821"
@@ -1943,15 +1251,6 @@ jsonify@~0.0.0:
version "0.0.0"
resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73"
-jsprim@^1.2.2:
- version "1.4.0"
- resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.0.tgz#a3b87e40298d8c380552d8cc7628a0bb95a22918"
- dependencies:
- assert-plus "1.0.0"
- extsprintf "1.0.2"
- json-schema "0.2.3"
- verror "1.3.6"
-
kind-of@^3.0.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64"
@@ -1964,16 +1263,6 @@ kind-of@^4.0.0:
dependencies:
is-buffer "^1.1.5"
-lazy-cache@^1.0.3:
- version "1.0.4"
- resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e"
-
-lcid@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835"
- dependencies:
- invert-kv "^1.0.0"
-
levn@^0.3.0, levn@~0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee"
@@ -1990,18 +1279,6 @@ load-json-file@^2.0.0:
pify "^2.0.0"
strip-bom "^3.0.0"
-loader-runner@^2.3.0:
- version "2.3.0"
- resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.3.0.tgz#f482aea82d543e07921700d5a46ef26fdac6b8a2"
-
-loader-utils@^1.0.2, loader-utils@^1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.1.0.tgz#c98aef488bcceda2ffb5e2de646d6a754429f5cd"
- dependencies:
- big.js "^3.1.3"
- emojis-list "^2.0.0"
- json5 "^0.5.0"
-
locate-path@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
@@ -2013,14 +1290,10 @@ lodash.cond@^4.3.0:
version "4.5.2"
resolved "https://registry.yarnpkg.com/lodash.cond/-/lodash.cond-4.5.2.tgz#f471a1da486be60f6ab955d17115523dd1d255d5"
-lodash@^4.0.0, lodash@^4.14.0, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0:
+lodash@^4.0.0, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0:
version "4.17.4"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae"
-longest@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097"
-
loose-envify@^1.0.0:
version "1.3.1"
resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.3.1.tgz#d1a8ad33fa9ce0e713d65fdd0ac8b748d478c848"
@@ -2034,26 +1307,13 @@ lru-cache@^4.0.1:
pseudomap "^1.0.2"
yallist "^2.1.2"
-make-dir@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.0.0.tgz#97a011751e91dd87cfadef58832ebb04936de978"
- dependencies:
- pify "^2.3.0"
-
-mem@^1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76"
+magic-string@^0.22.4:
+ version "0.22.5"
+ resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.22.5.tgz#8e9cf5afddf44385c1da5bc2a6a0dbd10b03657e"
dependencies:
- mimic-fn "^1.0.0"
+ vlq "^0.2.2"
-memory-fs@^0.4.0, memory-fs@~0.4.1:
- version "0.4.1"
- resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552"
- dependencies:
- errno "^0.1.3"
- readable-stream "^2.0.1"
-
-micromatch@^2.1.5:
+micromatch@^2.3.11:
version "2.3.11"
resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565"
dependencies:
@@ -2071,36 +1331,11 @@ micromatch@^2.1.5:
parse-glob "^3.0.4"
regex-cache "^0.4.2"
-miller-rabin@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.0.tgz#4a62fb1d42933c05583982f4c716f6fb9e6c6d3d"
- dependencies:
- bn.js "^4.0.0"
- brorand "^1.0.1"
-
-mime-db@~1.29.0:
- version "1.29.0"
- resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.29.0.tgz#48d26d235589651704ac5916ca06001914266878"
-
-mime-types@^2.1.12, mime-types@~2.1.7:
- version "2.1.16"
- resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.16.tgz#2b858a52e5ecd516db897ac2be87487830698e23"
- dependencies:
- mime-db "~1.29.0"
-
mimic-fn@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.1.0.tgz#e667783d92e89dbd342818b5230b9d62a672ad18"
-minimalistic-assert@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.0.tgz#702be2dda6b37f4836bcb3f5db56641b64a1d3d3"
-
-minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a"
-
-minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4:
+minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4:
version "3.0.4"
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
dependencies:
@@ -2110,11 +1345,7 @@ minimist@0.0.8:
version "0.0.8"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
-minimist@^1.2.0:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284"
-
-"mkdirp@>=0.5 0", mkdirp@^0.5.1, mkdirp@~0.5.0:
+mkdirp@^0.5.1:
version "0.5.1"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
dependencies:
@@ -2128,63 +1359,10 @@ mute-stream@0.0.7:
version "0.0.7"
resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab"
-nan@^2.3.0:
- version "2.6.2"
- resolved "https://registry.yarnpkg.com/nan/-/nan-2.6.2.tgz#e4ff34e6c95fdfb5aecc08de6596f43605a7db45"
-
natural-compare@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
-node-libs-browser@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.0.0.tgz#a3a59ec97024985b46e958379646f96c4b616646"
- dependencies:
- assert "^1.1.1"
- browserify-zlib "^0.1.4"
- buffer "^4.3.0"
- console-browserify "^1.1.0"
- constants-browserify "^1.0.0"
- crypto-browserify "^3.11.0"
- domain-browser "^1.1.1"
- events "^1.0.0"
- https-browserify "0.0.1"
- os-browserify "^0.2.0"
- path-browserify "0.0.0"
- process "^0.11.0"
- punycode "^1.2.4"
- querystring-es3 "^0.2.0"
- readable-stream "^2.0.5"
- stream-browserify "^2.0.1"
- stream-http "^2.3.1"
- string_decoder "^0.10.25"
- timers-browserify "^2.0.2"
- tty-browserify "0.0.0"
- url "^0.11.0"
- util "^0.10.3"
- vm-browserify "0.0.4"
-
-node-pre-gyp@^0.6.36:
- version "0.6.36"
- resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.36.tgz#db604112cb74e0d477554e9b505b17abddfab786"
- dependencies:
- mkdirp "^0.5.1"
- nopt "^4.0.1"
- npmlog "^4.0.2"
- rc "^1.1.7"
- request "^2.81.0"
- rimraf "^2.6.1"
- semver "^5.3.0"
- tar "^2.2.1"
- tar-pack "^3.4.0"
-
-nopt@^4.0.1:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d"
- dependencies:
- abbrev "1"
- osenv "^0.1.4"
-
normalize-package-data@^2.3.2:
version "2.4.0"
resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f"
@@ -2200,30 +1378,11 @@ normalize-path@^2.0.1:
dependencies:
remove-trailing-separator "^1.0.1"
-npm-run-path@^2.0.0:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
- dependencies:
- path-key "^2.0.0"
-
-npmlog@^4.0.2:
- version "4.1.2"
- resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b"
- dependencies:
- are-we-there-yet "~1.1.2"
- console-control-strings "~1.1.0"
- gauge "~2.7.3"
- set-blocking "~2.0.0"
-
number-is-nan@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d"
-oauth-sign@~0.8.1:
- version "0.8.2"
- resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43"
-
-object-assign@^4.0.1, object-assign@^4.1.0:
+object-assign@^4.0.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
@@ -2234,7 +1393,7 @@ object.omit@^2.0.0:
for-own "^0.1.4"
is-extendable "^0.1.1"
-once@^1.3.0, once@^1.3.3:
+once@^1.3.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
dependencies:
@@ -2257,37 +1416,14 @@ optionator@^0.8.2:
type-check "~0.3.2"
wordwrap "~1.0.0"
-os-browserify@^0.2.0:
- version "0.2.1"
- resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.2.1.tgz#63fc4ccee5d2d7763d26bbf8601078e6c2e0044f"
-
os-homedir@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
-os-locale@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-2.1.0.tgz#42bc2900a6b5b8bd17376c8e882b65afccf24bf2"
- dependencies:
- execa "^0.7.0"
- lcid "^1.0.0"
- mem "^1.1.0"
-
-os-tmpdir@^1.0.0, os-tmpdir@^1.0.1, os-tmpdir@~1.0.1:
+os-tmpdir@^1.0.1, os-tmpdir@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
-osenv@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.4.tgz#42fe6d5953df06c8064be6f176c3d05aaaa34644"
- dependencies:
- os-homedir "^1.0.0"
- os-tmpdir "^1.0.0"
-
-p-finally@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
-
p-limit@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.1.0.tgz#b07ff2d9a5d88bec806035895a2bab66a27988bc"
@@ -2298,20 +1434,6 @@ p-locate@^2.0.0:
dependencies:
p-limit "^1.1.0"
-pako@~0.2.0:
- version "0.2.9"
- resolved "https://registry.yarnpkg.com/pako/-/pako-0.2.9.tgz#f3f7522f4ef782348da8161bad9ecfd51bf83a75"
-
-parse-asn1@^5.0.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.0.tgz#37c4f9b7ed3ab65c74817b5f2480937fbf97c712"
- dependencies:
- asn1.js "^4.0.0"
- browserify-aes "^1.0.0"
- create-hash "^1.1.0"
- evp_bytestokey "^1.0.0"
- pbkdf2 "^3.0.3"
-
parse-glob@^3.0.4:
version "3.0.4"
resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c"
@@ -2327,10 +1449,6 @@ parse-json@^2.2.0:
dependencies:
error-ex "^1.2.0"
-path-browserify@0.0.0:
- version "0.0.0"
- resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.0.tgz#a0b870729aae214005b7d5032ec2cbbb0fb4451a"
-
path-exists@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b"
@@ -2349,10 +1467,6 @@ path-is-inside@^1.0.1, path-is-inside@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53"
-path-key@^2.0.0:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
-
path-parse@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1"
@@ -2363,21 +1477,7 @@ path-type@^2.0.0:
dependencies:
pify "^2.0.0"
-pbkdf2@^3.0.3:
- version "3.0.12"
- resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.12.tgz#be36785c5067ea48d806ff923288c5f750b6b8a2"
- dependencies:
- create-hash "^1.1.2"
- create-hmac "^1.1.4"
- ripemd160 "^2.0.1"
- safe-buffer "^5.0.1"
- sha.js "^2.4.8"
-
-performance-now@^0.2.0:
- version "0.2.0"
- resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-0.2.0.tgz#33ef30c5c77d4ea21c5a53869d91b56d8f2555e5"
-
-pify@^2.0.0, pify@^2.3.0:
+pify@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
@@ -2397,12 +1497,6 @@ pkg-dir@^1.0.0:
dependencies:
find-up "^1.0.0"
-pkg-dir@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b"
- dependencies:
- find-up "^2.1.0"
-
pluralize@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-4.0.0.tgz#59b708c1c0190a2f692f1c7618c446b052fd1762"
@@ -2423,52 +1517,14 @@ process-nextick-args@~1.0.6:
version "1.0.7"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3"
-process@^0.11.0:
- version "0.11.10"
- resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
-
progress@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.0.tgz#8a1be366bf8fc23db2bd23f10c6fe920b4389d1f"
-prr@~0.0.0:
- version "0.0.0"
- resolved "https://registry.yarnpkg.com/prr/-/prr-0.0.0.tgz#1a84b85908325501411853d0081ee3fa86e2926a"
-
pseudomap@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3"
-public-encrypt@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.0.tgz#39f699f3a46560dd5ebacbca693caf7c65c18cc6"
- dependencies:
- bn.js "^4.1.0"
- browserify-rsa "^4.0.0"
- create-hash "^1.1.0"
- parse-asn1 "^5.0.0"
- randombytes "^2.0.1"
-
-punycode@1.3.2:
- version "1.3.2"
- resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d"
-
-punycode@^1.2.4, punycode@^1.4.1:
- version "1.4.1"
- resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e"
-
-qs@~6.4.0:
- version "6.4.0"
- resolved "https://registry.yarnpkg.com/qs/-/qs-6.4.0.tgz#13e26d28ad6b0ffaa91312cd3bf708ed351e7233"
-
-querystring-es3@^0.2.0:
- version "0.2.1"
- resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73"
-
-querystring@0.2.0:
- version "0.2.0"
- resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
-
randomatic@^1.1.3:
version "1.1.7"
resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.7.tgz#c7abe9cc8b87c0baa876b19fde83fd464797e38c"
@@ -2476,21 +1532,6 @@ randomatic@^1.1.3:
is-number "^3.0.0"
kind-of "^4.0.0"
-randombytes@^2.0.0, randombytes@^2.0.1:
- version "2.0.5"
- resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.0.5.tgz#dc009a246b8d09a177b4b7a0ae77bc570f4b1b79"
- dependencies:
- safe-buffer "^5.1.0"
-
-rc@^1.1.7:
- version "1.2.1"
- resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.1.tgz#2e03e8e42ee450b8cb3dce65be1bf8974e1dfd95"
- dependencies:
- deep-extend "~0.4.0"
- ini "~1.3.0"
- minimist "^1.2.0"
- strip-json-comments "~2.0.1"
-
read-pkg-up@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be"
@@ -2506,7 +1547,7 @@ read-pkg@^2.0.0:
normalize-package-data "^2.3.2"
path-type "^2.0.0"
-readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.0.6, readable-stream@^2.1.4, readable-stream@^2.2.2, readable-stream@^2.2.6:
+readable-stream@^2.2.2:
version "2.3.3"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.3.tgz#368f2512d79f9d46fdfc71349ae7878bbc1eb95c"
dependencies:
@@ -2518,15 +1559,6 @@ readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.5, readable
string_decoder "~1.0.3"
util-deprecate "~1.0.1"
-readdirp@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.1.0.tgz#4ed0ad060df3073300c48440373f72d1cc642d78"
- dependencies:
- graceful-fs "^4.1.2"
- minimatch "^3.0.2"
- readable-stream "^2.0.2"
- set-immediate-shim "^1.0.1"
-
regenerate@^1.2.1:
version "1.3.2"
resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.3.2.tgz#d1941c67bad437e1be76433add5b385f95b19260"
@@ -2586,41 +1618,6 @@ repeating@^2.0.0:
dependencies:
is-finite "^1.0.0"
-request@^2.81.0:
- version "2.81.0"
- resolved "https://registry.yarnpkg.com/request/-/request-2.81.0.tgz#c6928946a0e06c5f8d6f8a9333469ffda46298a0"
- dependencies:
- aws-sign2 "~0.6.0"
- aws4 "^1.2.1"
- caseless "~0.12.0"
- combined-stream "~1.0.5"
- extend "~3.0.0"
- forever-agent "~0.6.1"
- form-data "~2.1.1"
- har-validator "~4.2.1"
- hawk "~3.1.3"
- http-signature "~1.1.0"
- is-typedarray "~1.0.0"
- isstream "~0.1.2"
- json-stringify-safe "~5.0.1"
- mime-types "~2.1.7"
- oauth-sign "~0.8.1"
- performance-now "^0.2.0"
- qs "~6.4.0"
- safe-buffer "^5.0.1"
- stringstream "~0.0.4"
- tough-cookie "~2.3.0"
- tunnel-agent "^0.6.0"
- uuid "^3.0.0"
-
-require-directory@^2.1.1:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
-
-require-main-filename@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1"
-
require-uncached@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/require-uncached/-/require-uncached-1.0.3.tgz#4e0d56d6c9662fd31e43011c4b95aa49955421d3"
@@ -2632,6 +1629,12 @@ resolve-from@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226"
+resolve@^1.1.6, resolve@^1.5.0:
+ version "1.7.1"
+ resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.7.1.tgz#aadd656374fd298aee895bc026b8297418677fd3"
+ dependencies:
+ path-parse "^1.0.5"
+
resolve@^1.2.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.4.0.tgz#a75be01c53da25d934a98ebd0e4c4a7312f92a86"
@@ -2645,24 +1648,61 @@ restore-cursor@^2.0.0:
onetime "^2.0.0"
signal-exit "^3.0.2"
-right-align@^0.1.1:
- version "0.1.3"
- resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef"
- dependencies:
- align-text "^0.1.1"
-
-rimraf@2, rimraf@^2.2.8, rimraf@^2.5.1, rimraf@^2.6.1:
+rimraf@^2.2.8:
version "2.6.1"
resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.1.tgz#c2338ec643df7a1b7fe5c54fa86f57428a55f33d"
dependencies:
glob "^7.0.5"
-ripemd160@^2.0.0, ripemd160@^2.0.1:
+rollup-plugin-babel@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/rollup-plugin-babel/-/rollup-plugin-babel-3.0.4.tgz#41b3e762fe64450dd61da3105a2cf7ad76be4edc"
+ dependencies:
+ rollup-pluginutils "^1.5.0"
+
+rollup-plugin-commonjs@^9.1.0:
+ version "9.1.0"
+ resolved "https://registry.yarnpkg.com/rollup-plugin-commonjs/-/rollup-plugin-commonjs-9.1.0.tgz#468341aab32499123ee9a04b22f51d9bf26fdd94"
+ dependencies:
+ estree-walker "^0.5.1"
+ magic-string "^0.22.4"
+ resolve "^1.5.0"
+ rollup-pluginutils "^2.0.1"
+
+rollup-plugin-node-resolve@^3.3.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/rollup-plugin-node-resolve/-/rollup-plugin-node-resolve-3.3.0.tgz#c26d110a36812cbefa7ce117cadcd3439aa1c713"
+ dependencies:
+ builtin-modules "^2.0.0"
+ is-module "^1.0.0"
+ resolve "^1.1.6"
+
+rollup-plugin-uglify@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/rollup-plugin-uglify/-/rollup-plugin-uglify-3.0.0.tgz#a34eca24617709c6bf1778e9653baafa06099b86"
+ dependencies:
+ uglify-es "^3.3.7"
+
+rollup-pluginutils@^1.5.0:
+ version "1.5.2"
+ resolved "https://registry.yarnpkg.com/rollup-pluginutils/-/rollup-pluginutils-1.5.2.tgz#1e156e778f94b7255bfa1b3d0178be8f5c552408"
+ dependencies:
+ estree-walker "^0.2.1"
+ minimatch "^3.0.2"
+
+rollup-pluginutils@^2.0.1:
version "2.0.1"
- resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.1.tgz#0f4584295c53a3628af7e6d79aca21ce57d1c6e7"
+ resolved "https://registry.yarnpkg.com/rollup-pluginutils/-/rollup-pluginutils-2.0.1.tgz#7ec95b3573f6543a46a6461bd9a7c544525d0fc0"
+ dependencies:
+ estree-walker "^0.3.0"
+ micromatch "^2.3.11"
+
+rollup@^0.58.2:
+ version "0.58.2"
+ resolved "https://registry.yarnpkg.com/rollup/-/rollup-0.58.2.tgz#2feddea8c0c022f3e74b35c48e3c21b3433803ce"
dependencies:
- hash-base "^2.0.0"
- inherits "^2.0.1"
+ "@types/estree" "0.0.38"
+ "@types/node" "*"
run-async@^2.2.0:
version "2.3.0"
@@ -2680,7 +1720,7 @@ rx-lite@*, rx-lite@^4.0.8:
version "4.0.8"
resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-4.0.8.tgz#0b1e11af8bc44836f04a6407e92da42467b79444"
-safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
+safe-buffer@~5.1.0, safe-buffer@~5.1.1:
version "5.1.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853"
@@ -2688,24 +1728,6 @@ safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
version "5.4.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.4.1.tgz#e059c09d8571f0540823733433505d3a2f00b18e"
-set-blocking@^2.0.0, set-blocking@~2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
-
-set-immediate-shim@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61"
-
-setimmediate@^1.0.4:
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285"
-
-sha.js@^2.4.0, sha.js@^2.4.8:
- version "2.4.8"
- resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.8.tgz#37068c2c476b6baf402d14a49c67f597921f634f"
- dependencies:
- inherits "^2.0.1"
-
shebang-command@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
@@ -2716,7 +1738,7 @@ shebang-regex@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
-signal-exit@^3.0.0, signal-exit@^3.0.2:
+signal-exit@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d"
@@ -2728,26 +1750,20 @@ slice-ansi@0.0.4:
version "0.0.4"
resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35"
-sntp@1.x.x:
- version "1.0.9"
- resolved "https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198"
- dependencies:
- hoek "2.x.x"
-
-source-list-map@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.0.tgz#aaa47403f7b245a92fbc97ea08f250d6087ed085"
-
source-map-support@^0.4.2:
version "0.4.15"
resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.15.tgz#03202df65c06d2bd8c7ec2362a193056fef8d3b1"
dependencies:
source-map "^0.5.6"
-source-map@^0.5.0, source-map@^0.5.3, source-map@^0.5.6, source-map@~0.5.1, source-map@~0.5.3:
+source-map@^0.5.0, source-map@^0.5.6:
version "0.5.6"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412"
+source-map@~0.6.1:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
+
spark-md5@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.0.tgz#3722227c54e2faf24b1dc6d933cc144e6f71bfef"
@@ -2770,45 +1786,6 @@ sprintf-js@~1.0.2:
version "1.0.3"
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
-sshpk@^1.7.0:
- version "1.13.1"
- resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.13.1.tgz#512df6da6287144316dc4c18fe1cf1d940739be3"
- dependencies:
- asn1 "~0.2.3"
- assert-plus "^1.0.0"
- dashdash "^1.12.0"
- getpass "^0.1.1"
- optionalDependencies:
- bcrypt-pbkdf "^1.0.0"
- ecc-jsbn "~0.1.1"
- jsbn "~0.1.0"
- tweetnacl "~0.14.0"
-
-stream-browserify@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.1.tgz#66266ee5f9bdb9940a4e4514cafb43bb71e5c9db"
- dependencies:
- inherits "~2.0.1"
- readable-stream "^2.0.2"
-
-stream-http@^2.3.1:
- version "2.7.2"
- resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.7.2.tgz#40a050ec8dc3b53b33d9909415c02c0bf1abfbad"
- dependencies:
- builtin-status-codes "^3.0.0"
- inherits "^2.0.1"
- readable-stream "^2.2.6"
- to-arraybuffer "^1.0.0"
- xtend "^4.0.0"
-
-string-width@^1.0.1, string-width@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3"
- dependencies:
- code-point-at "^1.0.0"
- is-fullwidth-code-point "^1.0.0"
- strip-ansi "^3.0.0"
-
string-width@^2.0.0, string-width@^2.1.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
@@ -2816,21 +1793,13 @@ string-width@^2.0.0, string-width@^2.1.0:
is-fullwidth-code-point "^2.0.0"
strip-ansi "^4.0.0"
-string_decoder@^0.10.25:
- version "0.10.31"
- resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94"
-
string_decoder@~1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab"
dependencies:
safe-buffer "~5.1.0"
-stringstream@~0.0.4:
- version "0.0.5"
- resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878"
-
-strip-ansi@^3.0.0, strip-ansi@^3.0.1:
+strip-ansi@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
dependencies:
@@ -2846,10 +1815,6 @@ strip-bom@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
-strip-eof@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
-
strip-json-comments@~2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
@@ -2858,7 +1823,7 @@ supports-color@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7"
-supports-color@^4.0.0, supports-color@^4.2.1:
+supports-color@^4.0.0:
version "4.2.1"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.2.1.tgz#65a4bb2631e90e02420dba5554c375a4754bb836"
dependencies:
@@ -2875,31 +1840,6 @@ table@^4.0.1:
slice-ansi "0.0.4"
string-width "^2.0.0"
-tapable@^0.2.7:
- version "0.2.7"
- resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.2.7.tgz#e46c0daacbb2b8a98b9b0cea0f4052105817ed5c"
-
-tar-pack@^3.4.0:
- version "3.4.0"
- resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.0.tgz#23be2d7f671a8339376cbdb0b8fe3fdebf317984"
- dependencies:
- debug "^2.2.0"
- fstream "^1.0.10"
- fstream-ignore "^1.0.5"
- once "^1.3.3"
- readable-stream "^2.1.4"
- rimraf "^2.5.1"
- tar "^2.2.1"
- uid-number "^0.0.6"
-
-tar@^2.2.1:
- version "2.2.1"
- resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1"
- dependencies:
- block-stream "*"
- fstream "^1.0.2"
- inherits "2"
-
text-table@~0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
@@ -2908,32 +1848,16 @@ through@^2.3.6:
version "2.3.8"
resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
-timers-browserify@^2.0.2:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.2.tgz#ab4883cf597dcd50af211349a00fbca56ac86b86"
- dependencies:
- setimmediate "^1.0.4"
-
tmp@^0.0.31:
version "0.0.31"
resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.31.tgz#8f38ab9438e17315e5dbd8b3657e8bfb277ae4a7"
dependencies:
os-tmpdir "~1.0.1"
-to-arraybuffer@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43"
-
to-fast-properties@^1.0.1:
version "1.0.3"
resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47"
-tough-cookie@~2.3.0:
- version "2.3.2"
- resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.2.tgz#f081f76e4c85720e6c37a5faced737150d84072a"
- dependencies:
- punycode "^1.4.1"
-
trim-right@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003"
@@ -2942,20 +1866,6 @@ tryit@^1.0.1:
version "1.0.3"
resolved "https://registry.yarnpkg.com/tryit/-/tryit-1.0.3.tgz#393be730a9446fd1ead6da59a014308f36c289cb"
-tty-browserify@0.0.0:
- version "0.0.0"
- resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6"
-
-tunnel-agent@^0.6.0:
- version "0.6.0"
- resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd"
- dependencies:
- safe-buffer "^5.0.1"
-
-tweetnacl@^0.14.3, tweetnacl@~0.14.0:
- version "0.14.5"
- resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64"
-
type-check@~0.3.2:
version "0.3.2"
resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72"
@@ -2966,52 +1876,17 @@ typedarray@^0.0.6:
version "0.0.6"
resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
-uglify-js@^2.8.29:
- version "2.8.29"
- resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd"
- dependencies:
- source-map "~0.5.1"
- yargs "~3.10.0"
- optionalDependencies:
- uglify-to-browserify "~1.0.0"
-
-uglify-to-browserify@~1.0.0:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7"
-
-uglifyjs-webpack-plugin@^0.4.6:
- version "0.4.6"
- resolved "https://registry.yarnpkg.com/uglifyjs-webpack-plugin/-/uglifyjs-webpack-plugin-0.4.6.tgz#b951f4abb6bd617e66f63eb891498e391763e309"
+uglify-es@^3.3.7:
+ version "3.3.9"
+ resolved "https://registry.yarnpkg.com/uglify-es/-/uglify-es-3.3.9.tgz#0c1c4f0700bed8dbc124cdb304d2592ca203e677"
dependencies:
- source-map "^0.5.6"
- uglify-js "^2.8.29"
- webpack-sources "^1.0.1"
-
-uid-number@^0.0.6:
- version "0.0.6"
- resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81"
-
-url@^0.11.0:
- version "0.11.0"
- resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1"
- dependencies:
- punycode "1.3.2"
- querystring "0.2.0"
+ commander "~2.13.0"
+ source-map "~0.6.1"
util-deprecate@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
-util@0.10.3, util@^0.10.3:
- version "0.10.3"
- resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9"
- dependencies:
- inherits "2.0.1"
-
-uuid@^3.0.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.1.0.tgz#3dd3d3e790abc24d7b0d3a034ffababe28ebbc04"
-
validate-npm-package-license@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz#2804babe712ad3379459acfbe24746ab2c303fbc"
@@ -3019,63 +1894,9 @@ validate-npm-package-license@^3.0.1:
spdx-correct "~1.0.0"
spdx-expression-parse "~1.0.0"
-verror@1.3.6:
- version "1.3.6"
- resolved "https://registry.yarnpkg.com/verror/-/verror-1.3.6.tgz#cff5df12946d297d2baaefaa2689e25be01c005c"
- dependencies:
- extsprintf "1.0.2"
-
-vm-browserify@0.0.4:
- version "0.0.4"
- resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-0.0.4.tgz#5d7ea45bbef9e4a6ff65f95438e0a87c357d5a73"
- dependencies:
- indexof "0.0.1"
-
-watchpack@^1.4.0:
- version "1.4.0"
- resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.4.0.tgz#4a1472bcbb952bd0a9bb4036801f954dfb39faac"
- dependencies:
- async "^2.1.2"
- chokidar "^1.7.0"
- graceful-fs "^4.1.2"
-
-webpack-sources@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.0.1.tgz#c7356436a4d13123be2e2426a05d1dad9cbe65cf"
- dependencies:
- source-list-map "^2.0.0"
- source-map "~0.5.3"
-
-webpack@^3.4.0:
- version "3.4.0"
- resolved "https://registry.yarnpkg.com/webpack/-/webpack-3.4.0.tgz#e9465b660ad79dd2d33874d968b31746ea9a8e63"
- dependencies:
- acorn "^5.0.0"
- acorn-dynamic-import "^2.0.0"
- ajv "^5.1.5"
- ajv-keywords "^2.0.0"
- async "^2.1.2"
- enhanced-resolve "^3.4.0"
- escope "^3.6.0"
- interpret "^1.0.0"
- json-loader "^0.5.4"
- json5 "^0.5.1"
- loader-runner "^2.3.0"
- loader-utils "^1.1.0"
- memory-fs "~0.4.1"
- mkdirp "~0.5.0"
- node-libs-browser "^2.0.0"
- source-map "^0.5.3"
- supports-color "^4.2.1"
- tapable "^0.2.7"
- uglifyjs-webpack-plugin "^0.4.6"
- watchpack "^1.4.0"
- webpack-sources "^1.0.1"
- yargs "^8.0.2"
-
-which-module@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
+vlq@^0.2.2:
+ version "0.2.3"
+ resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26"
which@^1.2.9:
version "1.2.14"
@@ -3083,31 +1904,10 @@ which@^1.2.9:
dependencies:
isexe "^2.0.0"
-wide-align@^1.1.0:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.2.tgz#571e0f1b0604636ebc0dfc21b0339bbe31341710"
- dependencies:
- string-width "^1.0.2"
-
-window-size@0.1.0:
- version "0.1.0"
- resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d"
-
-wordwrap@0.0.2:
- version "0.0.2"
- resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f"
-
wordwrap@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb"
-wrap-ansi@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85"
- dependencies:
- string-width "^1.0.1"
- strip-ansi "^3.0.1"
-
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
@@ -3118,47 +1918,6 @@ write@^0.2.1:
dependencies:
mkdirp "^0.5.1"
-xtend@^4.0.0:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"
-
-y18n@^3.2.1:
- version "3.2.1"
- resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41"
-
yallist@^2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52"
-
-yargs-parser@^7.0.0:
- version "7.0.0"
- resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-7.0.0.tgz#8d0ac42f16ea55debd332caf4c4038b3e3f5dfd9"
- dependencies:
- camelcase "^4.1.0"
-
-yargs@^8.0.2:
- version "8.0.2"
- resolved "https://registry.yarnpkg.com/yargs/-/yargs-8.0.2.tgz#6299a9055b1cefc969ff7e79c1d918dceb22c360"
- dependencies:
- camelcase "^4.1.0"
- cliui "^3.2.0"
- decamelize "^1.1.1"
- get-caller-file "^1.0.1"
- os-locale "^2.0.0"
- read-pkg-up "^2.0.0"
- require-directory "^2.1.1"
- require-main-filename "^1.0.1"
- set-blocking "^2.0.0"
- string-width "^2.0.0"
- which-module "^2.0.0"
- y18n "^3.2.1"
- yargs-parser "^7.0.0"
-
-yargs@~3.10.0:
- version "3.10.0"
- resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1"
- dependencies:
- camelcase "^1.0.2"
- cliui "^2.1.0"
- decamelize "^1.0.0"
- window-size "0.1.0"