Diffstat (limited to 'activestorage/app')
25 files changed, 1270 insertions, 0 deletions
diff --git a/activestorage/app/assets/javascripts/activestorage.js b/activestorage/app/assets/javascripts/activestorage.js new file mode 100644 index 0000000000..c1c0a2f6d9 --- /dev/null +++ b/activestorage/app/assets/javascripts/activestorage.js @@ -0,0 +1 @@ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ActiveStorage=e():t.ActiveStorage=e()}(this,function(){return function(t){function e(n){if(r[n])return r[n].exports;var i=r[n]={i:n,l:!1,exports:{}};return t[n].call(i.exports,i,i.exports,e),i.l=!0,i.exports}var r={};return e.m=t,e.c=r,e.d=function(t,r,n){e.o(t,r)||Object.defineProperty(t,r,{configurable:!1,enumerable:!0,get:n})},e.n=function(t){var r=t&&t.__esModule?function(){return t.default}:function(){return t};return e.d(r,"a",r),r},e.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},e.p="",e(e.s=2)}([function(t,e,r){"use strict";function n(t){var e=a(document.head,'meta[name="'+t+'"]');if(e)return e.getAttribute("content")}function i(t,e){return"string"==typeof t&&(e=t,t=document),o(t.querySelectorAll(e))}function a(t,e){return"string"==typeof t&&(e=t,t=document),t.querySelector(e)}function u(t,e){var r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},n=r.bubbles,i=r.cancelable,a=r.detail,u=document.createEvent("Event");return u.initEvent(e,n||!0,i||!0),u.detail=a||{},t.dispatchEvent(u),u}function o(t){return Array.isArray(t)?t:Array.from?Array.from(t):[].slice.call(t)}e.d=n,e.c=i,e.b=a,e.a=u,e.e=o},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function i(t,e){if(t&&"function"==typeof t[e]){for(var r=arguments.length,n=Array(r>2?r-2:0),i=2;i<r;i++)n[i-2]=arguments[i];return t[e].apply(t,n)}}r.d(e,"a",function(){return c});var a=r(6),u=r(8),o=r(9),s=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),f=0,c=function(){function t(e,r,i){n(this,t),this.id=++f,this.file=e,this.url=r,this.delegate=i}return s(t,[{key:"create",value:function(t){var e=this;a.a.create(this.file,function(r,n){var a=new u.a(e.file,n,e.url);i(e.delegate,"directUploadWillCreateBlobWithXHR",a.xhr),a.create(function(r){if(r)t(r);else{var n=new o.a(a);i(e.delegate,"directUploadWillStoreFileWithXHR",n.xhr),n.create(function(e){e?t(e):t(null,a.toJSON())})}})})}}]),t}()},function(t,e,r){"use strict";function n(){window.ActiveStorage&&Object(i.a)()}Object.defineProperty(e,"__esModule",{value:!0});var i=r(3),a=r(1);r.d(e,"start",function(){return i.a}),r.d(e,"DirectUpload",function(){return a.a}),setTimeout(n,1)},function(t,e,r){"use strict";function n(){d||(d=!0,document.addEventListener("submit",i),document.addEventListener("ajax:before",a))}function i(t){u(t)}function a(t){"FORM"==t.target.tagName&&u(t)}function u(t){var e=t.target;if(e.hasAttribute(l))return void t.preventDefault();var r=new c.a(e),n=r.inputs;n.length&&(t.preventDefault(),e.setAttribute(l,""),n.forEach(s),r.start(function(t){e.removeAttribute(l),t?n.forEach(f):o(e)}))}function o(t){var e=Object(h.b)(t,"input[type=submit]");if(e){var r=e,n=r.disabled;e.disabled=!1,e.focus(),e.click(),e.disabled=n}else e=document.createElement("input"),e.type="submit",e.style="display:none",t.appendChild(e),e.click(),t.removeChild(e)}function 
s(t){t.disabled=!0}function f(t){t.disabled=!1}e.a=n;var c=r(4),h=r(0),l="data-direct-uploads-processing",d=!1},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}r.d(e,"a",function(){return s});var i=r(5),a=r(0),u=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),o="input[type=file][data-direct-upload-url]:not([disabled])",s=function(){function t(e){n(this,t),this.form=e,this.inputs=Object(a.c)(e,o).filter(function(t){return t.files.length})}return u(t,[{key:"start",value:function(t){var e=this,r=this.createDirectUploadControllers();this.dispatch("start"),function n(){var i=r.shift();i?i.start(function(r){r?(t(r),e.dispatch("end")):n()}):(t(),e.dispatch("end"))}()}},{key:"createDirectUploadControllers",value:function(){var t=[];return this.inputs.forEach(function(e){Object(a.e)(e.files).forEach(function(r){var n=new i.a(e,r);t.push(n)})}),t}},{key:"dispatch",value:function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return Object(a.a)(this.form,"direct-uploads:"+t,{detail:e})}}]),t}()},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}r.d(e,"a",function(){return o});var i=r(1),a=r(0),u=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),o=function(){function t(e,r){n(this,t),this.input=e,this.file=r,this.directUpload=new i.a(this.file,this.url,this),this.dispatch("initialize")}return u(t,[{key:"start",value:function(t){var e=this,r=document.createElement("input");r.type="hidden",r.name=this.input.name,this.input.insertAdjacentElement("beforebegin",r),this.dispatch("start"),this.directUpload.create(function(n,i){n?(r.parentNode.removeChild(r),e.dispatchError(n)):r.value=i.signed_id,e.dispatch("end"),t(n)})}},{key:"uploadRequestDidProgress",value:function(t){var e=t.loaded/t.total*100;e&&this.dispatch("progress",{progress:e})}},{key:"dispatch",value:function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return e.file=this.file,e.id=this.directUpload.id,Object(a.a)(this.input,"direct-upload:"+t,{detail:e})}},{key:"dispatchError",value:function(t){this.dispatch("error",{error:t}).defaultPrevented||alert(t)}},{key:"directUploadWillCreateBlobWithXHR",value:function(t){this.dispatch("before-blob-request",{xhr:t})}},{key:"directUploadWillStoreFileWithXHR",value:function(t){var e=this;this.dispatch("before-storage-request",{xhr:t}),t.upload.addEventListener("progress",function(t){return e.uploadRequestDidProgress(t)})}},{key:"url",get:function(){return this.input.getAttribute("data-direct-upload-url")}}]),t}()},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}r.d(e,"a",function(){return s});var i=r(7),a=r.n(i),u=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),o=File.prototype.slice||File.prototype.mozSlice||File.prototype.webkitSlice,s=function(){function 
t(e){n(this,t),this.file=e,this.chunkSize=2097152,this.chunkCount=Math.ceil(this.file.size/this.chunkSize),this.chunkIndex=0}return u(t,null,[{key:"create",value:function(e,r){new t(e).create(r)}}]),u(t,[{key:"create",value:function(t){var e=this;this.callback=t,this.md5Buffer=new a.a.ArrayBuffer,this.fileReader=new FileReader,this.fileReader.addEventListener("load",function(t){return e.fileReaderDidLoad(t)}),this.fileReader.addEventListener("error",function(t){return e.fileReaderDidError(t)}),this.readNextChunk()}},{key:"fileReaderDidLoad",value:function(t){if(this.md5Buffer.append(t.target.result),!this.readNextChunk()){var e=this.md5Buffer.end(!0),r=btoa(e);this.callback(null,r)}}},{key:"fileReaderDidError",value:function(t){this.callback("Error reading "+this.file.name)}},{key:"readNextChunk",value:function(){if(this.chunkIndex<this.chunkCount){var t=this.chunkIndex*this.chunkSize,e=Math.min(t+this.chunkSize,this.file.size),r=o.call(this.file,t,e);return this.fileReader.readAsArrayBuffer(r),this.chunkIndex++,!0}return!1}}]),t}()},function(t,e,r){!function(e){t.exports=e()}(function(t){"use strict";function e(t,e){var r=t[0],n=t[1],i=t[2],a=t[3];r+=(n&i|~n&a)+e[0]-680876936|0,r=(r<<7|r>>>25)+n|0,a+=(r&n|~r&i)+e[1]-389564586|0,a=(a<<12|a>>>20)+r|0,i+=(a&r|~a&n)+e[2]+606105819|0,i=(i<<17|i>>>15)+a|0,n+=(i&a|~i&r)+e[3]-1044525330|0,n=(n<<22|n>>>10)+i|0,r+=(n&i|~n&a)+e[4]-176418897|0,r=(r<<7|r>>>25)+n|0,a+=(r&n|~r&i)+e[5]+1200080426|0,a=(a<<12|a>>>20)+r|0,i+=(a&r|~a&n)+e[6]-1473231341|0,i=(i<<17|i>>>15)+a|0,n+=(i&a|~i&r)+e[7]-45705983|0,n=(n<<22|n>>>10)+i|0,r+=(n&i|~n&a)+e[8]+1770035416|0,r=(r<<7|r>>>25)+n|0,a+=(r&n|~r&i)+e[9]-1958414417|0,a=(a<<12|a>>>20)+r|0,i+=(a&r|~a&n)+e[10]-42063|0,i=(i<<17|i>>>15)+a|0,n+=(i&a|~i&r)+e[11]-1990404162|0,n=(n<<22|n>>>10)+i|0,r+=(n&i|~n&a)+e[12]+1804603682|0,r=(r<<7|r>>>25)+n|0,a+=(r&n|~r&i)+e[13]-40341101|0,a=(a<<12|a>>>20)+r|0,i+=(a&r|~a&n)+e[14]-1502002290|0,i=(i<<17|i>>>15)+a|0,n+=(i&a|~i&r)+e[15]+1236535329|0,n=(n<<22|n>>>10)+i|0,r+=(n&a|i&~a)+e[1]-165796510|0,r=(r<<5|r>>>27)+n|0,a+=(r&i|n&~i)+e[6]-1069501632|0,a=(a<<9|a>>>23)+r|0,i+=(a&n|r&~n)+e[11]+643717713|0,i=(i<<14|i>>>18)+a|0,n+=(i&r|a&~r)+e[0]-373897302|0,n=(n<<20|n>>>12)+i|0,r+=(n&a|i&~a)+e[5]-701558691|0,r=(r<<5|r>>>27)+n|0,a+=(r&i|n&~i)+e[10]+38016083|0,a=(a<<9|a>>>23)+r|0,i+=(a&n|r&~n)+e[15]-660478335|0,i=(i<<14|i>>>18)+a|0,n+=(i&r|a&~r)+e[4]-405537848|0,n=(n<<20|n>>>12)+i|0,r+=(n&a|i&~a)+e[9]+568446438|0,r=(r<<5|r>>>27)+n|0,a+=(r&i|n&~i)+e[14]-1019803690|0,a=(a<<9|a>>>23)+r|0,i+=(a&n|r&~n)+e[3]-187363961|0,i=(i<<14|i>>>18)+a|0,n+=(i&r|a&~r)+e[8]+1163531501|0,n=(n<<20|n>>>12)+i|0,r+=(n&a|i&~a)+e[13]-1444681467|0,r=(r<<5|r>>>27)+n|0,a+=(r&i|n&~i)+e[2]-51403784|0,a=(a<<9|a>>>23)+r|0,i+=(a&n|r&~n)+e[7]+1735328473|0,i=(i<<14|i>>>18)+a|0,n+=(i&r|a&~r)+e[12]-1926607734|0,n=(n<<20|n>>>12)+i|0,r+=(n^i^a)+e[5]-378558|0,r=(r<<4|r>>>28)+n|0,a+=(r^n^i)+e[8]-2022574463|0,a=(a<<11|a>>>21)+r|0,i+=(a^r^n)+e[11]+1839030562|0,i=(i<<16|i>>>16)+a|0,n+=(i^a^r)+e[14]-35309556|0,n=(n<<23|n>>>9)+i|0,r+=(n^i^a)+e[1]-1530992060|0,r=(r<<4|r>>>28)+n|0,a+=(r^n^i)+e[4]+1272893353|0,a=(a<<11|a>>>21)+r|0,i+=(a^r^n)+e[7]-155497632|0,i=(i<<16|i>>>16)+a|0,n+=(i^a^r)+e[10]-1094730640|0,n=(n<<23|n>>>9)+i|0,r+=(n^i^a)+e[13]+681279174|0,r=(r<<4|r>>>28)+n|0,a+=(r^n^i)+e[0]-358537222|0,a=(a<<11|a>>>21)+r|0,i+=(a^r^n)+e[3]-722521979|0,i=(i<<16|i>>>16)+a|0,n+=(i^a^r)+e[6]+76029189|0,n=(n<<23|n>>>9)+i|0,r+=(n^i^a)+e[9]-640364487|0,r=(r<<4|r>>>28)+n|0,a+=(r^n^i)+e[12]-421815835|0,a=(a<<11|a>>>21)+r|0,i+=(a^r^n)+e[15]+530742520|0,i=(i
<<16|i>>>16)+a|0,n+=(i^a^r)+e[2]-995338651|0,n=(n<<23|n>>>9)+i|0,r+=(i^(n|~a))+e[0]-198630844|0,r=(r<<6|r>>>26)+n|0,a+=(n^(r|~i))+e[7]+1126891415|0,a=(a<<10|a>>>22)+r|0,i+=(r^(a|~n))+e[14]-1416354905|0,i=(i<<15|i>>>17)+a|0,n+=(a^(i|~r))+e[5]-57434055|0,n=(n<<21|n>>>11)+i|0,r+=(i^(n|~a))+e[12]+1700485571|0,r=(r<<6|r>>>26)+n|0,a+=(n^(r|~i))+e[3]-1894986606|0,a=(a<<10|a>>>22)+r|0,i+=(r^(a|~n))+e[10]-1051523|0,i=(i<<15|i>>>17)+a|0,n+=(a^(i|~r))+e[1]-2054922799|0,n=(n<<21|n>>>11)+i|0,r+=(i^(n|~a))+e[8]+1873313359|0,r=(r<<6|r>>>26)+n|0,a+=(n^(r|~i))+e[15]-30611744|0,a=(a<<10|a>>>22)+r|0,i+=(r^(a|~n))+e[6]-1560198380|0,i=(i<<15|i>>>17)+a|0,n+=(a^(i|~r))+e[13]+1309151649|0,n=(n<<21|n>>>11)+i|0,r+=(i^(n|~a))+e[4]-145523070|0,r=(r<<6|r>>>26)+n|0,a+=(n^(r|~i))+e[11]-1120210379|0,a=(a<<10|a>>>22)+r|0,i+=(r^(a|~n))+e[2]+718787259|0,i=(i<<15|i>>>17)+a|0,n+=(a^(i|~r))+e[9]-343485551|0,n=(n<<21|n>>>11)+i|0,t[0]=r+t[0]|0,t[1]=n+t[1]|0,t[2]=i+t[2]|0,t[3]=a+t[3]|0}function r(t){var e,r=[];for(e=0;e<64;e+=4)r[e>>2]=t.charCodeAt(e)+(t.charCodeAt(e+1)<<8)+(t.charCodeAt(e+2)<<16)+(t.charCodeAt(e+3)<<24);return r}function n(t){var e,r=[];for(e=0;e<64;e+=4)r[e>>2]=t[e]+(t[e+1]<<8)+(t[e+2]<<16)+(t[e+3]<<24);return r}function i(t){var n,i,a,u,o,s,f=t.length,c=[1732584193,-271733879,-1732584194,271733878];for(n=64;n<=f;n+=64)e(c,r(t.substring(n-64,n)));for(t=t.substring(n-64),i=t.length,a=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],n=0;n<i;n+=1)a[n>>2]|=t.charCodeAt(n)<<(n%4<<3);if(a[n>>2]|=128<<(n%4<<3),n>55)for(e(c,a),n=0;n<16;n+=1)a[n]=0;return u=8*f,u=u.toString(16).match(/(.*?)(.{0,8})$/),o=parseInt(u[2],16),s=parseInt(u[1],16)||0,a[14]=o,a[15]=s,e(c,a),c}function a(t){var r,i,a,u,o,s,f=t.length,c=[1732584193,-271733879,-1732584194,271733878];for(r=64;r<=f;r+=64)e(c,n(t.subarray(r-64,r)));for(t=r-64<f?t.subarray(r-64):new Uint8Array(0),i=t.length,a=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],r=0;r<i;r+=1)a[r>>2]|=t[r]<<(r%4<<3);if(a[r>>2]|=128<<(r%4<<3),r>55)for(e(c,a),r=0;r<16;r+=1)a[r]=0;return u=8*f,u=u.toString(16).match(/(.*?)(.{0,8})$/),o=parseInt(u[2],16),s=parseInt(u[1],16)||0,a[14]=o,a[15]=s,e(c,a),c}function u(t){var e,r="";for(e=0;e<4;e+=1)r+=p[t>>8*e+4&15]+p[t>>8*e&15];return r}function o(t){var e;for(e=0;e<t.length;e+=1)t[e]=u(t[e]);return t.join("")}function s(t){return/[\u0080-\uFFFF]/.test(t)&&(t=unescape(encodeURIComponent(t))),t}function f(t,e){var r,n=t.length,i=new ArrayBuffer(n),a=new Uint8Array(i);for(r=0;r<n;r+=1)a[r]=t.charCodeAt(r);return e?a:i}function c(t){return String.fromCharCode.apply(null,new Uint8Array(t))}function h(t,e,r){var n=new Uint8Array(t.byteLength+e.byteLength);return n.set(new Uint8Array(t)),n.set(new Uint8Array(e),t.byteLength),r?n:n.buffer}function l(t){var e,r=[],n=t.length;for(e=0;e<n-1;e+=2)r.push(parseInt(t.substr(e,2),16));return String.fromCharCode.apply(String,r)}function d(){this.reset()}var p=["0","1","2","3","4","5","6","7","8","9","a","b","c","d","e","f"];return"5d41402abc4b2a76b9719d911017c592"!==o(i("hello"))&&function(t,e){var r=(65535&t)+(65535&e);return(t>>16)+(e>>16)+(r>>16)<<16|65535&r},"undefined"==typeof ArrayBuffer||ArrayBuffer.prototype.slice||function(){function e(t,e){return t=0|t||0,t<0?Math.max(t+e,0):Math.min(t,e)}ArrayBuffer.prototype.slice=function(r,n){var i,a,u,o,s=this.byteLength,f=e(r,s),c=s;return n!==t&&(c=e(n,s)),f>c?new ArrayBuffer(0):(i=c-f,a=new ArrayBuffer(i),u=new Uint8Array(a),o=new Uint8Array(this,f,i),u.set(o),a)}}(),d.prototype.append=function(t){return 
this.appendBinary(s(t)),this},d.prototype.appendBinary=function(t){this._buff+=t,this._length+=t.length;var n,i=this._buff.length;for(n=64;n<=i;n+=64)e(this._hash,r(this._buff.substring(n-64,n)));return this._buff=this._buff.substring(n-64),this},d.prototype.end=function(t){var e,r,n=this._buff,i=n.length,a=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0];for(e=0;e<i;e+=1)a[e>>2]|=n.charCodeAt(e)<<(e%4<<3);return this._finish(a,i),r=o(this._hash),t&&(r=l(r)),this.reset(),r},d.prototype.reset=function(){return this._buff="",this._length=0,this._hash=[1732584193,-271733879,-1732584194,271733878],this},d.prototype.getState=function(){return{buff:this._buff,length:this._length,hash:this._hash}},d.prototype.setState=function(t){return this._buff=t.buff,this._length=t.length,this._hash=t.hash,this},d.prototype.destroy=function(){delete this._hash,delete this._buff,delete this._length},d.prototype._finish=function(t,r){var n,i,a,u=r;if(t[u>>2]|=128<<(u%4<<3),u>55)for(e(this._hash,t),u=0;u<16;u+=1)t[u]=0;n=8*this._length,n=n.toString(16).match(/(.*?)(.{0,8})$/),i=parseInt(n[2],16),a=parseInt(n[1],16)||0,t[14]=i,t[15]=a,e(this._hash,t)},d.hash=function(t,e){return d.hashBinary(s(t),e)},d.hashBinary=function(t,e){var r=i(t),n=o(r);return e?l(n):n},d.ArrayBuffer=function(){this.reset()},d.ArrayBuffer.prototype.append=function(t){var r,i=h(this._buff.buffer,t,!0),a=i.length;for(this._length+=t.byteLength,r=64;r<=a;r+=64)e(this._hash,n(i.subarray(r-64,r)));return this._buff=r-64<a?new Uint8Array(i.buffer.slice(r-64)):new Uint8Array(0),this},d.ArrayBuffer.prototype.end=function(t){var e,r,n=this._buff,i=n.length,a=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0];for(e=0;e<i;e+=1)a[e>>2]|=n[e]<<(e%4<<3);return this._finish(a,i),r=o(this._hash),t&&(r=l(r)),this.reset(),r},d.ArrayBuffer.prototype.reset=function(){return this._buff=new Uint8Array(0),this._length=0,this._hash=[1732584193,-271733879,-1732584194,271733878],this},d.ArrayBuffer.prototype.getState=function(){var t=d.prototype.getState.call(this);return t.buff=c(t.buff),t},d.ArrayBuffer.prototype.setState=function(t){return t.buff=f(t.buff,!0),d.prototype.setState.call(this,t)},d.ArrayBuffer.prototype.destroy=d.prototype.destroy,d.ArrayBuffer.prototype._finish=d.prototype._finish,d.ArrayBuffer.hash=function(t,e){var r=a(new Uint8Array(t)),n=o(r);return e?l(n):n},d})},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}r.d(e,"a",function(){return u});var i=r(0),a=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),u=function(){function t(e,r,a){var u=this;n(this,t),this.file=e,this.attributes={filename:e.name,content_type:e.type,byte_size:e.size,checksum:r},this.xhr=new XMLHttpRequest,this.xhr.open("POST",a,!0),this.xhr.responseType="json",this.xhr.setRequestHeader("Content-Type","application/json"),this.xhr.setRequestHeader("Accept","application/json"),this.xhr.setRequestHeader("X-Requested-With","XMLHttpRequest"),this.xhr.setRequestHeader("X-CSRF-Token",Object(i.d)("csrf-token")),this.xhr.addEventListener("load",function(t){return u.requestDidLoad(t)}),this.xhr.addEventListener("error",function(t){return u.requestDidError(t)})}return a(t,[{key:"create",value:function(t){this.callback=t,this.xhr.send(JSON.stringify({blob:this.attributes}))}},{key:"requestDidLoad",value:function(t){var 
e=this.xhr,r=e.status,n=e.response;if(r>=200&&r<300){var i=n.direct_upload;delete n.direct_upload,this.attributes=n,this.directUploadData=i,this.callback(null,this.toJSON())}else this.requestDidError(t)}},{key:"requestDidError",value:function(t){this.callback('Error creating Blob for "'+this.file.name+'". Status: '+this.xhr.status)}},{key:"toJSON",value:function(){var t={};for(var e in this.attributes)t[e]=this.attributes[e];return t}}]),t}()},function(t,e,r){"use strict";function n(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}r.d(e,"a",function(){return a});var i=function(){function t(t,e){for(var r=0;r<e.length;r++){var n=e[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,n.key,n)}}return function(e,r,n){return r&&t(e.prototype,r),n&&t(e,n),e}}(),a=function(){function t(e){var r=this;n(this,t),this.blob=e,this.file=e.file;var i=e.directUploadData,a=i.url,u=i.headers;this.xhr=new XMLHttpRequest,this.xhr.open("PUT",a,!0),this.xhr.responseType="text";for(var o in u)this.xhr.setRequestHeader(o,u[o]);this.xhr.addEventListener("load",function(t){return r.requestDidLoad(t)}),this.xhr.addEventListener("error",function(t){return r.requestDidError(t)})}return i(t,[{key:"create",value:function(t){this.callback=t,this.xhr.send(this.file)}},{key:"requestDidLoad",value:function(t){var e=this.xhr,r=e.status,n=e.response;r>=200&&r<300?this.callback(null,n):this.requestDidError(t)}},{key:"requestDidError",value:function(t){this.callback('Error storing "'+this.file.name+'". Status: '+this.xhr.status)}}]),t}()}])});
\ No newline at end of file diff --git a/activestorage/app/controllers/active_storage/blobs_controller.rb b/activestorage/app/controllers/active_storage/blobs_controller.rb new file mode 100644 index 0000000000..a17e3852f9 --- /dev/null +++ b/activestorage/app/controllers/active_storage/blobs_controller.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +# Take a signed permanent reference for a blob and turn it into an expiring service URL for download. +# Note: These URLs are publicly accessible. If you need to enforce access protection beyond the +# security-through-obscurity factor of the signed blob references, you'll need to implement your own +# authenticated redirection controller. +class ActiveStorage::BlobsController < ActionController::Base + def show + if blob = ActiveStorage::Blob.find_signed(params[:signed_id]) + expires_in ActiveStorage::Blob.service.url_expires_in + redirect_to blob.service_url(disposition: params[:disposition]) + else + head :not_found + end + end +end diff --git a/activestorage/app/controllers/active_storage/direct_uploads_controller.rb b/activestorage/app/controllers/active_storage/direct_uploads_controller.rb new file mode 100644 index 0000000000..205d173648 --- /dev/null +++ b/activestorage/app/controllers/active_storage/direct_uploads_controller.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +# Creates a new blob on the server side in anticipation of a direct-to-service upload from the client side. +# When the client-side upload is completed, the signed_blob_id can be submitted as part of the form to reference +# the blob that was created up front. +class ActiveStorage::DirectUploadsController < ActionController::Base + def create + blob = ActiveStorage::Blob.create_before_direct_upload!(blob_args) + render json: direct_upload_json(blob) + end + + private + def blob_args + params.require(:blob).permit(:filename, :byte_size, :checksum, :content_type, :metadata).to_h.symbolize_keys + end + + def direct_upload_json(blob) + blob.as_json(methods: :signed_id).merge(direct_upload: { + url: blob.service_url_for_direct_upload, + headers: blob.service_headers_for_direct_upload + }) + end +end diff --git a/activestorage/app/controllers/active_storage/disk_controller.rb b/activestorage/app/controllers/active_storage/disk_controller.rb new file mode 100644 index 0000000000..a4fd427cb2 --- /dev/null +++ b/activestorage/app/controllers/active_storage/disk_controller.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +# Serves files stored with the disk service in the same way that the cloud services do. +# This means using expiring, signed URLs that are meant for immediate access, not permanent linking. +# Always go through the BlobsController, or your own authenticated controller, rather than directly +# to the service url. 
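For illustration only (this sketch is not part of the commit): what the DirectUploadsController above effectively does with the permitted blob parameters, expressed with methods this diff introduces. The literal filename, size, and checksum are made up.

    blob = ActiveStorage::Blob.create_before_direct_upload!(
      filename: "avatar.png", byte_size: 1024, checksum: "u/yWFCLBusIBHX3QpBn4tA==", content_type: "image/png"
    )

    blob.as_json(methods: :signed_id).merge(
      direct_upload: {
        url: blob.service_url_for_direct_upload,        # expiring upload URL on the service
        headers: blob.service_headers_for_direct_upload # headers the client must send with the upload
      }
    )

The client uploads the file to that URL with those headers, then submits blob.signed_id with the form so the blob can be attached to a record.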
+class ActiveStorage::DiskController < ActionController::Base + def show + if key = decode_verified_key + send_data disk_service.download(key), + disposition: params[:disposition], content_type: params[:content_type] + else + head :not_found + end + end + + def update + if token = decode_verified_token + if acceptable_content?(token) + disk_service.upload token[:key], request.body, checksum: token[:checksum] + else + head :unprocessable_entity + end + else + head :not_found + end + rescue ActiveStorage::IntegrityError + head :unprocessable_entity + end + + private + def disk_service + ActiveStorage::Blob.service + end + + + def decode_verified_key + ActiveStorage.verifier.verified(params[:encoded_key], purpose: :blob_key) + end + + + def decode_verified_token + ActiveStorage.verifier.verified(params[:encoded_token], purpose: :blob_token) + end + + def acceptable_content?(token) + token[:content_type] == request.content_type && token[:content_length] == request.content_length + end +end diff --git a/activestorage/app/controllers/active_storage/previews_controller.rb b/activestorage/app/controllers/active_storage/previews_controller.rb new file mode 100644 index 0000000000..9e8cf27b6e --- /dev/null +++ b/activestorage/app/controllers/active_storage/previews_controller.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +class ActiveStorage::PreviewsController < ActionController::Base + def show + if blob = ActiveStorage::Blob.find_signed(params[:signed_blob_id]) + expires_in ActiveStorage::Blob.service.url_expires_in + redirect_to ActiveStorage::Preview.new(blob, params[:variation_key]).processed.service_url(disposition: params[:disposition]) + else + head :not_found + end + end +end diff --git a/activestorage/app/controllers/active_storage/variants_controller.rb b/activestorage/app/controllers/active_storage/variants_controller.rb new file mode 100644 index 0000000000..dc5e78ecc0 --- /dev/null +++ b/activestorage/app/controllers/active_storage/variants_controller.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +# Take a signed permanent reference for a variant and turn it into an expiring service URL for download. +# Note: These URLs are publicly accessible. If you need to enforce access protection beyond the +# security-through-obscurity factor of the signed blob and variation reference, you'll need to implement your own +# authenticated redirection controller. 
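The comments on these controllers all suggest implementing your own authenticated redirection controller when signed, publicly accessible URLs are not enough. A minimal sketch under assumptions: authenticate! is a hypothetical filter, and User declares has_one_attached :avatar so Current.user.avatar delegates to its blob.

    class AvatarsController < ApplicationController
      before_action :authenticate!   # hypothetical authentication filter

      def show
        expires_in ActiveStorage::Blob.service.url_expires_in
        redirect_to Current.user.avatar.service_url(disposition: params[:disposition])
      end
    end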
+class ActiveStorage::VariantsController < ActionController::Base + def show + if blob = ActiveStorage::Blob.find_signed(params[:signed_blob_id]) + expires_in ActiveStorage::Blob.service.url_expires_in + redirect_to ActiveStorage::Variant.new(blob, params[:variation_key]).processed.service_url(disposition: params[:disposition]) + else + head :not_found + end + end +end diff --git a/activestorage/app/javascript/activestorage/blob_record.js b/activestorage/app/javascript/activestorage/blob_record.js new file mode 100644 index 0000000000..3c6e6b6ba1 --- /dev/null +++ b/activestorage/app/javascript/activestorage/blob_record.js @@ -0,0 +1,54 @@ +import { getMetaValue } from "./helpers" + +export class BlobRecord { + constructor(file, checksum, url) { + this.file = file + + this.attributes = { + filename: file.name, + content_type: file.type, + byte_size: file.size, + checksum: checksum + } + + this.xhr = new XMLHttpRequest + this.xhr.open("POST", url, true) + this.xhr.responseType = "json" + this.xhr.setRequestHeader("Content-Type", "application/json") + this.xhr.setRequestHeader("Accept", "application/json") + this.xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest") + this.xhr.setRequestHeader("X-CSRF-Token", getMetaValue("csrf-token")) + this.xhr.addEventListener("load", event => this.requestDidLoad(event)) + this.xhr.addEventListener("error", event => this.requestDidError(event)) + } + + create(callback) { + this.callback = callback + this.xhr.send(JSON.stringify({ blob: this.attributes })) + } + + requestDidLoad(event) { + const { status, response } = this.xhr + if (status >= 200 && status < 300) { + const { direct_upload } = response + delete response.direct_upload + this.attributes = response + this.directUploadData = direct_upload + this.callback(null, this.toJSON()) + } else { + this.requestDidError(event) + } + } + + requestDidError(event) { + this.callback(`Error creating Blob for "${this.file.name}". Status: ${this.xhr.status}`) + } + + toJSON() { + const result = {} + for (const key in this.attributes) { + result[key] = this.attributes[key] + } + return result + } +} diff --git a/activestorage/app/javascript/activestorage/blob_upload.js b/activestorage/app/javascript/activestorage/blob_upload.js new file mode 100644 index 0000000000..180a7415e7 --- /dev/null +++ b/activestorage/app/javascript/activestorage/blob_upload.js @@ -0,0 +1,35 @@ +export class BlobUpload { + constructor(blob) { + this.blob = blob + this.file = blob.file + + const { url, headers } = blob.directUploadData + + this.xhr = new XMLHttpRequest + this.xhr.open("PUT", url, true) + this.xhr.responseType = "text" + for (const key in headers) { + this.xhr.setRequestHeader(key, headers[key]) + } + this.xhr.addEventListener("load", event => this.requestDidLoad(event)) + this.xhr.addEventListener("error", event => this.requestDidError(event)) + } + + create(callback) { + this.callback = callback + this.xhr.send(this.file) + } + + requestDidLoad(event) { + const { status, response } = this.xhr + if (status >= 200 && status < 300) { + this.callback(null, response) + } else { + this.requestDidError(event) + } + } + + requestDidError(event) { + this.callback(`Error storing "${this.file.name}". 
Status: ${this.xhr.status}`) + } +} diff --git a/activestorage/app/javascript/activestorage/direct_upload.js b/activestorage/app/javascript/activestorage/direct_upload.js new file mode 100644 index 0000000000..7085e0a4ab --- /dev/null +++ b/activestorage/app/javascript/activestorage/direct_upload.js @@ -0,0 +1,42 @@ +import { FileChecksum } from "./file_checksum" +import { BlobRecord } from "./blob_record" +import { BlobUpload } from "./blob_upload" + +let id = 0 + +export class DirectUpload { + constructor(file, url, delegate) { + this.id = ++id + this.file = file + this.url = url + this.delegate = delegate + } + + create(callback) { + FileChecksum.create(this.file, (error, checksum) => { + const blob = new BlobRecord(this.file, checksum, this.url) + notify(this.delegate, "directUploadWillCreateBlobWithXHR", blob.xhr) + blob.create(error => { + if (error) { + callback(error) + } else { + const upload = new BlobUpload(blob) + notify(this.delegate, "directUploadWillStoreFileWithXHR", upload.xhr) + upload.create(error => { + if (error) { + callback(error) + } else { + callback(null, blob.toJSON()) + } + }) + } + }) + }) + } +} + +function notify(object, methodName, ...messages) { + if (object && typeof object[methodName] == "function") { + return object[methodName](...messages) + } +} diff --git a/activestorage/app/javascript/activestorage/direct_upload_controller.js b/activestorage/app/javascript/activestorage/direct_upload_controller.js new file mode 100644 index 0000000000..987050889a --- /dev/null +++ b/activestorage/app/javascript/activestorage/direct_upload_controller.js @@ -0,0 +1,67 @@ +import { DirectUpload } from "./direct_upload" +import { dispatchEvent } from "./helpers" + +export class DirectUploadController { + constructor(input, file) { + this.input = input + this.file = file + this.directUpload = new DirectUpload(this.file, this.url, this) + this.dispatch("initialize") + } + + start(callback) { + const hiddenInput = document.createElement("input") + hiddenInput.type = "hidden" + hiddenInput.name = this.input.name + this.input.insertAdjacentElement("beforebegin", hiddenInput) + + this.dispatch("start") + + this.directUpload.create((error, attributes) => { + if (error) { + hiddenInput.parentNode.removeChild(hiddenInput) + this.dispatchError(error) + } else { + hiddenInput.value = attributes.signed_id + } + + this.dispatch("end") + callback(error) + }) + } + + uploadRequestDidProgress(event) { + const progress = event.loaded / event.total * 100 + if (progress) { + this.dispatch("progress", { progress }) + } + } + + get url() { + return this.input.getAttribute("data-direct-upload-url") + } + + dispatch(name, detail = {}) { + detail.file = this.file + detail.id = this.directUpload.id + return dispatchEvent(this.input, `direct-upload:${name}`, { detail }) + } + + dispatchError(error) { + const event = this.dispatch("error", { error }) + if (!event.defaultPrevented) { + alert(error) + } + } + + // DirectUpload delegate + + directUploadWillCreateBlobWithXHR(xhr) { + this.dispatch("before-blob-request", { xhr }) + } + + directUploadWillStoreFileWithXHR(xhr) { + this.dispatch("before-storage-request", { xhr }) + xhr.upload.addEventListener("progress", event => this.uploadRequestDidProgress(event)) + } +} diff --git a/activestorage/app/javascript/activestorage/direct_uploads_controller.js b/activestorage/app/javascript/activestorage/direct_uploads_controller.js new file mode 100644 index 0000000000..94b89c9119 --- /dev/null +++ 
b/activestorage/app/javascript/activestorage/direct_uploads_controller.js @@ -0,0 +1,50 @@ +import { DirectUploadController } from "./direct_upload_controller" +import { findElements, dispatchEvent, toArray } from "./helpers" + +const inputSelector = "input[type=file][data-direct-upload-url]:not([disabled])" + +export class DirectUploadsController { + constructor(form) { + this.form = form + this.inputs = findElements(form, inputSelector).filter(input => input.files.length) + } + + start(callback) { + const controllers = this.createDirectUploadControllers() + + const startNextController = () => { + const controller = controllers.shift() + if (controller) { + controller.start(error => { + if (error) { + callback(error) + this.dispatch("end") + } else { + startNextController() + } + }) + } else { + callback() + this.dispatch("end") + } + } + + this.dispatch("start") + startNextController() + } + + createDirectUploadControllers() { + const controllers = [] + this.inputs.forEach(input => { + toArray(input.files).forEach(file => { + const controller = new DirectUploadController(input, file) + controllers.push(controller) + }) + }) + return controllers + } + + dispatch(name, detail = {}) { + return dispatchEvent(this.form, `direct-uploads:${name}`, { detail }) + } +} diff --git a/activestorage/app/javascript/activestorage/file_checksum.js b/activestorage/app/javascript/activestorage/file_checksum.js new file mode 100644 index 0000000000..ffaec1a128 --- /dev/null +++ b/activestorage/app/javascript/activestorage/file_checksum.js @@ -0,0 +1,53 @@ +import SparkMD5 from "spark-md5" + +const fileSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice + +export class FileChecksum { + static create(file, callback) { + const instance = new FileChecksum(file) + instance.create(callback) + } + + constructor(file) { + this.file = file + this.chunkSize = 2097152 // 2MB + this.chunkCount = Math.ceil(this.file.size / this.chunkSize) + this.chunkIndex = 0 + } + + create(callback) { + this.callback = callback + this.md5Buffer = new SparkMD5.ArrayBuffer + this.fileReader = new FileReader + this.fileReader.addEventListener("load", event => this.fileReaderDidLoad(event)) + this.fileReader.addEventListener("error", event => this.fileReaderDidError(event)) + this.readNextChunk() + } + + fileReaderDidLoad(event) { + this.md5Buffer.append(event.target.result) + + if (!this.readNextChunk()) { + const binaryDigest = this.md5Buffer.end(true) + const base64digest = btoa(binaryDigest) + this.callback(null, base64digest) + } + } + + fileReaderDidError(event) { + this.callback(`Error reading ${this.file.name}`) + } + + readNextChunk() { + if (this.chunkIndex < this.chunkCount) { + const start = this.chunkIndex * this.chunkSize + const end = Math.min(start + this.chunkSize, this.file.size) + const bytes = fileSlice.call(this.file, start, end) + this.fileReader.readAsArrayBuffer(bytes) + this.chunkIndex++ + return true + } else { + return false + } + } +} diff --git a/activestorage/app/javascript/activestorage/helpers.js b/activestorage/app/javascript/activestorage/helpers.js new file mode 100644 index 0000000000..52fec8f6f1 --- /dev/null +++ b/activestorage/app/javascript/activestorage/helpers.js @@ -0,0 +1,42 @@ +export function getMetaValue(name) { + const element = findElement(document.head, `meta[name="${name}"]`) + if (element) { + return element.getAttribute("content") + } +} + +export function findElements(root, selector) { + if (typeof root == "string") { + selector = root + root = 
document + } + const elements = root.querySelectorAll(selector) + return toArray(elements) +} + +export function findElement(root, selector) { + if (typeof root == "string") { + selector = root + root = document + } + return root.querySelector(selector) +} + +export function dispatchEvent(element, type, eventInit = {}) { + const { bubbles, cancelable, detail } = eventInit + const event = document.createEvent("Event") + event.initEvent(type, bubbles || true, cancelable || true) + event.detail = detail || {} + element.dispatchEvent(event) + return event +} + +export function toArray(value) { + if (Array.isArray(value)) { + return value + } else if (Array.from) { + return Array.from(value) + } else { + return [].slice.call(value) + } +} diff --git a/activestorage/app/javascript/activestorage/index.js b/activestorage/app/javascript/activestorage/index.js new file mode 100644 index 0000000000..a340008fb9 --- /dev/null +++ b/activestorage/app/javascript/activestorage/index.js @@ -0,0 +1,11 @@ +import { start } from "./ujs" +import { DirectUpload } from "./direct_upload" +export { start, DirectUpload } + +function autostart() { + if (window.ActiveStorage) { + start() + } +} + +setTimeout(autostart, 1) diff --git a/activestorage/app/javascript/activestorage/ujs.js b/activestorage/app/javascript/activestorage/ujs.js new file mode 100644 index 0000000000..1dda02936f --- /dev/null +++ b/activestorage/app/javascript/activestorage/ujs.js @@ -0,0 +1,75 @@ +import { DirectUploadsController } from "./direct_uploads_controller" +import { findElement } from "./helpers" + +const processingAttribute = "data-direct-uploads-processing" +let started = false + +export function start() { + if (!started) { + started = true + document.addEventListener("submit", didSubmitForm) + document.addEventListener("ajax:before", didSubmitRemoteElement) + } +} + +function didSubmitForm(event) { + handleFormSubmissionEvent(event) +} + +function didSubmitRemoteElement(event) { + if (event.target.tagName == "FORM") { + handleFormSubmissionEvent(event) + } +} + +function handleFormSubmissionEvent(event) { + const form = event.target + + if (form.hasAttribute(processingAttribute)) { + event.preventDefault() + return + } + + const controller = new DirectUploadsController(form) + const { inputs } = controller + + if (inputs.length) { + event.preventDefault() + form.setAttribute(processingAttribute, "") + inputs.forEach(disable) + controller.start(error => { + form.removeAttribute(processingAttribute) + if (error) { + inputs.forEach(enable) + } else { + submitForm(form) + } + }) + } +} + +function submitForm(form) { + let button = findElement(form, "input[type=submit]") + if (button) { + const { disabled } = button + button.disabled = false + button.focus() + button.click() + button.disabled = disabled + } else { + button = document.createElement("input") + button.type = "submit" + button.style = "display:none" + form.appendChild(button) + button.click() + form.removeChild(button) + } +} + +function disable(input) { + input.disabled = true +} + +function enable(input) { + input.disabled = false +} diff --git a/activestorage/app/jobs/active_storage/analyze_job.rb b/activestorage/app/jobs/active_storage/analyze_job.rb new file mode 100644 index 0000000000..2a952f9f74 --- /dev/null +++ b/activestorage/app/jobs/active_storage/analyze_job.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +# Provides asynchronous analysis of ActiveStorage::Blob records via ActiveStorage::Blob#analyze_later. 
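For context on the ujs.js module above: it only intercepts forms containing file inputs that match input[type=file][data-direct-upload-url]:not([disabled]). A hedged ERB sketch of such a form; messages_path is a made-up route, and rails_direct_uploads_url is assumed to be the engine's route helper for DirectUploadsController, which is defined outside this diff.

    <%= form_tag messages_path, multipart: true do %>
      <%= file_field_tag :attachments, multiple: true,
            data: { direct_upload_url: rails_direct_uploads_url } %>
      <%= submit_tag "Send" %>
    <% end %>

On submit, the UJS code disables the matching inputs, uploads each file, fills in hidden inputs with the blobs' signed IDs, and then resubmits the form.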
+class ActiveStorage::AnalyzeJob < ActiveStorage::BaseJob + def perform(blob) + blob.analyze + end +end diff --git a/activestorage/app/jobs/active_storage/base_job.rb b/activestorage/app/jobs/active_storage/base_job.rb new file mode 100644 index 0000000000..6caab42a2d --- /dev/null +++ b/activestorage/app/jobs/active_storage/base_job.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class ActiveStorage::BaseJob < ActiveJob::Base + queue_as { ActiveStorage.queue } +end diff --git a/activestorage/app/jobs/active_storage/purge_job.rb b/activestorage/app/jobs/active_storage/purge_job.rb new file mode 100644 index 0000000000..98874d2250 --- /dev/null +++ b/activestorage/app/jobs/active_storage/purge_job.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +# Provides asynchronous purging of ActiveStorage::Blob records via ActiveStorage::Blob#purge_later. +class ActiveStorage::PurgeJob < ActiveStorage::BaseJob + # FIXME: Limit this to a custom ActiveStorage error + retry_on StandardError + + def perform(blob) + blob.purge + end +end diff --git a/activestorage/app/models/active_storage/attachment.rb b/activestorage/app/models/active_storage/attachment.rb new file mode 100644 index 0000000000..9f61a5dbf3 --- /dev/null +++ b/activestorage/app/models/active_storage/attachment.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require "active_support/core_ext/module/delegation" + +# Attachments associate records with blobs. Usually that's a one record-many blobs relationship, +# but it is possible to associate many different records with the same blob. If you're doing that, +# you'll want to declare with <tt>has_one/many_attached :thingy, dependent: false</tt>, so that destroying +# any one record won't destroy the blob as well. (Then you'll need to do your own garbage collecting, though). +class ActiveStorage::Attachment < ActiveRecord::Base + self.table_name = "active_storage_attachments" + + belongs_to :record, polymorphic: true, touch: true + belongs_to :blob, class_name: "ActiveStorage::Blob" + + delegate_missing_to :blob + + after_create_commit :analyze_blob_later + + # Synchronously purges the blob (deletes it from the configured service) and destroys the attachment. + def purge + blob.purge + destroy + end + + # Destroys the attachment and asynchronously purges the blob (deletes it from the configured service). + def purge_later + blob.purge_later + destroy + end + + private + def analyze_blob_later + blob.analyze_later unless blob.analyzed? + end +end diff --git a/activestorage/app/models/active_storage/blob.rb b/activestorage/app/models/active_storage/blob.rb new file mode 100644 index 0000000000..2aa05d665e --- /dev/null +++ b/activestorage/app/models/active_storage/blob.rb @@ -0,0 +1,313 @@ +# frozen_string_literal: true + +require "active_storage/analyzer/null_analyzer" + +# A blob is a record that contains the metadata about a file and a key for where that file resides on the service. +# Blobs can be created in two ways: +# +# 1. Subsequent to the file being uploaded server-side to the service via <tt>create_after_upload!</tt>. +# 2. Ahead of the file being directly uploaded client-side to the service via <tt>create_before_direct_upload!</tt>. +# +# The first option doesn't require any client-side JavaScript integration, and can be used by any other back-end +# service that deals with files. 
The second option is faster, since you're not using your own server as a staging +# point for uploads, and can work with deployments like Heroku that do not provide large amounts of disk space. +# +# Blobs are intended to be immutable insofar as their reference to a specific file goes. You're allowed to +# update a blob's metadata on a subsequent pass, but you should not update the key or change the uploaded file. +# If you need to create a derivative or otherwise change the blob, simply create a new blob and purge the old one. +class ActiveStorage::Blob < ActiveRecord::Base + class UnpreviewableError < StandardError; end + class UnrepresentableError < StandardError; end + + self.table_name = "active_storage_blobs" + + has_secure_token :key + store :metadata, accessors: [ :analyzed ], coder: JSON + + class_attribute :service + + has_many :attachments + + has_one_attached :preview_image + + class << self + # You can use the signed ID of a blob to refer to it on the client side without fear of tampering. + # This is particularly helpful for direct uploads where the client-side needs to refer to the blob + # that was created ahead of the upload itself on form submission. + # + # The signed ID is also used to create stable URLs for the blob through the BlobsController. + def find_signed(id) + find ActiveStorage.verifier.verify(id, purpose: :blob_id) + end + + # Returns a new, unsaved blob instance after the +io+ has been uploaded to the service. + def build_after_upload(io:, filename:, content_type: nil, metadata: nil) + new.tap do |blob| + blob.filename = filename + blob.content_type = content_type + blob.metadata = metadata + + blob.upload io + end + end + + # Returns a saved blob instance after the +io+ has been uploaded to the service. Note, the blob is first built, + # then the +io+ is uploaded, then the blob is saved. This is done to avoid uploading (which may take + # time) while holding an open database transaction. + def create_after_upload!(io:, filename:, content_type: nil, metadata: nil) + build_after_upload(io: io, filename: filename, content_type: content_type, metadata: metadata).tap(&:save!) + end + + # Returns a saved blob _without_ uploading a file to the service. This blob will point to a key where there is + # no file yet. It's intended to be used together with a client-side upload, which will first create the blob + # in order to produce the signed URL for uploading. This signed URL points to the key generated by the blob. + # Once the form using the direct upload is submitted, the blob can be associated with the right record using + # the signed ID. + def create_before_direct_upload!(filename:, byte_size:, checksum:, content_type: nil, metadata: nil) + create! filename: filename, byte_size: byte_size, checksum: checksum, content_type: content_type, metadata: metadata + end + end + + + # Returns a signed ID for this blob that's suitable for reference on the client-side without fear of tampering. + # It uses the framework-wide verifier on <tt>ActiveStorage.verifier</tt>, but with a dedicated purpose. + def signed_id + ActiveStorage.verifier.generate(id, purpose: :blob_id) + end + + # Returns the key pointing to the file on the service that's associated with this blob. The key is in the + # standard secure-token format from Rails. So it'll look like: XTAPjJCJiuDrLk3TmwyJGpUo. This key is not intended + # to be revealed directly to the user. Always refer to blobs using the signed_id or a verified form of the key. 
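For illustration only (not part of the commit): the class-level API above in a console-style sketch. Every method used here is introduced in this diff; the file name is made up.

    blob = ActiveStorage::Blob.create_after_upload!(
      io: File.open("racecar.jpg"), filename: "racecar.jpg", content_type: "image/jpeg"
    )

    blob.key                                    # secure token naming the file on the service
    signed_id = blob.signed_id                  # tamper-proof reference for the client side
    ActiveStorage::Blob.find_signed(signed_id)  # => the same blob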
+ def key + # We can't wait until the record is first saved to have a key for it + self[:key] ||= self.class.generate_unique_secure_token + end + + # Returns an ActiveStorage::Filename instance of the filename that can be + # queried for basename, extension, and a sanitized version of the filename + # that's safe to use in URLs. + def filename + ActiveStorage::Filename.new(self[:filename]) + end + + # Returns true if the content_type of this blob is in the image range, like image/png. + def image? + content_type.start_with?("image") + end + + # Returns true if the content_type of this blob is in the audio range, like audio/mpeg. + def audio? + content_type.start_with?("audio") + end + + # Returns true if the content_type of this blob is in the video range, like video/mp4. + def video? + content_type.start_with?("video") + end + + # Returns true if the content_type of this blob is in the text range, like text/plain. + def text? + content_type.start_with?("text") + end + + # Returns an ActiveStorage::Variant instance with the set of +transformations+ provided. This is only relevant for image + # files, and it allows any image to be transformed for size, colors, and the like. Example: + # + # avatar.variant(resize: "100x100").processed.service_url + # + # This will create and process a variant of the avatar blob that's constrained to a height and width of 100px. + # Then it'll upload said variant to the service according to a derivative key of the blob and the transformations. + # + # Frequently, though, you don't actually want to transform the variant right away. But rather simply refer to a + # specific variant that can be created by a controller on-demand. Like so: + # + # <%= image_tag Current.user.avatar.variant(resize: "100x100") %> + # + # This will create a URL for that specific blob with that specific variant, which the ActiveStorage::VariantsController + # can then produce on-demand. + def variant(transformations) + ActiveStorage::Variant.new(self, ActiveStorage::Variation.wrap(transformations)) + end + + + # Returns an ActiveStorage::Preview instance with the set of +transformations+ provided. A preview is an image generated + # from a non-image blob. Active Storage comes with built-in previewers for videos and PDF documents. The video previewer + # extracts the first frame from a video and the PDF previewer extracts the first page from a PDF document. + # + # blob.preview(resize: "100x100").processed.service_url + # + # Avoid processing previews synchronously in views. Instead, link to a controller action that processes them on demand. + # Active Storage provides one, but you may want to create your own (for example, if you need authentication). Here’s + # how to use the built-in version: + # + # <%= image_tag video.preview(resize: "100x100") %> + # + # This method raises ActiveStorage::Blob::UnpreviewableError if no previewer accepts the receiving blob. To determine + # whether a blob is accepted by any previewer, call ActiveStorage::Blob#previewable?. + def preview(transformations) + if previewable? + ActiveStorage::Preview.new(self, ActiveStorage::Variation.wrap(transformations)) + else + raise UnpreviewableError + end + end + + # Returns true if any registered previewer accepts the blob. By default, this will return true for videos and PDF documents. + def previewable? + ActiveStorage.previewers.any? { |klass| klass.accept?(self) } + end + + + # Returns an ActiveStorage::Preview instance for a previewable blob or an ActiveStorage::Variant instance for an image blob. 
+ # + # blob.representation(resize: "100x100").processed.service_url + # + # Raises ActiveStorage::Blob::UnrepresentableError if the receiving blob is neither an image nor previewable. Call + # ActiveStorage::Blob#representable? to determine whether a blob is representable. + # + # See ActiveStorage::Blob#preview and ActiveStorage::Blob#variant for more information. + def representation(transformations) + case + when previewable? + preview transformations + when image? + variant transformations + else + raise UnrepresentableError + end + end + + # Returns true if the blob is an image or is previewable. + def representable? + image? || previewable? + end + + + # Returns the URL of the blob on the service. This URL is intended to be short-lived for security and not used directly + # with users. Instead, the +service_url+ should only be exposed as a redirect from a stable, possibly authenticated URL. + # Hiding the +service_url+ behind a redirect also gives you the power to change services without updating all URLs. And + # it allows permanent URLs that redirect to the +service_url+ to be cached in the view. + def service_url(expires_in: service.url_expires_in, disposition: "inline") + service.url key, expires_in: expires_in, disposition: disposition, filename: filename, content_type: content_type + end + + # Returns a URL that can be used to directly upload a file for this blob on the service. This URL is intended to be + # short-lived for security and only generated on-demand by the client-side JavaScript responsible for doing the uploading. + def service_url_for_direct_upload(expires_in: service.url_expires_in) + service.url_for_direct_upload key, expires_in: expires_in, content_type: content_type, content_length: byte_size, checksum: checksum + end + + # Returns a Hash of headers for +service_url_for_direct_upload+ requests. + def service_headers_for_direct_upload + service.headers_for_direct_upload key, filename: filename, content_type: content_type, content_length: byte_size, checksum: checksum + end + + # Uploads the +io+ to the service on the +key+ for this blob. Blobs are intended to be immutable, so you shouldn't be + # using this method after a file has already been uploaded to fit with a blob. If you want to create a derivative blob, + # you should instead simply create a new blob based on the old one. + # + # Prior to uploading, we compute the checksum, which is sent to the service for transit integrity validation. If the + # checksum does not match what the service receives, an exception will be raised. We also measure the size of the +io+ + # and store that in +byte_size+ on the blob record. + # + # Normally, you do not have to call this method directly at all. Use the factory class methods of +build_after_upload+ + # and +create_after_upload!+. + def upload(io) + self.checksum = compute_checksum_in_chunks(io) + self.byte_size = io.size + + service.upload(key, io, checksum: checksum) + end + + # Downloads the file associated with this blob. If no block is given, the entire file is read into memory and returned. + # That'll use a lot of RAM for very large files. If a block is given, then the download is streamed and yielded in chunks. + def download(&block) + service.download key, &block + end + + + # Extracts and stores metadata from the file associated with this blob using a relevant analyzer. Active Storage comes + # with built-in analyzers for images and videos. 
See ActiveStorage::Analyzer::ImageAnalyzer and + # ActiveStorage::Analyzer::VideoAnalyzer for information about the specific attributes they extract and the third-party + # libraries they require. + # + # To choose the analyzer for a blob, Active Storage calls +accept?+ on each registered analyzer in order. It uses the + # first analyzer for which +accept?+ returns true when given the blob. If no registered analyzer accepts the blob, no + # metadata is extracted from it. + # + # In a Rails application, add or remove analyzers by manipulating +Rails.application.config.active_storage.analyzers+ + # in an initializer: + # + # # Add a custom analyzer for Microsoft Office documents: + # Rails.application.config.active_storage.analyzers.append DOCXAnalyzer + # + # # Remove the built-in video analyzer: + # Rails.application.config.active_storage.analyzers.delete ActiveStorage::Analyzer::VideoAnalyzer + # + # Outside of a Rails application, manipulate +ActiveStorage.analyzers+ instead. + # + # You won't ordinarily need to call this method from a Rails application. New blobs are automatically and asynchronously + # analyzed via #analyze_later when they're attached for the first time. + def analyze + update! metadata: metadata.merge(extract_metadata_via_analyzer) + end + + # Enqueues an ActiveStorage::AnalyzeJob which calls #analyze. + # + # This method is automatically called for a blob when it's attached for the first time. You can call it to analyze a blob + # again (e.g. if you add a new analyzer or modify an existing one). + def analyze_later + ActiveStorage::AnalyzeJob.perform_later(self) + end + + # Returns true if the blob has been analyzed. + def analyzed? + analyzed + end + + + # Deletes the file on the service that's associated with this blob. This should only be done if the blob is going to be + # deleted as well or you will essentially have a dead reference. It's recommended to use the +#purge+ and +#purge_later+ + # methods in most circumstances. + def delete + service.delete key + end + + # Deletes the file on the service and then destroys the blob record. This is the recommended way to dispose of unwanted + # blobs. Note, though, that deleting the file off the service will initiate a HTTP connection to the service, which may + # be slow or prevented, so you should not use this method inside a transaction or in callbacks. Use +#purge_later+ instead. + def purge + delete + destroy + end + + # Enqueues an ActiveStorage::PurgeJob job that'll call +purge+. This is the recommended way to purge blobs when the call + # needs to be made from a transaction, a callback, or any other real-time scenario. 
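A short, hedged sketch of the analysis and purging API documented above. The width metadata key is an assumption about what the image analyzer extracts; it is not defined in this diff.

    blob.analyze            # runs the first analyzer that accepts the blob and merges its metadata
    blob.analyzed?          # => true
    blob.metadata["width"]  # assumed key extracted for image blobs

    blob.purge_later        # enqueues ActiveStorage::PurgeJob; safe from transactions and callbacks
    blob.purge              # deletes the file on the service, then destroys the record (synchronous)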
+ def purge_later + ActiveStorage::PurgeJob.perform_later(self) + end + + private + def compute_checksum_in_chunks(io) + Digest::MD5.new.tap do |checksum| + while chunk = io.read(5.megabytes) + checksum << chunk + end + + io.rewind + end.base64digest + end + + + def extract_metadata_via_analyzer + analyzer.metadata.merge(analyzed: true) + end + + def analyzer + analyzer_class.new(self) + end + + def analyzer_class + ActiveStorage.analyzers.detect { |klass| klass.accept?(self) } || ActiveStorage::Analyzer::NullAnalyzer + end +end diff --git a/activestorage/app/models/active_storage/filename.rb b/activestorage/app/models/active_storage/filename.rb new file mode 100644 index 0000000000..79d55dc889 --- /dev/null +++ b/activestorage/app/models/active_storage/filename.rb @@ -0,0 +1,73 @@ +# frozen_string_literal: true + +# Encapsulates a string representing a filename to provide convenient access to parts of it and sanitization. +# A Filename instance is returned by ActiveStorage::Blob#filename, and is comparable so it can be used for sorting. +class ActiveStorage::Filename + include Comparable + + def initialize(filename) + @filename = filename + end + + # Returns the part of the filename preceding any extension. + # + # ActiveStorage::Filename.new("racecar.jpg").base # => "racecar" + # ActiveStorage::Filename.new("racecar").base # => "racecar" + # ActiveStorage::Filename.new(".gitignore").base # => ".gitignore" + def base + File.basename @filename, extension_with_delimiter + end + + # Returns the extension of the filename (i.e. the substring following the last dot, excluding a dot at the + # beginning) with the dot that precedes it. If the filename has no extension, an empty string is returned. + # + # ActiveStorage::Filename.new("racecar.jpg").extension_with_delimiter # => ".jpg" + # ActiveStorage::Filename.new("racecar").extension_with_delimiter # => "" + # ActiveStorage::Filename.new(".gitignore").extension_with_delimiter # => "" + def extension_with_delimiter + File.extname @filename + end + + # Returns the extension of the filename (i.e. the substring following the last dot, excluding a dot at + # the beginning). If the filename has no extension, an empty string is returned. + # + # ActiveStorage::Filename.new("racecar.jpg").extension_without_delimiter # => "jpg" + # ActiveStorage::Filename.new("racecar").extension_without_delimiter # => "" + # ActiveStorage::Filename.new(".gitignore").extension_without_delimiter # => "" + def extension_without_delimiter + extension_with_delimiter.from(1).to_s + end + + alias_method :extension, :extension_without_delimiter + + # Returns the sanitized filename. + # + # ActiveStorage::Filename.new("foo:bar.jpg").sanitized # => "foo-bar.jpg" + # ActiveStorage::Filename.new("foo/bar.jpg").sanitized # => "foo-bar.jpg" + # + # Characters considered unsafe for storage (e.g. \, $, and the RTL override character) are replaced with a dash. + def sanitized + @filename.encode(Encoding::UTF_8, invalid: :replace, undef: :replace, replace: "�").strip.tr("\u{202E}%$|:;/\t\r\n\\", "-") + end + + def parameters + Parameters.new self + end + + # Returns the sanitized version of the filename. 
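The checksum produced by compute_checksum_in_chunks above is a base64-encoded MD5 digest of the file, which is also the value a direct-upload client must send. A rough standalone equivalent for a file on disk:

    require "digest"

    checksum = Digest::MD5.file("racecar.jpg").base64digest
    # same result as reading the IO in 5-megabyte chunks as the private method does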
+ def to_s + sanitized.to_s + end + + def as_json(*) + to_s + end + + def to_json + to_s + end + + def <=>(other) + to_s.downcase <=> other.to_s.downcase + end +end diff --git a/activestorage/app/models/active_storage/filename/parameters.rb b/activestorage/app/models/active_storage/filename/parameters.rb new file mode 100644 index 0000000000..58ce198d38 --- /dev/null +++ b/activestorage/app/models/active_storage/filename/parameters.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +class ActiveStorage::Filename::Parameters + attr_reader :filename + + def initialize(filename) + @filename = filename + end + + def combined + "#{ascii}; #{utf8}" + end + + TRADITIONAL_ESCAPED_CHAR = /[^ A-Za-z0-9!#$+.^_`|~-]/ + + def ascii + 'filename="' + percent_escape(I18n.transliterate(filename.sanitized), TRADITIONAL_ESCAPED_CHAR) + '"' + end + + RFC_5987_ESCAPED_CHAR = /[^A-Za-z0-9!#$&+.^_`|~-]/ + + def utf8 + "filename*=UTF-8''" + percent_escape(filename.sanitized, RFC_5987_ESCAPED_CHAR) + end + + def to_s + combined + end + + private + def percent_escape(string, pattern) + string.gsub(pattern) do |char| + char.bytes.map { |byte| "%%%02X" % byte }.join + end + end +end diff --git a/activestorage/app/models/active_storage/preview.rb b/activestorage/app/models/active_storage/preview.rb new file mode 100644 index 0000000000..be5053edae --- /dev/null +++ b/activestorage/app/models/active_storage/preview.rb @@ -0,0 +1,90 @@ +# frozen_string_literal: true + +# Some non-image blobs can be previewed: that is, they can be presented as images. A video blob can be previewed by +# extracting its first frame, and a PDF blob can be previewed by extracting its first page. +# +# A previewer extracts a preview image from a blob. Active Storage provides previewers for videos and PDFs: +# ActiveStorage::Previewer::VideoPreviewer and ActiveStorage::Previewer::PDFPreviewer. Build custom previewers by +# subclassing ActiveStorage::Previewer and implementing the requisite methods. Consult the ActiveStorage::Previewer +# documentation for more details on what's required of previewers. +# +# To choose the previewer for a blob, Active Storage calls +accept?+ on each registered previewer in order. It uses the +# first previewer for which +accept?+ returns true when given the blob. In a Rails application, add or remove previewers +# by manipulating +Rails.application.config.active_storage.previewers+ in an initializer: +# +# Rails.application.config.active_storage.previewers +# # => [ ActiveStorage::Previewer::PDFPreviewer, ActiveStorage::Previewer::VideoPreviewer ] +# +# # Add a custom previewer for Microsoft Office documents: +# Rails.application.config.active_storage.previewers << DOCXPreviewer +# # => [ ActiveStorage::Previewer::PDFPreviewer, ActiveStorage::Previewer::VideoPreviewer, DOCXPreviewer ] +# +# Outside of a Rails application, modify +ActiveStorage.previewers+ instead. +# +# The built-in previewers rely on third-party system libraries: +# +# * {ffmpeg}[https://www.ffmpeg.org] +# * {mupdf}[https://mupdf.com] +# +# These libraries are not provided by Rails. You must install them yourself to use the built-in previewers. Before you +# install and use third-party software, make sure you understand the licensing implications of doing so. 
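To make the previewer contract concrete, a custom previewer such as the DOCXPreviewer mentioned above might look like the sketch below. It is hypothetical and not part of this patch: the content type check and the docx2png command are stand-ins, and the download_blob_to_tempfile and draw helpers follow the pattern of the built-in previewers; consult the ActiveStorage::Previewer documentation for the exact API.

    class DOCXPreviewer < ActiveStorage::Previewer
      def self.accept?(blob)
        blob.content_type == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
      end

      def preview
        download_blob_to_tempfile do |input|
          # Render the first page to PNG with a hypothetical converter, then hand the
          # resulting IO back to Active Storage as the preview image attachable.
          draw "docx2png", input.path, "-" do |output|
            yield io: output, filename: "#{blob.filename.base}.png", content_type: "image/png"
          end
        end
      end
    end

    # Registered in an initializer, as described above:
    # Rails.application.config.active_storage.previewers << DOCXPreviewer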
+class ActiveStorage::Preview + class UnprocessedError < StandardError; end + + attr_reader :blob, :variation + + def initialize(blob, variation_or_variation_key) + @blob, @variation = blob, ActiveStorage::Variation.wrap(variation_or_variation_key) + end + + # Processes the preview if it has not been processed yet. Returns the receiving Preview instance for convenience: + # + # blob.preview(resize: "100x100").processed.service_url + # + # Processing a preview generates an image from its blob and attaches the preview image to the blob. Because the preview + # image is stored with the blob, it is only generated once. + def processed + process unless processed? + self + end + + # Returns the blob's attached preview image. + def image + blob.preview_image + end + + # Returns the URL of the preview's variant on the service. Raises ActiveStorage::Preview::UnprocessedError if the + # preview has not been processed yet. + # + # This method synchronously processes a variant of the preview image, so do not call it in views. Instead, generate + # a stable URL that redirects to the short-lived URL returned by this method. + def service_url(**options) + if processed? + variant.service_url(options) + else + raise UnprocessedError + end + end + + private + def processed? + image.attached? + end + + def process + previewer.preview { |attachable| image.attach(attachable) } + end + + def variant + ActiveStorage::Variant.new(image, variation).processed + end + + + def previewer + previewer_class.new(blob) + end + + def previewer_class + ActiveStorage.previewers.detect { |klass| klass.accept?(blob) } + end +end diff --git a/activestorage/app/models/active_storage/variant.rb b/activestorage/app/models/active_storage/variant.rb new file mode 100644 index 0000000000..fa5aa69bd3 --- /dev/null +++ b/activestorage/app/models/active_storage/variant.rb @@ -0,0 +1,86 @@ +# frozen_string_literal: true + +# Image blobs can have variants that are the result of a set of transformations applied to the original. +# These variants are used to create thumbnails, fixed-size avatars, or any other derivative image from the +# original. +# +# Variants rely on {MiniMagick}[https://github.com/minimagick/minimagick] for the actual transformations +# of the file, so you must add <tt>gem "mini_magick"</tt> to your Gemfile if you wish to use variants. +# +# Note that to create a variant it's necessary to download the entire blob file from the service and load it +# into memory. The larger the image, the more memory is used. Because of this process, you also want to be +# considerate about when the variant is actually processed. You shouldn't be processing variants inline in a +# template, for example. Delay the processing to an on-demand controller, like the one provided in +# ActiveStorage::VariantsController. +# +# To refer to such a delayed on-demand variant, simply link to the variant through the resolved route provided +# by Active Storage like so: +# +# <%= image_tag url_for(Current.user.avatar.variant(resize: "100x100")) %> +# +# This will create a URL for that specific blob with that specific variant, which the ActiveStorage::VariantsController +# can then produce on-demand. +# +# When you do want to actually produce the variant needed, call +processed+. This will check that the variant +# has already been processed and uploaded to the service, and, if so, just return that. Otherwise it will perform +# the transformations, upload the variant to the service, and return itself again. 
Example: +# +# avatar.variant(resize: "100x100").processed.service_url +# +# This will create and process a variant of the avatar blob that's constrained to a height and width of 100. +# Then it'll upload said variant to the service according to a derivative key of the blob and the transformations. +# +# A list of all possible transformations is available at https://www.imagemagick.org/script/mogrify.php. You can +# combine as many as you like freely: +# +# avatar.variant(resize: "100x100", monochrome: true, rotate: "-90") +class ActiveStorage::Variant + attr_reader :blob, :variation + delegate :service, to: :blob + + def initialize(blob, variation_or_variation_key) + @blob, @variation = blob, ActiveStorage::Variation.wrap(variation_or_variation_key) + end + + # Returns the variant instance itself after it's been processed or an existing processing has been found on the service. + def processed + process unless processed? + self + end + + # Returns a combination key of the blob and the variation that together identify a specific variant. + def key + "variants/#{blob.key}/#{Digest::SHA256.hexdigest(variation.key)}" + end + + # Returns the URL of the variant on the service. This URL is intended to be short-lived for security and not used directly + # with users. Instead, the +service_url+ should only be exposed as a redirect from a stable, possibly authenticated URL. + # Hiding the +service_url+ behind a redirect also gives you the power to change services without updating all URLs. And + # it allows permanent URLs that redirect to the +service_url+ to be cached in the view. + # + # Use <tt>url_for(variant)</tt> (or the implied form, like +link_to variant+ or +redirect_to variant+) to get the stable URL + # for a variant that points to the ActiveStorage::VariantsController, which in turn will use this +service_url+ method + # for its redirection. + def service_url(expires_in: service.url_expires_in, disposition: :inline) + service.url key, expires_in: expires_in, disposition: disposition, filename: blob.filename, content_type: blob.content_type + end + + # Returns the receiving variant. Allows ActiveStorage::Variant and ActiveStorage::Preview instances to be used interchangeably. + def image + self + end + + private + def processed? + service.exist?(key) + end + + def process + service.upload key, transform(service.download(blob.key)) + end + + def transform(io) + require "mini_magick" + File.open MiniMagick::Image.read(io).tap { |image| variation.transform(image) }.path + end +end diff --git a/activestorage/app/models/active_storage/variation.rb new file mode 100644 index 0000000000..13bad87cac --- /dev/null +++ b/activestorage/app/models/active_storage/variation.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +# A set of transformations that can be applied to a blob to create a variant. This class is exposed via +# the ActiveStorage::Blob#variant method and should rarely be used directly. +# +# In case you do need to use this directly, it's instantiated using a hash of transformations where +# the key is the command and the value is the arguments. Example: +# +# ActiveStorage::Variation.new(resize: "100x100", monochrome: true, trim: true, rotate: "-90") +# +# A list of all possible transformations is available at https://www.imagemagick.org/script/mogrify.php. +class ActiveStorage::Variation + attr_reader :transformations + + class << self + # Returns a Variation instance based on the given variator.
If the variator is a Variation, it is + # returned unmodified. If it is a String, it is passed to ActiveStorage::Variation.decode. Otherwise, + # it is assumed to be a transformations Hash and is passed directly to the constructor. + def wrap(variator) + case variator + when self + variator + when String + decode variator + else + new variator + end + end + + # Returns a Variation instance with the transformations that were encoded by +encode+. + def decode(key) + new ActiveStorage.verifier.verify(key, purpose: :variation) + end + + # Returns a signed key for the +transformations+, which can be used to refer to a specific + # variation in a URL or combined key (like <tt>ActiveStorage::Variant#key</tt>). + def encode(transformations) + ActiveStorage.verifier.generate(transformations, purpose: :variation) + end + end + + def initialize(transformations) + @transformations = transformations + end + + # Accepts an open MiniMagick image instance, like what's returned by <tt>MiniMagick::Image.read(io)</tt>, + # and performs the +transformations+ against it. The image is transformed in place. + def transform(image) + transformations.each do |(method, argument)| + if eligible_argument?(argument) + image.public_send(method, argument) + else + image.public_send(method) + end + end + end + + # Returns a signed key for all the +transformations+ that this variation was instantiated with. + def key + self.class.encode(transformations) + end + + private + def eligible_argument?(argument) + argument.present? && argument != true + end +end
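To tie the signing API together, here is a brief usage sketch (it assumes ActiveStorage.verifier is configured, as the Rails engine does): a transformations hash is encoded into a tamper-proof key, and +wrap+ decodes it back when given the string form. ActiveStorage::Variant#key, shown earlier, then digests this signed key alongside the blob key to locate the processed variant on the service.

    transformations = { resize: "100x100", monochrome: true }
    key = ActiveStorage::Variation.encode(transformations)

    variation = ActiveStorage::Variation.wrap(key) # a String, so it is decoded
    variation.transformations # => the original transformations hash
    variation.key             # => a newly signed key for the same transformations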