New Case Study:See how Anthropic automated 95% of dependency reviews with Socket.Learn More
Socket
Sign inDemoInstall
Socket

@rails/activestorage

Package Overview
Dependencies
Maintainers
11
Versions
132
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@rails/activestorage - npm Package Compare versions

Comparing version 6.1.4 to 7.0.0-alpha1

app/assets/javascripts/activestorage.esm.js

633

app/assets/javascripts/activestorage.js
(function(global, factory) {
typeof exports === "object" && typeof module !== "undefined" ? factory(exports) : typeof define === "function" && define.amd ? define([ "exports" ], factory) : factory(global.ActiveStorage = {});
})(this, function(exports) {
typeof exports === "object" && typeof module !== "undefined" ? factory(exports) : typeof define === "function" && define.amd ? define([ "exports" ], factory) : (global = typeof globalThis !== "undefined" ? globalThis : global || self,
factory(global.ActiveStorage = {}));
})(this, (function(exports) {
"use strict";
function createCommonjsModule(fn, module) {
return module = {
exports: {}
}, fn(module, module.exports), module.exports;
}
var sparkMd5 = createCommonjsModule(function(module, exports) {
var sparkMd5 = {
exports: {}
};
(function(module, exports) {
(function(factory) {

@@ -15,3 +14,3 @@ {

}
})(function(undefined) {
})((function(undefined$1) {
var hex_chr = [ "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f" ];

@@ -247,3 +246,3 @@ function md5cycle(x, k) {

var length = this.byteLength, begin = clamp(from, length), end = length, num, target, targetArray, sourceArray;
if (to !== undefined) {
if (to !== undefined$1) {
end = clamp(to, length);

@@ -332,3 +331,3 @@ }

length: this._length,
hash: this._hash
hash: this._hash.slice()
};

@@ -418,36 +417,12 @@ };

return SparkMD5;
});
});
var classCallCheck = function(instance, Constructor) {
if (!(instance instanceof Constructor)) {
throw new TypeError("Cannot call a class as a function");
}));
})(sparkMd5);
var SparkMD5 = sparkMd5.exports;
const fileSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
class FileChecksum {
static create(file, callback) {
const instance = new FileChecksum(file);
instance.create(callback);
}
};
var createClass = function() {
function defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
return function(Constructor, protoProps, staticProps) {
if (protoProps) defineProperties(Constructor.prototype, protoProps);
if (staticProps) defineProperties(Constructor, staticProps);
return Constructor;
};
}();
var fileSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
var FileChecksum = function() {
createClass(FileChecksum, null, [ {
key: "create",
value: function create(file, callback) {
var instance = new FileChecksum(file);
instance.create(callback);
}
} ]);
function FileChecksum(file) {
classCallCheck(this, FileChecksum);
constructor(file) {
this.file = file;

@@ -458,51 +433,36 @@ this.chunkSize = 2097152;

}
createClass(FileChecksum, [ {
key: "create",
value: function create(callback) {
var _this = this;
this.callback = callback;
this.md5Buffer = new sparkMd5.ArrayBuffer();
this.fileReader = new FileReader();
this.fileReader.addEventListener("load", function(event) {
return _this.fileReaderDidLoad(event);
});
this.fileReader.addEventListener("error", function(event) {
return _this.fileReaderDidError(event);
});
this.readNextChunk();
create(callback) {
this.callback = callback;
this.md5Buffer = new SparkMD5.ArrayBuffer;
this.fileReader = new FileReader;
this.fileReader.addEventListener("load", (event => this.fileReaderDidLoad(event)));
this.fileReader.addEventListener("error", (event => this.fileReaderDidError(event)));
this.readNextChunk();
}
fileReaderDidLoad(event) {
this.md5Buffer.append(event.target.result);
if (!this.readNextChunk()) {
const binaryDigest = this.md5Buffer.end(true);
const base64digest = btoa(binaryDigest);
this.callback(null, base64digest);
}
}, {
key: "fileReaderDidLoad",
value: function fileReaderDidLoad(event) {
this.md5Buffer.append(event.target.result);
if (!this.readNextChunk()) {
var binaryDigest = this.md5Buffer.end(true);
var base64digest = btoa(binaryDigest);
this.callback(null, base64digest);
}
}
fileReaderDidError(event) {
this.callback(`Error reading ${this.file.name}`);
}
readNextChunk() {
if (this.chunkIndex < this.chunkCount || this.chunkIndex == 0 && this.chunkCount == 0) {
const start = this.chunkIndex * this.chunkSize;
const end = Math.min(start + this.chunkSize, this.file.size);
const bytes = fileSlice.call(this.file, start, end);
this.fileReader.readAsArrayBuffer(bytes);
this.chunkIndex++;
return true;
} else {
return false;
}
}, {
key: "fileReaderDidError",
value: function fileReaderDidError(event) {
this.callback("Error reading " + this.file.name);
}
}, {
key: "readNextChunk",
value: function readNextChunk() {
if (this.chunkIndex < this.chunkCount || this.chunkIndex == 0 && this.chunkCount == 0) {
var start = this.chunkIndex * this.chunkSize;
var end = Math.min(start + this.chunkSize, this.file.size);
var bytes = fileSlice.call(this.file, start, end);
this.fileReader.readAsArrayBuffer(bytes);
this.chunkIndex++;
return true;
} else {
return false;
}
}
} ]);
return FileChecksum;
}();
}
}
function getMetaValue(name) {
var element = findElement(document.head, 'meta[name="' + name + '"]');
const element = findElement(document.head, `meta[name="${name}"]`);
if (element) {

@@ -517,4 +477,4 @@ return element.getAttribute("content");

}
var elements = root.querySelectorAll(selector);
return toArray$1(elements);
const elements = root.querySelectorAll(selector);
return toArray(elements);
}

@@ -528,7 +488,6 @@ function findElement(root, selector) {

}
function dispatchEvent(element, type) {
var eventInit = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
var disabled = element.disabled;
var bubbles = eventInit.bubbles, cancelable = eventInit.cancelable, detail = eventInit.detail;
var event = document.createEvent("Event");
function dispatchEvent(element, type, eventInit = {}) {
const {disabled: disabled} = element;
const {bubbles: bubbles, cancelable: cancelable, detail: detail} = eventInit;
const event = document.createEvent("Event");
event.initEvent(type, bubbles || true, cancelable || true);

@@ -544,3 +503,3 @@ event.detail = detail || {};

}
function toArray$1(value) {
function toArray(value) {
if (Array.isArray(value)) {

@@ -554,6 +513,4 @@ return value;

}
var BlobRecord = function() {
function BlobRecord(file, checksum, url) {
var _this = this;
classCallCheck(this, BlobRecord);
class BlobRecord {
constructor(file, checksum, url) {
this.file = file;

@@ -566,3 +523,3 @@ this.attributes = {

};
this.xhr = new XMLHttpRequest();
this.xhr = new XMLHttpRequest;
this.xhr.open("POST", url, true);

@@ -573,115 +530,82 @@ this.xhr.responseType = "json";

this.xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest");
var csrfToken = getMetaValue("csrf-token");
const csrfToken = getMetaValue("csrf-token");
if (csrfToken != undefined) {
this.xhr.setRequestHeader("X-CSRF-Token", csrfToken);
}
this.xhr.addEventListener("load", function(event) {
return _this.requestDidLoad(event);
});
this.xhr.addEventListener("error", function(event) {
return _this.requestDidError(event);
});
this.xhr.addEventListener("load", (event => this.requestDidLoad(event)));
this.xhr.addEventListener("error", (event => this.requestDidError(event)));
}
createClass(BlobRecord, [ {
key: "create",
value: function create(callback) {
this.callback = callback;
this.xhr.send(JSON.stringify({
blob: this.attributes
}));
get status() {
return this.xhr.status;
}
get response() {
const {responseType: responseType, response: response} = this.xhr;
if (responseType == "json") {
return response;
} else {
return JSON.parse(response);
}
}, {
key: "requestDidLoad",
value: function requestDidLoad(event) {
if (this.status >= 200 && this.status < 300) {
var response = this.response;
var direct_upload = response.direct_upload;
delete response.direct_upload;
this.attributes = response;
this.directUploadData = direct_upload;
this.callback(null, this.toJSON());
} else {
this.requestDidError(event);
}
}
create(callback) {
this.callback = callback;
this.xhr.send(JSON.stringify({
blob: this.attributes
}));
}
requestDidLoad(event) {
if (this.status >= 200 && this.status < 300) {
const {response: response} = this;
const {direct_upload: direct_upload} = response;
delete response.direct_upload;
this.attributes = response;
this.directUploadData = direct_upload;
this.callback(null, this.toJSON());
} else {
this.requestDidError(event);
}
}, {
key: "requestDidError",
value: function requestDidError(event) {
this.callback('Error creating Blob for "' + this.file.name + '". Status: ' + this.status);
}
requestDidError(event) {
this.callback(`Error creating Blob for "${this.file.name}". Status: ${this.status}`);
}
toJSON() {
const result = {};
for (const key in this.attributes) {
result[key] = this.attributes[key];
}
}, {
key: "toJSON",
value: function toJSON() {
var result = {};
for (var key in this.attributes) {
result[key] = this.attributes[key];
}
return result;
}
}, {
key: "status",
get: function get$$1() {
return this.xhr.status;
}
}, {
key: "response",
get: function get$$1() {
var _xhr = this.xhr, responseType = _xhr.responseType, response = _xhr.response;
if (responseType == "json") {
return response;
} else {
return JSON.parse(response);
}
}
} ]);
return BlobRecord;
}();
var BlobUpload = function() {
function BlobUpload(blob) {
var _this = this;
classCallCheck(this, BlobUpload);
return result;
}
}
class BlobUpload {
constructor(blob) {
this.blob = blob;
this.file = blob.file;
var _blob$directUploadDat = blob.directUploadData, url = _blob$directUploadDat.url, headers = _blob$directUploadDat.headers;
this.xhr = new XMLHttpRequest();
const {url: url, headers: headers} = blob.directUploadData;
this.xhr = new XMLHttpRequest;
this.xhr.open("PUT", url, true);
this.xhr.responseType = "text";
for (var key in headers) {
for (const key in headers) {
this.xhr.setRequestHeader(key, headers[key]);
}
this.xhr.addEventListener("load", function(event) {
return _this.requestDidLoad(event);
});
this.xhr.addEventListener("error", function(event) {
return _this.requestDidError(event);
});
this.xhr.addEventListener("load", (event => this.requestDidLoad(event)));
this.xhr.addEventListener("error", (event => this.requestDidError(event)));
}
createClass(BlobUpload, [ {
key: "create",
value: function create(callback) {
this.callback = callback;
this.xhr.send(this.file.slice());
create(callback) {
this.callback = callback;
this.xhr.send(this.file.slice());
}
requestDidLoad(event) {
const {status: status, response: response} = this.xhr;
if (status >= 200 && status < 300) {
this.callback(null, response);
} else {
this.requestDidError(event);
}
}, {
key: "requestDidLoad",
value: function requestDidLoad(event) {
var _xhr = this.xhr, status = _xhr.status, response = _xhr.response;
if (status >= 200 && status < 300) {
this.callback(null, response);
} else {
this.requestDidError(event);
}
}
}, {
key: "requestDidError",
value: function requestDidError(event) {
this.callback('Error storing "' + this.file.name + '". Status: ' + this.xhr.status);
}
} ]);
return BlobUpload;
}();
var id = 0;
var DirectUpload = function() {
function DirectUpload(file, url, delegate) {
classCallCheck(this, DirectUpload);
}
requestDidError(event) {
this.callback(`Error storing "${this.file.name}". Status: ${this.xhr.status}`);
}
}
let id = 0;
class DirectUpload {
constructor(file, url, delegate) {
this.id = ++id;

@@ -692,44 +616,35 @@ this.file = file;

}
createClass(DirectUpload, [ {
key: "create",
value: function create(callback) {
var _this = this;
FileChecksum.create(this.file, function(error, checksum) {
create(callback) {
FileChecksum.create(this.file, ((error, checksum) => {
if (error) {
callback(error);
return;
}
const blob = new BlobRecord(this.file, checksum, this.url);
notify(this.delegate, "directUploadWillCreateBlobWithXHR", blob.xhr);
blob.create((error => {
if (error) {
callback(error);
return;
} else {
const upload = new BlobUpload(blob);
notify(this.delegate, "directUploadWillStoreFileWithXHR", upload.xhr);
upload.create((error => {
if (error) {
callback(error);
} else {
callback(null, blob.toJSON());
}
}));
}
var blob = new BlobRecord(_this.file, checksum, _this.url);
notify(_this.delegate, "directUploadWillCreateBlobWithXHR", blob.xhr);
blob.create(function(error) {
if (error) {
callback(error);
} else {
var upload = new BlobUpload(blob);
notify(_this.delegate, "directUploadWillStoreFileWithXHR", upload.xhr);
upload.create(function(error) {
if (error) {
callback(error);
} else {
callback(null, blob.toJSON());
}
});
}
});
});
}
} ]);
return DirectUpload;
}();
function notify(object, methodName) {
}));
}));
}
}
function notify(object, methodName, ...messages) {
if (object && typeof object[methodName] == "function") {
for (var _len = arguments.length, messages = Array(_len > 2 ? _len - 2 : 0), _key = 2; _key < _len; _key++) {
messages[_key - 2] = arguments[_key];
}
return object[methodName].apply(object, messages);
return object[methodName](...messages);
}
}
var DirectUploadController = function() {
function DirectUploadController(input, file) {
classCallCheck(this, DirectUploadController);
class DirectUploadController {
constructor(input, file) {
this.input = input;

@@ -740,137 +655,103 @@ this.file = file;

}
createClass(DirectUploadController, [ {
key: "start",
value: function start(callback) {
var _this = this;
var hiddenInput = document.createElement("input");
hiddenInput.type = "hidden";
hiddenInput.name = this.input.name;
this.input.insertAdjacentElement("beforebegin", hiddenInput);
this.dispatch("start");
this.directUpload.create(function(error, attributes) {
if (error) {
hiddenInput.parentNode.removeChild(hiddenInput);
_this.dispatchError(error);
} else {
hiddenInput.value = attributes.signed_id;
}
_this.dispatch("end");
callback(error);
});
}
}, {
key: "uploadRequestDidProgress",
value: function uploadRequestDidProgress(event) {
var progress = event.loaded / event.total * 100;
if (progress) {
this.dispatch("progress", {
progress: progress
});
start(callback) {
const hiddenInput = document.createElement("input");
hiddenInput.type = "hidden";
hiddenInput.name = this.input.name;
this.input.insertAdjacentElement("beforebegin", hiddenInput);
this.dispatch("start");
this.directUpload.create(((error, attributes) => {
if (error) {
hiddenInput.parentNode.removeChild(hiddenInput);
this.dispatchError(error);
} else {
hiddenInput.value = attributes.signed_id;
}
}
}, {
key: "dispatch",
value: function dispatch(name) {
var detail = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
detail.file = this.file;
detail.id = this.directUpload.id;
return dispatchEvent(this.input, "direct-upload:" + name, {
detail: detail
this.dispatch("end");
callback(error);
}));
}
uploadRequestDidProgress(event) {
const progress = event.loaded / event.total * 100;
if (progress) {
this.dispatch("progress", {
progress: progress
});
}
}, {
key: "dispatchError",
value: function dispatchError(error) {
var event = this.dispatch("error", {
error: error
});
if (!event.defaultPrevented) {
alert(error);
}
}
get url() {
return this.input.getAttribute("data-direct-upload-url");
}
dispatch(name, detail = {}) {
detail.file = this.file;
detail.id = this.directUpload.id;
return dispatchEvent(this.input, `direct-upload:${name}`, {
detail: detail
});
}
dispatchError(error) {
const event = this.dispatch("error", {
error: error
});
if (!event.defaultPrevented) {
alert(error);
}
}, {
key: "directUploadWillCreateBlobWithXHR",
value: function directUploadWillCreateBlobWithXHR(xhr) {
this.dispatch("before-blob-request", {
xhr: xhr
});
}
}, {
key: "directUploadWillStoreFileWithXHR",
value: function directUploadWillStoreFileWithXHR(xhr) {
var _this2 = this;
this.dispatch("before-storage-request", {
xhr: xhr
});
xhr.upload.addEventListener("progress", function(event) {
return _this2.uploadRequestDidProgress(event);
});
}
}, {
key: "url",
get: function get$$1() {
return this.input.getAttribute("data-direct-upload-url");
}
} ]);
return DirectUploadController;
}();
var inputSelector = "input[type=file][data-direct-upload-url]:not([disabled])";
var DirectUploadsController = function() {
function DirectUploadsController(form) {
classCallCheck(this, DirectUploadsController);
}
directUploadWillCreateBlobWithXHR(xhr) {
this.dispatch("before-blob-request", {
xhr: xhr
});
}
directUploadWillStoreFileWithXHR(xhr) {
this.dispatch("before-storage-request", {
xhr: xhr
});
xhr.upload.addEventListener("progress", (event => this.uploadRequestDidProgress(event)));
}
}
const inputSelector = "input[type=file][data-direct-upload-url]:not([disabled])";
class DirectUploadsController {
constructor(form) {
this.form = form;
this.inputs = findElements(form, inputSelector).filter(function(input) {
return input.files.length;
this.inputs = findElements(form, inputSelector).filter((input => input.files.length));
}
start(callback) {
const controllers = this.createDirectUploadControllers();
const startNextController = () => {
const controller = controllers.shift();
if (controller) {
controller.start((error => {
if (error) {
callback(error);
this.dispatch("end");
} else {
startNextController();
}
}));
} else {
callback();
this.dispatch("end");
}
};
this.dispatch("start");
startNextController();
}
createDirectUploadControllers() {
const controllers = [];
this.inputs.forEach((input => {
toArray(input.files).forEach((file => {
const controller = new DirectUploadController(input, file);
controllers.push(controller);
}));
}));
return controllers;
}
dispatch(name, detail = {}) {
return dispatchEvent(this.form, `direct-uploads:${name}`, {
detail: detail
});
}
createClass(DirectUploadsController, [ {
key: "start",
value: function start(callback) {
var _this = this;
var controllers = this.createDirectUploadControllers();
var startNextController = function startNextController() {
var controller = controllers.shift();
if (controller) {
controller.start(function(error) {
if (error) {
callback(error);
_this.dispatch("end");
} else {
startNextController();
}
});
} else {
callback();
_this.dispatch("end");
}
};
this.dispatch("start");
startNextController();
}
}, {
key: "createDirectUploadControllers",
value: function createDirectUploadControllers() {
var controllers = [];
this.inputs.forEach(function(input) {
toArray$1(input.files).forEach(function(file) {
var controller = new DirectUploadController(input, file);
controllers.push(controller);
});
});
return controllers;
}
}, {
key: "dispatch",
value: function dispatch(name) {
var detail = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
return dispatchEvent(this.form, "direct-uploads:" + name, {
detail: detail
});
}
} ]);
return DirectUploadsController;
}();
var processingAttribute = "data-direct-uploads-processing";
var submitButtonsByForm = new WeakMap();
var started = false;
}
const processingAttribute = "data-direct-uploads-processing";
const submitButtonsByForm = new WeakMap;
let started = false;
function start() {

@@ -880,3 +761,3 @@ if (!started) {

document.addEventListener("click", didClick, true);
document.addEventListener("submit", didSubmitForm);
document.addEventListener("submit", didSubmitForm, true);
document.addEventListener("ajax:before", didSubmitRemoteElement);

@@ -886,3 +767,3 @@ }

function didClick(event) {
var target = event.target;
const {target: target} = event;
if ((target.tagName == "INPUT" || target.tagName == "BUTTON") && target.type == "submit" && target.form) {

@@ -901,3 +782,3 @@ submitButtonsByForm.set(target.form, target);

function handleFormSubmissionEvent(event) {
var form = event.target;
const form = event.target;
if (form.hasAttribute(processingAttribute)) {

@@ -907,4 +788,4 @@ event.preventDefault();

}
var controller = new DirectUploadsController(form);
var inputs = controller.inputs;
const controller = new DirectUploadsController(form);
const {inputs: inputs} = controller;
if (inputs.length) {

@@ -914,3 +795,3 @@ event.preventDefault();

inputs.forEach(disable);
controller.start(function(error) {
controller.start((error => {
form.removeAttribute(processingAttribute);

@@ -922,9 +803,9 @@ if (error) {

}
});
}));
}
}
function submitForm(form) {
var button = submitButtonsByForm.get(form) || findElement(form, "input[type=submit], button[type=submit]");
let button = submitButtonsByForm.get(form) || findElement(form, "input[type=submit], button[type=submit]");
if (button) {
var _button = button, disabled = _button.disabled;
const {disabled: disabled} = button;
button.disabled = false;

@@ -956,7 +837,7 @@ button.focus();

setTimeout(autostart, 1);
exports.DirectUpload = DirectUpload;
exports.start = start;
exports.DirectUpload = DirectUpload;
Object.defineProperty(exports, "__esModule", {
value: true
});
});
}));

@@ -1,306 +0,221 @@

## Rails 6.1.4 (June 24, 2021) ##
## Rails 7.0.0.alpha1 (September 15, 2021) ##
* The parameters sent to `ffmpeg` for generating a video preview image are now
configurable under `config.active_storage.video_preview_arguments`.
* Emit Active Support instrumentation events from Active Storage analyzers.
*Brendon Muir*
Fixes #42930
* Fix Active Storage update task when running in an engine.
*Shouichi Kamiya*
Justin Malčić*
* Add support for byte range requests
* Don't raise an error if the mime type is not recognized.
*Tom Prats*
Fixes #41777.
* Attachments can be deleted after their association is no longer defined.
*Alex Ghiculescu*
Fixes #42514
* `ActiveStorage::PreviewError` is raised when a previewer is unable to generate a preview image.
*Don Sisco*
*Alex Robbin*
* Make `vips` the default variant processor for new apps.
* respond with 404 given invalid variation key when asking for representations.
See the upgrade guide for instructions on converting from `mini_magick` to `vips`. `mini_magick` is
not deprecated, existing apps can keep using it.
*George Claghorn*
*Breno Gazzola*
* `Blob` creation shouldn't crash if no service selected.
* Deprecate `ActiveStorage::Current.host` in favor of `ActiveStorage::Current.url_options` which accepts
a host, protocol and port.
*Alex Ghiculescu*
*Santiago Bartesaghi*
* Allow using [IAM](https://cloud.google.com/storage/docs/access-control/signed-urls) when signing URLs with GCS.
## Rails 6.1.3.2 (May 05, 2021) ##
```yaml
gcs:
service: GCS
...
iam: true
```
* No changes.
*RRethy*
* OpenSSL constants are now used for Digest computations.
## Rails 6.1.3.1 (March 26, 2021) ##
*Dirkjan Bussink*
* Marcel is upgraded to version 1.0.0 to avoid a dependency on GPL-licensed
mime types data.
* Deprecate `config.active_storage.replace_on_assign_to_many`. Future versions of Rails
will behave the same way as when the config is set to `true`.
*George Claghorn*
*Santiago Bartesaghi*
* Remove deprecated methods: `build_after_upload`, `create_after_upload!` in favor of `create_and_upload!`,
and `service_url` in favor of `url`.
## Rails 6.1.3 (February 17, 2021) ##
*Santiago Bartesaghi*
* No changes.
* Add support of `strict_loading_by_default` to `ActiveStorage::Representations` controllers.
*Anton Topchii*, *Andrew White*
## Rails 6.1.2.1 (February 10, 2021) ##
* Allow to detach an attachment when record is not persisted.
* No changes.
*Jacopo Beschi*
* Use libvips instead of ImageMagick to analyze images when `active_storage.variant_processor = vips`.
## Rails 6.1.2 (February 09, 2021) ##
*Breno Gazzola*
* No changes.
* Add metadata value for presence of video channel in video blobs.
The `metadata` attribute of video blobs has a new boolean key named `video` that is set to
`true` if the file has an video channel and `false` if it doesn't.
## Rails 6.1.1 (January 07, 2021) ##
*Breno Gazzola*
* Fix S3 multipart uploads when threshold is larger than file.
* Deprecate usage of `purge` and `purge_later` from the association extension.
*Matt Muller*
*Jacopo Beschi*
* Passing extra parameters in `ActiveStorage::Blob#url` to S3 Client.
## Rails 6.1.0 (December 09, 2020) ##
This allows calls of `ActiveStorage::Blob#url` to have more interaction with
the S3 Presigner, enabling, amongst other options, custom S3 domain URL
Generation.
* Change default queue name of the analysis (`:active_storage_analysis`) and
purge (`:active_storage_purge`) jobs to be the job adapter's default (`:default`).
```ruby
blob = ActiveStorage::Blob.last
*Rafael Mendonça França*
blob.url # => https://<bucket-name>.s3.<region>.amazonaws.com/<key>
blob.url(virtual_host: true) # => # => https://<bucket-name>/<key>
```
* Implement `strict_loading` on ActiveStorage associations.
*josegomezr*
*David Angulo*
* Allow setting a `Cache-Control` on files uploaded to GCS.
* Remove deprecated support to pass `:combine_options` operations to `ActiveStorage::Transformers::ImageProcessing`.
```yaml
gcs:
service: GCS
...
cache_control: "public, max-age=3600"
```
*Rafael Mendonça França*
*maleblond*
* Remove deprecated `ActiveStorage::Transformers::MiniMagickTransformer`.
* The parameters sent to `ffmpeg` for generating a video preview image are now
configurable under `config.active_storage.video_preview_arguments`.
*Rafael Mendonça França*
*Brendon Muir*
* Remove deprecated `config.active_storage.queue`.
* The ActiveStorage video previewer will now use scene change detection to generate
better preview images (rather than the previous default of using the first frame
of the video). This change requires FFmpeg v3.4+.
*Rafael Mendonça França*
*Jonathan Hefner*
* Remove deprecated `ActiveStorage::Downloading`.
* Add support for ActiveStorage expiring URLs.
*Rafael Mendonça França*
* Add per-environment configuration support
*Pietro Moro*
* The Poppler PDF previewer renders a preview image using the original
document's crop box rather than its media box, hiding print margins. This
matches the behavior of the MuPDF previewer.
*Vincent Robert*
* Touch parent model when an attachment is purged.
*Víctor Pérez Rodríguez*
* Files can now be served by proxying them from the underlying storage service
instead of redirecting to a signed service URL. Use the
`rails_storage_proxy_path` and `_url` helpers to proxy an attached file:
```erb
<%= image_tag rails_storage_proxy_path(@user.avatar) %>
```
To proxy by default, set `config.active_storage.resolve_model_to_route`:
```ruby
# Proxy attached files instead.
config.active_storage.resolve_model_to_route = :rails_storage_proxy
```
rails_blob_path(user.avatar, disposition: "attachment", expires_in: 30.minutes)
```erb
<%= image_tag @user.avatar %>
<%= image_tag rails_blob_path(user.avatar.variant(resize: "100x100"), expires_in: 30.minutes) %>
```
To redirect to a signed service URL when the default file serving strategy
is set to proxying, use the `rails_storage_redirect_path` and `_url` helpers:
If you want to set default expiration time for ActiveStorage URLs throughout your application, set `config.active_storage.urls_expire_in`.
```erb
<%= image_tag rails_storage_redirect_path(@user.avatar) %>
```
*aki77*
*Jonathan Fleckenstein*
* Allow to purge an attachment when record is not persisted for `has_many_attached`.
* Add `config.active_storage.web_image_content_types` to allow applications
to add content types (like `image/webp`) in which variants can be processed,
instead of letting those images be converted to the fallback PNG format.
*Jacopo Beschi*
*Jeroen van Haperen*
* Add `with_all_variant_records` method to eager load all variant records on an attachment at once.
`with_attached_image` scope now eager loads variant records if using variant tracking.
* Add support for creating variants of `WebP` images out of the box.
*Alex Ghiculescu*
*Dino Maric*
* Add metadata value for presence of audio channel in video blobs.
* Only enqueue analysis jobs for blobs with non-null analyzer classes.
The `metadata` attribute of video blobs has a new boolean key named `audio` that is set to
`true` if the file has an audio channel and `false` if it doesn't.
*Gannon McGibbon*
*Breno Gazzola*
* Previews are created on the same service as the original blob.
* Adds analyzer for audio files.
*Peter Zhu*
*Breno Gazzola*
* Remove unused `disposition` and `content_type` query parameters for `DiskService`.
* Respect Active Record's primary_key_type in Active Storage migrations.
*Peter Zhu*
*fatkodima*
* Use `DiskController` for both public and private files.
* Allow `expires_in` for ActiveStorage signed ids.
`DiskController` is able to handle multiple services by adding a
`service_name` field in the generated URL in `DiskService`.
*aki77*
*Peter Zhu*
* Allow to purge an attachment when record is not persisted for `has_one_attached`.
* Variants are tracked in the database to avoid existence checks in the storage service.
*Jacopo Beschi*
*George Claghorn*
* Add a load hook called `active_storage_variant_record` (providing `ActiveStorage::VariantRecord`)
to allow for overriding aspects of the `ActiveStorage::VariantRecord` class. This makes
`ActiveStorage::VariantRecord` consistent with `ActiveStorage::Blob` and `ActiveStorage::Attachment`
that already have load hooks.
* Deprecate `service_url` methods in favour of `url`.
*Brendon Muir*
Deprecate `Variant#service_url` and `Preview#service_url` to instead use
`#url` method to be consistent with `Blob`.
* `ActiveStorage::PreviewError` is raised when a previewer is unable to generate a preview image.
*Peter Zhu*
*Alex Robbin*
* Permanent URLs for public storage blobs.
* Add `ActiveStorage::Streaming` module that can be included in a controller to get access to `#send_blob_stream`,
which wraps the new `ActionController::Base#send_stream` method to stream a blob from cloud storage:
Services can be configured in `config/storage.yml` with a new key
`public: true | false` to indicate whether a service holds public
blobs or private blobs. Public services will always return a permanent URL.
```ruby
class MyPublicBlobsController < ApplicationController
include ActiveStorage::SetBlob, ActiveStorage::Streaming
Deprecates `Blob#service_url` in favor of `Blob#url`.
def show
http_cache_forever(public: true) do
send_blob_stream @blob, disposition: params[:disposition]
end
end
end
```
*Peter Zhu*
*DHH*
* Make services aware of configuration names.
* Add ability to use pre-defined variants.
*Gannon McGibbon*
* The `Content-Type` header is set on image variants when they're uploaded to third-party storage services.
*Kyle Ribordy*
* Allow storage services to be configured per attachment.
```ruby
class User < ActiveRecord::Base
has_one_attached :avatar, service: :s3
has_one_attached :avatar do |attachable|
attachable.variant :thumb, resize: "100x100"
attachable.variant :medium, resize: "300x300", monochrome: true
end
end
class Gallery < ActiveRecord::Base
has_many_attached :photos, service: :s3
has_many_attached :photos do |attachable|
attachable.variant :thumb, resize: "100x100"
attachable.variant :medium, resize: "300x300", monochrome: true
end
end
```
*Dmitry Tsepelev*
* You can optionally provide a custom blob key when attaching a new file:
```ruby
user.avatar.attach key: "avatars/#{user.id}.jpg",
io: io, content_type: "image/jpeg", filename: "avatar.jpg"
<%= image_tag user.avatar.variant(:thumb) %>
```
Active Storage will store the blob's data on the configured service at the provided key.
*fatkodima*
*George Claghorn*
* After setting `config.active_storage.resolve_model_to_route = :rails_storage_proxy`
`rails_blob_path` and `rails_representation_path` will generate proxy URLs by default.
* Replace `Blob.create_after_upload!` with `Blob.create_and_upload!` and deprecate the former.
*Ali Ismayilov*
`create_after_upload!` has been removed since it could lead to data
corruption by uploading to a key on the storage service which happened to
be already taken. Creating the record would then correctly raise a
database uniqueness exception but the stored object would already have
overwritten another. `create_and_upload!` swaps the order of operations
so that the key gets reserved up-front, or the uniqueness error gets raised
before the upload to that key takes place.
* Declare `ActiveStorage::FixtureSet` and `ActiveStorage::FixtureSet.blob` to
improve fixture integration.
*Julik Tarkhanov*
*Sean Doyle*
* Set content disposition in direct upload using `filename` and `disposition` parameters to `ActiveStorage::Service#headers_for_direct_upload`.
*Peter Zhu*
* Allow a record to be optionally passed to blob finders to make sharding
easier.
*Gannon McGibbon*
* Switch from `azure-storage` gem to `azure-storage-blob` gem for Azure service.
*Peter Zhu*
* Add `config.active_storage.draw_routes` to disable Active Storage routes.
*Gannon McGibbon*
* Image analysis is skipped if ImageMagick returns an error.
`ActiveStorage::Analyzer::ImageAnalyzer#metadata` would previously raise a
`MiniMagick::Error`, which caused persistent `ActiveStorage::AnalyzeJob`
failures. It now logs the error and returns `{}`, resulting in no metadata
being added to the offending image blob.
*George Claghorn*
* Method calls on singular attachments return `nil` when no file is attached.
Previously, assuming the following User model, `user.avatar.filename` would
raise a `Module::DelegationError` if no avatar was attached:
```ruby
class User < ApplicationRecord
has_one_attached :avatar
end
```
They now return `nil`.
*Matthew Tanous*
* The mirror service supports direct uploads.
New files are directly uploaded to the primary service. When a
directly-uploaded file is attached to a record, a background job is enqueued
to copy it to each secondary service.
Configure the queue used to process mirroring jobs by setting
`config.active_storage.queues.mirror`. The default is `:active_storage_mirror`.
*George Claghorn*
* The S3 service now permits uploading files larger than 5 gigabytes.
When uploading a file greater than 100 megabytes in size, the service
transparently switches to [multipart uploads](https://docs.aws.amazon.com/AmazonS3/latest/dev/mpuoverview.html)
using a part size computed from the file's total size and S3's part count limit.
No application changes are necessary to take advantage of this feature. You
can customize the default 100 MB multipart upload threshold in your S3
service's configuration:
```yaml
production:
service: s3
access_key_id: <%= Rails.application.credentials.dig(:aws, :access_key_id) %>
secret_access_key: <%= Rails.application.credentials.dig(:aws, :secret_access_key) %>
region: us-east-1
bucket: my-bucket
upload:
multipart_threshold: <%= 250.megabytes %>
```
*George Claghorn*
Please check [6-0-stable](https://github.com/rails/rails/blob/6-0-stable/activestorage/CHANGELOG.md) for previous changes.
Please check [6-1-stable](https://github.com/rails/rails/blob/6-1-stable/activestorage/CHANGELOG.md) for previous changes.
{
"name": "@rails/activestorage",
"version": "6.1.4",
"version": "7.0.0-alpha1",
"description": "Attach cloud and local files in Rails applications",
"main": "app/assets/javascripts/activestorage.js",
"type": "module",
"files": [

@@ -21,15 +22,11 @@ "app/assets/javascripts/*.js",

"dependencies": {
"spark-md5": "^3.0.0"
"spark-md5": "^3.0.1"
},
"devDependencies": {
"babel-core": "^6.25.0",
"babel-plugin-external-helpers": "^6.22.0",
"babel-preset-env": "^1.6.0",
"@rollup/plugin-node-resolve": "^11.0.1",
"@rollup/plugin-commonjs": "^19.0.1",
"eslint": "^4.3.0",
"eslint-plugin-import": "^2.7.0",
"rollup": "^0.58.2",
"rollup-plugin-babel": "^3.0.4",
"rollup-plugin-commonjs": "^9.1.0",
"rollup-plugin-node-resolve": "^3.3.0",
"rollup-plugin-uglify": "^3.0.0"
"eslint-plugin-import": "^2.23.4",
"rollup": "^2.35.1",
"rollup-plugin-terser": "^7.0.2"
},

@@ -36,0 +33,0 @@ "scripts": {

@@ -35,3 +35,3 @@ # Active Storage

# Attach an avatar to the user.
user.avatar.attach(io: File.open("/path/to/face.jpg"), filename: "face.jpg", content_type: "image/jpg")
user.avatar.attach(io: File.open("/path/to/face.jpg"), filename: "face.jpg", content_type: "image/jpeg")

@@ -92,4 +92,3 @@ # Does the user have an avatar?

def create
message = Message.create! params.require(:message).permit(:title, :content)
message.images.attach(params[:message][:images])
message = Message.create! params.require(:message).permit(:title, :content, images: [])
redirect_to message

@@ -129,10 +128,4 @@ end

Explicitly proxy attachments using the `rails_storage_proxy_path` and `_url` route helpers:
You can configure Active Storage to use proxying by default:
```erb
<%= image_tag rails_storage_proxy_path(@user.avatar) %>
```
Or configure Active Storage to use proxying by default:
```ruby

@@ -143,2 +136,8 @@ # config/initializers/active_storage.rb

Or if you want to explicitly proxy specific attachments there are URL helpers you can use in the form of `rails_storage_proxy_path` and `rails_storage_proxy_url`.
```erb
<%= image_tag rails_storage_proxy_path(@user.avatar) %>
```
## Direct uploads

@@ -150,4 +149,19 @@

1. Include `activestorage.js` in your application's JavaScript bundle.
1. Include the Active Storage JavaScript in your application's JavaScript bundle or reference it directly.
Requiring it directly, without bundling, through the asset pipeline in the application HTML with autostart:
```html
<%= javascript_include_tag "activestorage" %>
```
Requiring it via importmap-rails, without bundling, through the asset pipeline in the application HTML without autostart, as ESM:
```ruby
# config/importmap.rb
pin "@rails/activestorage", to: "activestorage.esm.js"
```
```html
<script type="module-shim">
import * as ActiveStorage from "@rails/activestorage"
ActiveStorage.start()
</script>
```
Using the asset pipeline:

@@ -154,0 +168,0 @@ ```js

@@ -12,3 +12,3 @@ import { DirectUploadsController } from "./direct_uploads_controller"

document.addEventListener("click", didClick, true)
document.addEventListener("submit", didSubmitForm)
document.addEventListener("submit", didSubmitForm, true)
document.addEventListener("ajax:before", didSubmitRemoteElement)

@@ -15,0 +15,0 @@ }

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight to your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc