@thirdweb-dev/storage
Comparing version 0.2.2 to 0.2.3-nightly-349b5c1
@@ -29,522 +29,6 @@ 'use strict';
-function ownKeys(object, enumerableOnly) {
-var keys = Object.keys(object);
-if (Object.getOwnPropertySymbols) {
-var symbols = Object.getOwnPropertySymbols(object);
-enumerableOnly && (symbols = symbols.filter(function (sym) {
-return Object.getOwnPropertyDescriptor(object, sym).enumerable;
-})), keys.push.apply(keys, symbols);
-}
-return keys;
-}
-function _objectSpread2(target) {
-for (var i = 1; i < arguments.length; i++) {
-var source = null != arguments[i] ? arguments[i] : {};
-i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {
-_defineProperty(target, key, source[key]);
-}) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
-Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
-});
-}
-return target;
-}
-function _typeof(obj) {
-"@babel/helpers - typeof";
-return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) {
-return typeof obj;
-} : function (obj) {
-return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
-}, _typeof(obj);
-}
-function _arrayLikeToArray(arr, len) {
-if (len == null || len > arr.length) len = arr.length;
-for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i];
-return arr2;
-}
-function _unsupportedIterableToArray(o, minLen) {
-if (!o) return;
-if (typeof o === "string") return _arrayLikeToArray(o, minLen);
-var n = Object.prototype.toString.call(o).slice(8, -1);
-if (n === "Object" && o.constructor) n = o.constructor.name;
-if (n === "Map" || n === "Set") return Array.from(o);
-if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen);
-}
-function _createForOfIteratorHelper(o, allowArrayLike) {
-var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"];
-if (!it) {
-if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") {
-if (it) o = it;
-var i = 0;
-var F = function () {};
-return {
-s: F,
-n: function () {
-if (i >= o.length) return {
-done: true
-};
-return {
-done: false,
-value: o[i++]
-};
-},
-e: function (e) {
-throw e;
-},
-f: F
-};
-}
-throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
-}
-var normalCompletion = true,
-didErr = false,
-err;
-return {
-s: function () {
-it = it.call(o);
-},
-n: function () {
-var step = it.next();
-normalCompletion = step.done;
-return step;
-},
-e: function (e) {
-didErr = true;
-err = e;
-},
-f: function () {
-try {
-if (!normalCompletion && it.return != null) it.return();
-} finally {
-if (didErr) throw err;
-}
-}
-};
-}
-function _regeneratorRuntime() {
-/*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/facebook/regenerator/blob/main/LICENSE */
-_regeneratorRuntime = function () {
-return exports;
-};
-var exports = {},
-Op = Object.prototype,
-hasOwn = Op.hasOwnProperty,
-$Symbol = "function" == typeof Symbol ? Symbol : {},
-iteratorSymbol = $Symbol.iterator || "@@iterator",
-asyncIteratorSymbol = $Symbol.asyncIterator || "@@asyncIterator",
-toStringTagSymbol = $Symbol.toStringTag || "@@toStringTag";
-function define(obj, key, value) {
-return Object.defineProperty(obj, key, {
-value: value,
-enumerable: !0,
-configurable: !0,
-writable: !0
-}), obj[key];
-}
-try {
-define({}, "");
-} catch (err) {
-define = function (obj, key, value) {
-return obj[key] = value;
-};
-}
-function wrap(innerFn, outerFn, self, tryLocsList) {
-var protoGenerator = outerFn && outerFn.prototype instanceof Generator ? outerFn : Generator,
-generator = Object.create(protoGenerator.prototype),
-context = new Context(tryLocsList || []);
-return generator._invoke = function (innerFn, self, context) {
-var state = "suspendedStart";
-return function (method, arg) {
-if ("executing" === state) throw new Error("Generator is already running");
-if ("completed" === state) {
-if ("throw" === method) throw arg;
-return doneResult();
-}
-for (context.method = method, context.arg = arg;;) {
-var delegate = context.delegate;
-if (delegate) {
-var delegateResult = maybeInvokeDelegate(delegate, context);
-if (delegateResult) {
-if (delegateResult === ContinueSentinel) continue;
-return delegateResult;
-}
-}
-if ("next" === context.method) context.sent = context._sent = context.arg;else if ("throw" === context.method) {
-if ("suspendedStart" === state) throw state = "completed", context.arg;
-context.dispatchException(context.arg);
-} else "return" === context.method && context.abrupt("return", context.arg);
-state = "executing";
-var record = tryCatch(innerFn, self, context);
-if ("normal" === record.type) {
-if (state = context.done ? "completed" : "suspendedYield", record.arg === ContinueSentinel) continue;
-return {
-value: record.arg,
-done: context.done
-};
-}
-"throw" === record.type && (state = "completed", context.method = "throw", context.arg = record.arg);
-}
-};
-}(innerFn, self, context), generator;
-}
-function tryCatch(fn, obj, arg) {
-try {
-return {
-type: "normal",
-arg: fn.call(obj, arg)
-};
-} catch (err) {
-return {
-type: "throw",
-arg: err
-};
-}
-}
-exports.wrap = wrap;
-var ContinueSentinel = {};
-function Generator() {}
-function GeneratorFunction() {}
-function GeneratorFunctionPrototype() {}
-var IteratorPrototype = {};
-define(IteratorPrototype, iteratorSymbol, function () {
-return this;
-});
-var getProto = Object.getPrototypeOf,
-NativeIteratorPrototype = getProto && getProto(getProto(values([])));
-NativeIteratorPrototype && NativeIteratorPrototype !== Op && hasOwn.call(NativeIteratorPrototype, iteratorSymbol) && (IteratorPrototype = NativeIteratorPrototype);
-var Gp = GeneratorFunctionPrototype.prototype = Generator.prototype = Object.create(IteratorPrototype);
-function defineIteratorMethods(prototype) {
-["next", "throw", "return"].forEach(function (method) {
-define(prototype, method, function (arg) {
-return this._invoke(method, arg);
-});
-});
-}
-function AsyncIterator(generator, PromiseImpl) {
-function invoke(method, arg, resolve, reject) {
-var record = tryCatch(generator[method], generator, arg);
-if ("throw" !== record.type) {
-var result = record.arg,
-value = result.value;
-return value && "object" == typeof value && hasOwn.call(value, "__await") ? PromiseImpl.resolve(value.__await).then(function (value) {
-invoke("next", value, resolve, reject);
-}, function (err) {
-invoke("throw", err, resolve, reject);
-}) : PromiseImpl.resolve(value).then(function (unwrapped) {
-result.value = unwrapped, resolve(result);
-}, function (error) {
-return invoke("throw", error, resolve, reject);
-});
-}
-reject(record.arg);
-}
-var previousPromise;
-this._invoke = function (method, arg) {
-function callInvokeWithMethodAndArg() {
-return new PromiseImpl(function (resolve, reject) {
-invoke(method, arg, resolve, reject);
-});
-}
-return previousPromise = previousPromise ? previousPromise.then(callInvokeWithMethodAndArg, callInvokeWithMethodAndArg) : callInvokeWithMethodAndArg();
-};
-}
-function maybeInvokeDelegate(delegate, context) {
-var method = delegate.iterator[context.method];
-if (undefined === method) {
-if (context.delegate = null, "throw" === context.method) {
-if (delegate.iterator.return && (context.method = "return", context.arg = undefined, maybeInvokeDelegate(delegate, context), "throw" === context.method)) return ContinueSentinel;
-context.method = "throw", context.arg = new TypeError("The iterator does not provide a 'throw' method");
-}
-return ContinueSentinel;
-}
-var record = tryCatch(method, delegate.iterator, context.arg);
-if ("throw" === record.type) return context.method = "throw", context.arg = record.arg, context.delegate = null, ContinueSentinel;
-var info = record.arg;
-return info ? info.done ? (context[delegate.resultName] = info.value, context.next = delegate.nextLoc, "return" !== context.method && (context.method = "next", context.arg = undefined), context.delegate = null, ContinueSentinel) : info : (context.method = "throw", context.arg = new TypeError("iterator result is not an object"), context.delegate = null, ContinueSentinel);
-}
-function pushTryEntry(locs) {
-var entry = {
-tryLoc: locs[0]
-};
-1 in locs && (entry.catchLoc = locs[1]), 2 in locs && (entry.finallyLoc = locs[2], entry.afterLoc = locs[3]), this.tryEntries.push(entry);
-}
-function resetTryEntry(entry) {
-var record = entry.completion || {};
-record.type = "normal", delete record.arg, entry.completion = record;
-}
-function Context(tryLocsList) {
-this.tryEntries = [{
-tryLoc: "root"
-}], tryLocsList.forEach(pushTryEntry, this), this.reset(!0);
-}
-function values(iterable) {
-if (iterable) {
-var iteratorMethod = iterable[iteratorSymbol];
-if (iteratorMethod) return iteratorMethod.call(iterable);
-if ("function" == typeof iterable.next) return iterable;
-if (!isNaN(iterable.length)) {
-var i = -1,
-next = function next() {
-for (; ++i < iterable.length;) if (hasOwn.call(iterable, i)) return next.value = iterable[i], next.done = !1, next;
-return next.value = undefined, next.done = !0, next;
-};
-return next.next = next;
-}
-}
-return {
-next: doneResult
-};
-}
-function doneResult() {
-return {
-value: undefined,
-done: !0
-};
-}
-return GeneratorFunction.prototype = GeneratorFunctionPrototype, define(Gp, "constructor", GeneratorFunctionPrototype), define(GeneratorFunctionPrototype, "constructor", GeneratorFunction), GeneratorFunction.displayName = define(GeneratorFunctionPrototype, toStringTagSymbol, "GeneratorFunction"), exports.isGeneratorFunction = function (genFun) {
-var ctor = "function" == typeof genFun && genFun.constructor;
-return !!ctor && (ctor === GeneratorFunction || "GeneratorFunction" === (ctor.displayName || ctor.name));
-}, exports.mark = function (genFun) {
-return Object.setPrototypeOf ? Object.setPrototypeOf(genFun, GeneratorFunctionPrototype) : (genFun.__proto__ = GeneratorFunctionPrototype, define(genFun, toStringTagSymbol, "GeneratorFunction")), genFun.prototype = Object.create(Gp), genFun;
-}, exports.awrap = function (arg) {
-return {
-__await: arg
-};
-}, defineIteratorMethods(AsyncIterator.prototype), define(AsyncIterator.prototype, asyncIteratorSymbol, function () {
-return this;
-}), exports.AsyncIterator = AsyncIterator, exports.async = function (innerFn, outerFn, self, tryLocsList, PromiseImpl) {
-void 0 === PromiseImpl && (PromiseImpl = Promise);
-var iter = new AsyncIterator(wrap(innerFn, outerFn, self, tryLocsList), PromiseImpl);
-return exports.isGeneratorFunction(outerFn) ? iter : iter.next().then(function (result) {
-return result.done ? result.value : iter.next();
-});
-}, defineIteratorMethods(Gp), define(Gp, toStringTagSymbol, "Generator"), define(Gp, iteratorSymbol, function () {
-return this;
-}), define(Gp, "toString", function () {
-return "[object Generator]";
-}), exports.keys = function (object) {
-var keys = [];
-for (var key in object) keys.push(key);
-return keys.reverse(), function next() {
-for (; keys.length;) {
-var key = keys.pop();
-if (key in object) return next.value = key, next.done = !1, next;
-}
-return next.done = !0, next;
-};
-}, exports.values = values, Context.prototype = {
-constructor: Context,
-reset: function (skipTempReset) {
-if (this.prev = 0, this.next = 0, this.sent = this._sent = undefined, this.done = !1, this.delegate = null, this.method = "next", this.arg = undefined, this.tryEntries.forEach(resetTryEntry), !skipTempReset) for (var name in this) "t" === name.charAt(0) && hasOwn.call(this, name) && !isNaN(+name.slice(1)) && (this[name] = undefined);
-},
-stop: function () {
-this.done = !0;
-var rootRecord = this.tryEntries[0].completion;
-if ("throw" === rootRecord.type) throw rootRecord.arg;
-return this.rval;
-},
-dispatchException: function (exception) {
-if (this.done) throw exception;
-var context = this;
-function handle(loc, caught) {
-return record.type = "throw", record.arg = exception, context.next = loc, caught && (context.method = "next", context.arg = undefined), !!caught;
-}
-for (var i = this.tryEntries.length - 1; i >= 0; --i) {
-var entry = this.tryEntries[i],
-record = entry.completion;
-if ("root" === entry.tryLoc) return handle("end");
-if (entry.tryLoc <= this.prev) {
-var hasCatch = hasOwn.call(entry, "catchLoc"),
-hasFinally = hasOwn.call(entry, "finallyLoc");
-if (hasCatch && hasFinally) {
-if (this.prev < entry.catchLoc) return handle(entry.catchLoc, !0);
-if (this.prev < entry.finallyLoc) return handle(entry.finallyLoc);
-} else if (hasCatch) {
-if (this.prev < entry.catchLoc) return handle(entry.catchLoc, !0);
-} else {
-if (!hasFinally) throw new Error("try statement without catch or finally");
-if (this.prev < entry.finallyLoc) return handle(entry.finallyLoc);
-}
-}
-}
-},
-abrupt: function (type, arg) {
-for (var i = this.tryEntries.length - 1; i >= 0; --i) {
-var entry = this.tryEntries[i];
-if (entry.tryLoc <= this.prev && hasOwn.call(entry, "finallyLoc") && this.prev < entry.finallyLoc) {
-var finallyEntry = entry;
-break;
-}
-}
-finallyEntry && ("break" === type || "continue" === type) && finallyEntry.tryLoc <= arg && arg <= finallyEntry.finallyLoc && (finallyEntry = null);
-var record = finallyEntry ? finallyEntry.completion : {};
-return record.type = type, record.arg = arg, finallyEntry ? (this.method = "next", this.next = finallyEntry.finallyLoc, ContinueSentinel) : this.complete(record);
-},
-complete: function (record, afterLoc) {
-if ("throw" === record.type) throw record.arg;
-return "break" === record.type || "continue" === record.type ? this.next = record.arg : "return" === record.type ? (this.rval = this.arg = record.arg, this.method = "return", this.next = "end") : "normal" === record.type && afterLoc && (this.next = afterLoc), ContinueSentinel;
-},
-finish: function (finallyLoc) {
-for (var i = this.tryEntries.length - 1; i >= 0; --i) {
-var entry = this.tryEntries[i];
-if (entry.finallyLoc === finallyLoc) return this.complete(entry.completion, entry.afterLoc), resetTryEntry(entry), ContinueSentinel;
-}
-},
-catch: function (tryLoc) {
-for (var i = this.tryEntries.length - 1; i >= 0; --i) {
-var entry = this.tryEntries[i];
-if (entry.tryLoc === tryLoc) {
-var record = entry.completion;
-if ("throw" === record.type) {
-var thrown = record.arg;
-resetTryEntry(entry);
-}
-return thrown;
-}
-}
-throw new Error("illegal catch attempt");
-},
-delegateYield: function (iterable, resultName, nextLoc) {
-return this.delegate = {
-iterator: values(iterable),
-resultName: resultName,
-nextLoc: nextLoc
-}, "next" === this.method && (this.arg = undefined), ContinueSentinel;
-}
-}, exports;
-}
-function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) {
-try {
-var info = gen[key](arg);
-var value = info.value;
-} catch (error) {
-reject(error);
-return;
-}
-if (info.done) {
-resolve(value);
-} else {
-Promise.resolve(value).then(_next, _throw);
-}
-}
-function _asyncToGenerator(fn) {
-return function () {
-var self = this,
-args = arguments;
-return new Promise(function (resolve, reject) {
-var gen = fn.apply(self, args);
-function _next(value) {
-asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value);
-}
-function _throw(err) {
-asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err);
-}
-_next(undefined);
-});
-};
-}
-function _classCallCheck(instance, Constructor) {
-if (!(instance instanceof Constructor)) {
-throw new TypeError("Cannot call a class as a function");
-}
-}
-function _defineProperties(target, props) {
-for (var i = 0; i < props.length; i++) {
-var descriptor = props[i];
-descriptor.enumerable = descriptor.enumerable || false;
-descriptor.configurable = true;
-if ("value" in descriptor) descriptor.writable = true;
-Object.defineProperty(target, descriptor.key, descriptor);
-}
-}
-function _createClass(Constructor, protoProps, staticProps) {
-if (protoProps) _defineProperties(Constructor.prototype, protoProps);
-if (staticProps) _defineProperties(Constructor, staticProps);
-Object.defineProperty(Constructor, "prototype", {
-writable: false
-});
-return Constructor;
-}
 /**
 * @internal
 */
-var DEFAULT_IPFS_GATEWAY = "https://gateway.ipfscdn.io/ipfs/";
+const DEFAULT_IPFS_GATEWAY = "https://gateway.ipfscdn.io/ipfs/";
 /**
@@ -554,3 +38,3 @@ * @internal
-var PUBLIC_GATEWAYS = ["https://gateway.ipfscdn.io/ipfs/", "https://cloudflare-ipfs.com/ipfs/", "https://ipfs.io/ipfs/"];
+const PUBLIC_GATEWAYS = ["https://gateway.ipfscdn.io/ipfs/", "https://cloudflare-ipfs.com/ipfs/", "https://ipfs.io/ipfs/"];
 /**
@@ -560,3 +44,3 @@ * @internal
-var TW_IPFS_SERVER_URL = "https://upload.nftlabs.co";
+const TW_IPFS_SERVER_URL = "https://upload.nftlabs.co";
 /**
@@ -566,3 +50,3 @@ * @internal
-var PINATA_IPFS_URL = "https://api.pinata.cloud/pinning/pinFileToIPFS";
+const PINATA_IPFS_URL = "https://api.pinata.cloud/pinning/pinFileToIPFS";
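The constants above only change from var to const; they are what the storage classes further down use to turn ipfs:// URIs into fetchable HTTP URLs. A minimal sketch of that substitution (illustrative only; the package's own helper is the resolveGatewayUrl function referenced later in this diff, and the CID below is a placeholder):

// Sketch: resolve an ipfs:// URI against the default public gateway.
const DEFAULT_IPFS_GATEWAY = "https://gateway.ipfscdn.io/ipfs/";
function resolveIpfsUri(uri) {
  // "ipfs://<cid>/<path>" -> "https://gateway.ipfscdn.io/ipfs/<cid>/<path>"
  return uri.startsWith("ipfs://") ? uri.replace("ipfs://", DEFAULT_IPFS_GATEWAY) : uri;
}
console.log(resolveIpfsUri("ipfs://QmExampleCid/0")); // hypothetical CID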
@@ -589,9 +73,9 @@ function isFileInstance(data) {
 function replaceFilePropertiesWithHashes(object, cids) {
-var keys = Object.keys(object);
+const keys = Object.keys(object);
-for (var key in keys) {
-var val = object[keys[key]];
-var isFile = isFileInstance(val) || isBufferInstance(val);
+for (const key in keys) {
+const val = object[keys[key]];
+const isFile = isFileInstance(val) || isBufferInstance(val);
-if (_typeof(val) === "object" && !isFile) {
+if (typeof val === "object" && !isFile) {
 replaceFilePropertiesWithHashes(val, cids);
@@ -623,11 +107,11 @@ continue;
-var keys = Object.keys(object);
+const keys = Object.keys(object);
-for (var key in keys) {
-var val = object[keys[key]];
+for (const key in keys) {
+const val = object[keys[key]];
 object[keys[key]] = resolveGatewayUrl(val, scheme, gatewayUrl);
 if (Array.isArray(val)) {
-object[keys[key]] = val.map(function (el) {
-if (_typeof(el) === "object") {
+object[keys[key]] = val.map(el => {
+if (typeof el === "object") {
 return replaceHashWithGatewayUrl(el, scheme, gatewayUrl);
@@ -640,3 +124,3 @@ } else {
-if (_typeof(val) === "object") {
+if (typeof val === "object") {
 replaceHashWithGatewayUrl(val, scheme, gatewayUrl);
@@ -661,13 +145,13 @@ }
-var keys = Object.keys(object);
+const keys = Object.keys(object);
-for (var key in keys) {
-var val = object[keys[key]];
+for (const key in keys) {
+const val = object[keys[key]];
 object[keys[key]] = toIPFSHash(val, scheme, gatewayUrl);
 if (Array.isArray(val)) {
-object[keys[key]] = val.map(function (el) {
-var isFile = isFileInstance(el) || isBufferInstance(el);
+object[keys[key]] = val.map(el => {
+const isFile = isFileInstance(el) || isBufferInstance(el);
-if (_typeof(el) === "object" && !isFile) {
+if (typeof el === "object" && !isFile) {
 return replaceGatewayUrlWithHash(el, scheme, gatewayUrl);
@@ -680,5 +164,5 @@ } else {
-var isFile = isFileInstance(val) || isBufferInstance(val);
+const isFile = isFileInstance(val) || isBufferInstance(val);
-if (_typeof(val) === "object" && !isFile) {
+if (typeof val === "object" && !isFile) {
 replaceGatewayUrlWithHash(val, scheme, gatewayUrl);
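The helpers diffed above rewrite metadata in both directions: gateway URLs are swapped back to ipfs:// hashes before upload, and hashes are swapped for gateway URLs after download. A rough sketch of that substitution (simplified; the real helpers also recurse into nested objects, arrays, and File/Buffer values, and the CID is a placeholder):

// Sketch: swap a gateway URL prefix for the ipfs:// scheme and back.
const gatewayUrl = "https://gateway.ipfscdn.io/ipfs/";
const toHash = (val) => (typeof val === "string" ? val.replace(gatewayUrl, "ipfs://") : val);
const toGateway = (val) => (typeof val === "string" ? val.replace("ipfs://", gatewayUrl) : val);
const metadata = { image: "https://gateway.ipfscdn.io/ipfs/QmExampleCid/0" }; // hypothetical CID
const sanitized = { ...metadata, image: toHash(metadata.image) }; // "ipfs://QmExampleCid/0"
const resolved = { ...sanitized, image: toGateway(sanitized.image) }; // back to the gateway URL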
@@ -728,274 +212,178 @@ }
-var PinataUploader = /*#__PURE__*/function () {
-function PinataUploader() {
-_classCallCheck(this, PinataUploader);
-}
+class PinataUploader {
+/**
+* Fetches a one-time-use upload token that can be used to upload
+* a file to storage.
+*
+* @returns - The one time use token that can be passed to the Pinata API.
+*/
+async getUploadToken(contractAddress) {
+const headers = {
+"X-App-Name": "CONSOLE-TS-SDK-".concat(contractAddress)
+};
+const res = await fetch__default["default"]("".concat(TW_IPFS_SERVER_URL, "/grant"), {
+method: "GET",
+headers
+});
-_createClass(PinataUploader, [{
-key: "getUploadToken",
-value:
-/**
-* Fetches a one-time-use upload token that can be used to upload
-* a file to storage.
-*
-* @returns - The one time use token that can be passed to the Pinata API.
-*/
-function () {
-var _getUploadToken = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee(contractAddress) {
-var headers, res, body;
-return _regeneratorRuntime().wrap(function _callee$(_context) {
-while (1) {
-switch (_context.prev = _context.next) {
-case 0:
-headers = {
-"X-App-Name": "CONSOLE-TS-SDK-".concat(contractAddress)
-};
-_context.next = 3;
-return fetch__default["default"]("".concat(TW_IPFS_SERVER_URL, "/grant"), {
-method: "GET",
-headers: headers
-});
+if (!res.ok) {
+throw new Error("Failed to get upload token");
+}
-case 3:
-res = _context.sent;
+const body = await res.text();
+return body;
+}
-if (res.ok) {
-_context.next = 6;
-break;
-}
+async uploadBatchWithCid(files) {
+let fileStartNumber = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
+let contractAddress = arguments.length > 2 ? arguments[2] : undefined;
+let signerAddress = arguments.length > 3 ? arguments[3] : undefined;
+let options = arguments.length > 4 ? arguments[4] : undefined;
+const token = await this.getUploadToken(contractAddress || "");
+const formData = new FormData__default["default"]();
+const {
+data,
+fileNames
+} = this.buildFormData(formData, files, fileStartNumber, contractAddress, signerAddress);
-throw new Error("Failed to get upload token");
+if (typeof window === "undefined") {
+if (options !== null && options !== void 0 && options.onProgress) {
+console.warn("The onProgress option is only supported in the browser");
+}
-case 6:
-_context.next = 8;
-return res.text();
+const res = await fetch__default["default"](PINATA_IPFS_URL, {
+method: "POST",
+headers: {
+Authorization: "Bearer ".concat(token),
+...data.getHeaders()
+},
+body: data.getBuffer()
+});
+const body = await res.json();
-case 8:
-body = _context.sent;
-return _context.abrupt("return", body);
+if (!res.ok) {
+throw new Error("Failed to upload files to IPFS");
+}
-case 10:
-case "end":
-return _context.stop();
-}
-}
-}, _callee);
-}));
+const cid = body.IpfsHash;
-function getUploadToken(_x) {
-return _getUploadToken.apply(this, arguments);
+if (!cid) {
+throw new Error("Failed to upload files to IPFS");
+}
-return getUploadToken;
-}()
-}, {
-key: "uploadBatchWithCid",
-value: function () {
-var _uploadBatchWithCid = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee2(files) {
-var fileStartNumber,
-contractAddress,
-signerAddress,
-options,
-token,
-formData,
-_this$buildFormData,
-data,
-fileNames,
-res,
-body,
-cid,
-_args2 = arguments;
+return {
+cid,
+fileNames
+};
+} else {
+return new Promise((resolve, reject) => {
+const xhr = new XMLHttpRequest();
+xhr.open("POST", PINATA_IPFS_URL);
+xhr.setRequestHeader("Authorization", "Bearer ".concat(token));
-return _regeneratorRuntime().wrap(function _callee2$(_context2) {
-while (1) {
-switch (_context2.prev = _context2.next) {
-case 0:
-fileStartNumber = _args2.length > 1 && _args2[1] !== undefined ? _args2[1] : 0;
-contractAddress = _args2.length > 2 ? _args2[2] : undefined;
-signerAddress = _args2.length > 3 ? _args2[3] : undefined;
-options = _args2.length > 4 ? _args2[4] : undefined;
-_context2.next = 6;
-return this.getUploadToken(contractAddress || "");
+xhr.onloadend = () => {
+if (xhr.status !== 200) {
+throw new Error("Failed to upload files to IPFS");
+}
-case 6:
-token = _context2.sent;
-formData = new FormData__default["default"]();
-_this$buildFormData = this.buildFormData(formData, files, fileStartNumber, contractAddress, signerAddress), data = _this$buildFormData.data, fileNames = _this$buildFormData.fileNames;
+const cid = JSON.parse(xhr.responseText).IpfsHash;
-if (!(typeof window === "undefined")) {
-_context2.next = 25;
-break;
-}
+if (!cid) {
+throw new Error("Failed to upload files to IPFS");
+}
-if (options !== null && options !== void 0 && options.onProgress) {
-console.warn("The onProgress option is only supported in the browser");
-}
+resolve({
+cid,
+fileNames
+});
+};
-_context2.next = 13;
-return fetch__default["default"](PINATA_IPFS_URL, {
-method: "POST",
-headers: _objectSpread2({
-Authorization: "Bearer ".concat(token)
-}, data.getHeaders()),
-body: data.getBuffer()
-});
+xhr.onerror = err => {
+reject(err);
+};
-case 13:
-res = _context2.sent;
-_context2.next = 16;
-return res.json();
+if (xhr.upload) {
+xhr.upload.onprogress = event => {
+if (options !== null && options !== void 0 && options.onProgress) {
+options === null || options === void 0 ? void 0 : options.onProgress({
+progress: event.loaded,
+total: event.total
+});
+}
+};
+}
-case 16:
-body = _context2.sent;
+xhr.send(data);
+});
+}
+}
-if (res.ok) {
-_context2.next = 19;
-break;
-}
+buildFormData(data, files) {
+let fileStartNumber = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
+let contractAddress = arguments.length > 3 ? arguments[3] : undefined;
+let signerAddress = arguments.length > 4 ? arguments[4] : undefined;
+const metadata = {
+name: "CONSOLE-TS-SDK-".concat(contractAddress),
+keyvalues: {
+sdk: "typescript",
+contractAddress,
+signerAddress
+}
+};
+const fileNames = [];
+files.forEach((file, i) => {
+let fileName = "";
+let fileData = file; // if it is a file, we passthrough the file extensions,
+// if it is a buffer or string, the filename would be fileStartNumber + index
+// if it is a buffer or string with names, the filename would be the name
-throw new Error("Failed to upload files to IPFS");
+if (isFileInstance(file)) {
+let extensions = "";
-case 19:
-cid = body.IpfsHash;
+if (file.name) {
+const extensionStartIndex = file.name.lastIndexOf(".");
-if (cid) {
-_context2.next = 22;
-break;
-}
-throw new Error("Failed to upload files to IPFS");
-case 22:
-return _context2.abrupt("return", {
-cid: cid,
-fileNames: fileNames
-});
-case 25:
-return _context2.abrupt("return", new Promise(function (resolve, reject) {
-var xhr = new XMLHttpRequest();
-xhr.open("POST", PINATA_IPFS_URL);
-xhr.setRequestHeader("Authorization", "Bearer ".concat(token));
-xhr.onloadend = function () {
-if (xhr.status !== 200) {
-throw new Error("Failed to upload files to IPFS");
-}
-var cid = JSON.parse(xhr.responseText).IpfsHash;
-if (!cid) {
-throw new Error("Failed to upload files to IPFS");
-}
-resolve({
-cid: cid,
-fileNames: fileNames
-});
-};
-xhr.onerror = function (err) {
-reject(err);
-};
-if (xhr.upload) {
-xhr.upload.onprogress = function (event) {
-if (options !== null && options !== void 0 && options.onProgress) {
-options === null || options === void 0 ? void 0 : options.onProgress({
-progress: event.loaded,
-total: event.total
-});
-}
-};
-}
-xhr.send(data);
-}));
-case 26:
-case "end":
-return _context2.stop();
-}
+if (extensionStartIndex > -1) {
+extensions = file.name.substring(extensionStartIndex);
+}
-}, _callee2, this);
-}));
+}
-function uploadBatchWithCid(_x2) {
-return _uploadBatchWithCid.apply(this, arguments);
+fileName = "".concat(i + fileStartNumber).concat(extensions);
+} else if (isBufferInstance(file) || typeof file === "string") {
+fileName = "".concat(i + fileStartNumber);
+} else if (file && file.name && file !== null && file !== void 0 && file.data) {
+fileData = file === null || file === void 0 ? void 0 : file.data;
+fileName = "".concat(file.name);
+} else {
+// default behavior
+fileName = "".concat(i + fileStartNumber);
+}
-return uploadBatchWithCid;
-}()
-}, {
-key: "buildFormData",
-value: function buildFormData(data, files) {
-var fileStartNumber = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
-var contractAddress = arguments.length > 3 ? arguments[3] : undefined;
-var signerAddress = arguments.length > 4 ? arguments[4] : undefined;
-var metadata = {
-name: "CONSOLE-TS-SDK-".concat(contractAddress),
-keyvalues: {
-sdk: "typescript",
-contractAddress: contractAddress,
-signerAddress: signerAddress
-}
-};
-var fileNames = [];
-files.forEach(function (file, i) {
-var fileName = "";
-var fileData = file; // if it is a file, we passthrough the file extensions,
-// if it is a buffer or string, the filename would be fileStartNumber + index
-// if it is a buffer or string with names, the filename would be the name
+const filepath = "files/".concat(fileName);
-if (isFileInstance(file)) {
-var extensions = "";
+if (fileNames.indexOf(fileName) > -1) {
+throw new Error("DUPLICATE_FILE_NAME_ERROR: File name ".concat(fileName, " was passed for more than one file."));
+}
-if (file.name) {
-var extensionStartIndex = file.name.lastIndexOf(".");
+fileNames.push(fileName);
-if (extensionStartIndex > -1) {
-extensions = file.name.substring(extensionStartIndex);
-}
-}
+if (typeof window === "undefined") {
+data.append("file", fileData, {
+filepath
+});
+} else {
+// browser does blob things, filepath is parsed differently on browser vs node.
+// pls pinata?
+data.append("file", new Blob([fileData]), filepath);
+}
+});
+data.append("pinataMetadata", JSON.stringify(metadata));
+return {
+data,
+fileNames
+};
+}
-fileName = "".concat(i + fileStartNumber).concat(extensions);
-} else if (isBufferInstance(file) || typeof file === "string") {
-fileName = "".concat(i + fileStartNumber);
-} else if (file && file.name && file !== null && file !== void 0 && file.data) {
-fileData = file === null || file === void 0 ? void 0 : file.data;
-fileName = "".concat(file.name);
-} else {
-// default behavior
-fileName = "".concat(i + fileStartNumber);
-}
+}
-var filepath = "files/".concat(fileName);
-if (fileNames.indexOf(fileName) > -1) {
-throw new Error("DUPLICATE_FILE_NAME_ERROR: File name ".concat(fileName, " was passed for more than one file."));
-}
-fileNames.push(fileName);
-if (typeof window === "undefined") {
-data.append("file", fileData, {
-filepath: filepath
-});
-} else {
-// browser does blob things, filepath is parsed differently on browser vs node.
-// pls pinata?
-data.append("file", new Blob([fileData]), filepath);
-}
-});
-data.append("pinataMetadata", JSON.stringify(metadata));
-return {
-data: data,
-fileNames: fileNames
-};
-}
-}]);
-return PinataUploader;
-}();
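Functionally the uploader is unchanged by this hunk: both versions fetch a one-time token from TW_IPFS_SERVER_URL and pin a FormData batch to the Pinata endpoint, with an XMLHttpRequest path in the browser for progress events. A usage sketch under that assumption (Node side; the contract address is a placeholder):

// Sketch: pin two buffers through the uploader defined above.
async function exampleBatchUpload() {
  const uploader = new PinataUploader();
  const { cid, fileNames } = await uploader.uploadBatchWithCid(
    [Buffer.from("file one"), Buffer.from("file two")],
    0, // fileStartNumber: files are named 0, 1, ...
    "0x0000000000000000000000000000000000000000", // placeholder contract address
    undefined, // signerAddress
    undefined // options ({ onProgress } is browser-only)
  );
  return fileNames.map((name) => `ipfs://${cid}/${name}`);
}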
 /**
@@ -1007,3 +395,3 @@ * IPFS Storage implementation, accepts custom IPFS gateways
-var IpfsStorage = /*#__PURE__*/function () {
+class IpfsStorage {
 /**
@@ -1013,8 +401,6 @@ * {@inheritdoc IStorage.gatewayUrl}
 */
-function IpfsStorage() {
-var gatewayUrl = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : DEFAULT_IPFS_GATEWAY;
-var uploader = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : new PinataUploader();
+constructor() {
+let gatewayUrl = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : DEFAULT_IPFS_GATEWAY;
+let uploader = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : new PinataUploader();
-_classCallCheck(this, IpfsStorage);
 _defineProperty(this, "gatewayUrl", void 0);
@@ -1030,555 +416,251 @@
-_createClass(IpfsStorage, [{
-key: "getNextPublicGateway",
-value: function getNextPublicGateway() {
-var _this = this;
+getNextPublicGateway() {
+const urlsToTry = PUBLIC_GATEWAYS.filter(url => !this.failedUrls.includes(url)).filter(url => url !== this.gatewayUrl);
-var urlsToTry = PUBLIC_GATEWAYS.filter(function (url) {
-return !_this.failedUrls.includes(url);
-}).filter(function (url) {
-return url !== _this.gatewayUrl;
-});
+if (urlsToTry.length > 0) {
+return urlsToTry[0];
+} else {
+this.failedUrls = [];
+return undefined;
+}
-if (urlsToTry.length > 0) {
-return urlsToTry[0];
-} else {
-this.failedUrls = [];
-return undefined;
-}
-/**
-* Upload a file to IPFS and return the hash
-* @remarks This method is a wrapper around {@link IStorage.upload}
-* @example
-* ```javascript
-* const file = './path/to/file.png'; // Can be a path or a File object such as a file from an input element.
-* const hash = await sdk.storage.upload(file);
-* ```
-*
-*
-*/
+}
+/**
+* Upload a file to IPFS and return the hash
+* @remarks This method is a wrapper around {@link IStorage.upload}
+* @example
+* ```javascript
+* const file = './path/to/file.png'; // Can be a path or a File object such as a file from an input element.
+* const hash = await sdk.storage.upload(file);
+* ```
+*
+*
+*/
-}, {
-key: "upload",
-value: function () {
-var _upload = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee(data, contractAddress, signerAddress, options) {
-var _yield$this$uploader$, cid, fileNames, baseUri;
-return _regeneratorRuntime().wrap(function _callee$(_context) {
-while (1) {
-switch (_context.prev = _context.next) {
-case 0:
-_context.next = 2;
-return this.uploader.uploadBatchWithCid([data], 0, contractAddress, signerAddress, options);
+async upload(data, contractAddress, signerAddress, options) {
+const {
+cid,
+fileNames
+} = await this.uploader.uploadBatchWithCid([data], 0, contractAddress, signerAddress, options);
+const baseUri = "ipfs://".concat(cid, "/");
+return "".concat(baseUri).concat(fileNames[0]);
+}
+/**
+* {@inheritDoc IStorage.uploadBatch}
+*/
-case 2:
-_yield$this$uploader$ = _context.sent;
-cid = _yield$this$uploader$.cid;
-fileNames = _yield$this$uploader$.fileNames;
-baseUri = "ipfs://".concat(cid, "/");
-return _context.abrupt("return", "".concat(baseUri).concat(fileNames[0]));
-case 7:
-case "end":
-return _context.stop();
-}
-}
-}, _callee, this);
-}));
+async uploadBatch(files) {
+let fileStartNumber = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
+let contractAddress = arguments.length > 2 ? arguments[2] : undefined;
+let signerAddress = arguments.length > 3 ? arguments[3] : undefined;
+let options = arguments.length > 4 ? arguments[4] : undefined;
+const {
+cid,
+fileNames
+} = await this.uploader.uploadBatchWithCid(files, fileStartNumber, contractAddress, signerAddress, options);
+const baseUri = "ipfs://".concat(cid, "/");
+const uris = fileNames.map(filename => "".concat(baseUri).concat(filename));
+return {
+baseUri,
+uris
+};
+}
+/**
+* {@inheritDoc IStorage.get}
+*/
-function upload(_x, _x2, _x3, _x4) {
-return _upload.apply(this, arguments);
-}
-return upload;
-}()
-/**
-* {@inheritDoc IStorage.uploadBatch}
-*/
+async get(hash) {
+const res = await this._get(hash);
+const json = await res.json();
+return replaceHashWithGatewayUrl(json, "ipfs://", this.gatewayUrl);
+}
+/**
+* {@inheritDoc IStorage.getRaw}
+*/
-}, {
-key: "uploadBatch",
-value: function () {
-var _uploadBatch = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee2(files) {
-var fileStartNumber,
-contractAddress,
-signerAddress,
-options,
-_yield$this$uploader$2,
-cid,
-fileNames,
-baseUri,
-uris,
-_args2 = arguments;
-return _regeneratorRuntime().wrap(function _callee2$(_context2) {
-while (1) {
-switch (_context2.prev = _context2.next) {
-case 0:
-fileStartNumber = _args2.length > 1 && _args2[1] !== undefined ? _args2[1] : 0;
-contractAddress = _args2.length > 2 ? _args2[2] : undefined;
-signerAddress = _args2.length > 3 ? _args2[3] : undefined;
-options = _args2.length > 4 ? _args2[4] : undefined;
-_context2.next = 6;
-return this.uploader.uploadBatchWithCid(files, fileStartNumber, contractAddress, signerAddress, options);
+async getRaw(hash) {
+const res = await this._get(hash);
+return await res.text();
+}
+/**
+* {@inheritDoc IStorage.uploadMetadata}
+*/
-case 6:
-_yield$this$uploader$2 = _context2.sent;
-cid = _yield$this$uploader$2.cid;
-fileNames = _yield$this$uploader$2.fileNames;
-baseUri = "ipfs://".concat(cid, "/");
-uris = fileNames.map(function (filename) {
-return "".concat(baseUri).concat(filename);
-});
-return _context2.abrupt("return", {
-baseUri: baseUri,
-uris: uris
-});
-case 12:
-case "end":
-return _context2.stop();
-}
-}
-}, _callee2, this);
-}));
+async uploadMetadata(metadata, contractAddress, signerAddress, options) {
+// since there's only single object, always use the first index
+const {
+uris
+} = await this.uploadMetadataBatch([metadata], 0, contractAddress, signerAddress, options);
+return uris[0];
+}
+/**
+* {@inheritDoc IStorage.uploadMetadataBatch}
+*/
-function uploadBatch(_x5) {
-return _uploadBatch.apply(this, arguments);
-}
-return uploadBatch;
-}()
-/**
-* {@inheritDoc IStorage.get}
-*/
+async uploadMetadataBatch(metadatas, fileStartNumber, contractAddress, signerAddress, options) {
+const metadataToUpload = (await this.batchUploadProperties(metadatas, options)).map(m => JSON.stringify(m));
+const {
+cid,
+fileNames
+} = await this.uploader.uploadBatchWithCid(metadataToUpload, fileStartNumber, contractAddress, signerAddress);
+const baseUri = "ipfs://".concat(cid, "/");
+const uris = fileNames.map(filename => "".concat(baseUri).concat(filename));
+return {
+baseUri,
+uris
+};
+}
+/** *************************
+* PRIVATE FUNCTIONS
+*************************/
-}, {
-key: "get",
-value: function () {
-var _get2 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee3(hash) {
-var res, json;
-return _regeneratorRuntime().wrap(function _callee3$(_context3) {
-while (1) {
-switch (_context3.prev = _context3.next) {
-case 0:
-_context3.next = 2;
-return this._get(hash);
-case 2:
-res = _context3.sent;
-_context3.next = 5;
-return res.json();
+async _get(hash) {
+let uri = hash;
-case 5:
-json = _context3.sent;
-return _context3.abrupt("return", replaceHashWithGatewayUrl(json, "ipfs://", this.gatewayUrl));
+if (hash) {
+uri = resolveGatewayUrl(hash, "ipfs://", this.gatewayUrl);
+}
-case 7:
-case "end":
-return _context3.stop();
-}
-}
-}, _callee3, this);
-}));
+const result = await fetch__default["default"](uri);
-function get(_x6) {
-return _get2.apply(this, arguments);
-}
+if (!result.ok && result.status === 500) {
+throw new Error("Error fetching ".concat(uri, " - Status code ").concat(result.status));
+}
-return get;
-}()
-/**
-* {@inheritDoc IStorage.getRaw}
-*/
+if (!result.ok && result.status !== 404) {
+const nextUrl = this.getNextPublicGateway();
-}, {
-key: "getRaw",
-value: function () {
-var _getRaw = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee4(hash) {
-var res;
-return _regeneratorRuntime().wrap(function _callee4$(_context4) {
-while (1) {
-switch (_context4.prev = _context4.next) {
-case 0:
-_context4.next = 2;
-return this._get(hash);
-case 2:
-res = _context4.sent;
-_context4.next = 5;
-return res.text();
-case 5:
-return _context4.abrupt("return", _context4.sent);
-case 6:
-case "end":
-return _context4.stop();
-}
-}
-}, _callee4, this);
-}));
-function getRaw(_x7) {
-return _getRaw.apply(this, arguments);
+if (nextUrl) {
+this.failedUrls.push(this.gatewayUrl);
+this.gatewayUrl = nextUrl;
+return this._get(hash);
+} else {
+throw new Error("Error fetching ".concat(uri, " - Status code ").concat(result.status));
+}
-}
-return getRaw;
-}()
-/**
-* {@inheritDoc IStorage.uploadMetadata}
-*/
+return result;
+}
+/**
+* Pre-processes metadata and uploads all file properties
+* to storage in *bulk*, then performs a string replacement of
+* all file properties -\> the resulting ipfs uri. This is
+* called internally by `uploadMetadataBatch`.
+*
+* @internal
+*
+* @returns - The processed metadata with properties pointing at ipfs in place of `File | Buffer`
+* @param metadatas
+* @param options
+*/
-}, {
-key: "uploadMetadata",
-value: function () {
-var _uploadMetadata = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee5(metadata, contractAddress, signerAddress, options) {
-var _yield$this$uploadMet, uris;
-return _regeneratorRuntime().wrap(function _callee5$(_context5) {
-while (1) {
-switch (_context5.prev = _context5.next) {
-case 0:
-_context5.next = 2;
-return this.uploadMetadataBatch([metadata], 0, contractAddress, signerAddress, options);
+async batchUploadProperties(metadatas, options) {
+// replace all active gateway url links with their raw ipfs hash
+const sanitizedMetadatas = replaceGatewayUrlWithHash(metadatas, "ipfs://", this.gatewayUrl); // extract any binary file to upload
-case 2:
-_yield$this$uploadMet = _context5.sent;
-uris = _yield$this$uploadMet.uris;
-return _context5.abrupt("return", uris[0]);
+const filesToUpload = sanitizedMetadatas.flatMap(m => this.buildFilePropertiesMap(m, [])); // if no binary files to upload, return the metadata
-case 5:
-case "end":
-return _context5.stop();
-}
-}
-}, _callee5, this);
-}));
+if (filesToUpload.length === 0) {
+return sanitizedMetadatas;
+} // otherwise upload those files
-function uploadMetadata(_x8, _x9, _x10, _x11) {
-return _uploadMetadata.apply(this, arguments);
-}
-return uploadMetadata;
-}()
-/**
-* {@inheritDoc IStorage.uploadMetadataBatch}
-*/
+const {
+cid,
+fileNames
+} = await this.uploader.uploadBatchWithCid(filesToUpload, undefined, undefined, undefined, options);
+const cids = []; // recurse ordered array
-}, {
-key: "uploadMetadataBatch",
-value: function () {
-var _uploadMetadataBatch = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee6(metadatas, fileStartNumber, contractAddress, signerAddress, options) {
-var metadataToUpload, _yield$this$uploader$3, cid, fileNames, baseUri, uris;
+for (const filename of fileNames) {
+cids.push("".concat(cid, "/").concat(filename));
+} // replace all files with their ipfs hash
-return _regeneratorRuntime().wrap(function _callee6$(_context6) {
-while (1) {
-switch (_context6.prev = _context6.next) {
-case 0:
-_context6.next = 2;
-return this.batchUploadProperties(metadatas, options);
-case 2:
-metadataToUpload = _context6.sent.map(function (m) {
-return JSON.stringify(m);
-});
-_context6.next = 5;
-return this.uploader.uploadBatchWithCid(metadataToUpload, fileStartNumber, contractAddress, signerAddress);
+return replaceFilePropertiesWithHashes(sanitizedMetadatas, cids);
+}
+/**
+* This function recursively traverses an object and hashes any
+* `Buffer` or `File` objects into the returned map.
+*
+* @param object - the Json Object
+* @param files - The running array of files or buffer to upload
+* @returns - The final map of all hashes to files
+*/
-case 5:
-_yield$this$uploader$3 = _context6.sent;
-cid = _yield$this$uploader$3.cid;
-fileNames = _yield$this$uploader$3.fileNames;
-baseUri = "ipfs://".concat(cid, "/");
-uris = fileNames.map(function (filename) {
-return "".concat(baseUri).concat(filename);
-});
-return _context6.abrupt("return", {
-baseUri: baseUri,
-uris: uris
-});
-case 11:
-case "end":
-return _context6.stop();
-}
-}
-}, _callee6, this);
-}));
+buildFilePropertiesMap(object) {
+let files = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
-function uploadMetadataBatch(_x12, _x13, _x14, _x15, _x16) {
-return _uploadMetadataBatch.apply(this, arguments);
-}
+if (Array.isArray(object)) {
+object.forEach(element => {
+this.buildFilePropertiesMap(element, files);
+});
+} else if (object) {
+const values = Object.values(object);
-return uploadMetadataBatch;
-}()
-/** *************************
-* PRIVATE FUNCTIONS
-*************************/
-}, {
-key: "_get",
-value: function () {
-var _get3 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee7(hash) {
-var uri, result, nextUrl;
-return _regeneratorRuntime().wrap(function _callee7$(_context7) {
-while (1) {
-switch (_context7.prev = _context7.next) {
-case 0:
-uri = hash;
-if (hash) {
-uri = resolveGatewayUrl(hash, "ipfs://", this.gatewayUrl);
-}
-_context7.next = 4;
-return fetch__default["default"](uri);
-case 4:
-result = _context7.sent;
-if (!(!result.ok && result.status === 500)) {
-_context7.next = 7;
-break;
-}
-throw new Error("Error fetching ".concat(uri, " - Status code ").concat(result.status));
-case 7:
-if (!(!result.ok && result.status !== 404)) {
-_context7.next = 16;
-break;
-}
-nextUrl = this.getNextPublicGateway();
-if (!nextUrl) {
-_context7.next = 15;
-break;
-}
-this.failedUrls.push(this.gatewayUrl);
-this.gatewayUrl = nextUrl;
-return _context7.abrupt("return", this._get(hash));
-case 15:
-throw new Error("Error fetching ".concat(uri, " - Status code ").concat(result.status));
-case 16:
-return _context7.abrupt("return", result);
-case 17:
-case "end":
-return _context7.stop();
-}
-}
-}, _callee7, this);
-}));
-function _get(_x17) {
-return _get3.apply(this, arguments);
+for (const val of values) {
+if (isFileInstance(val) || isBufferInstance(val)) {
+files.push(val);
+} else if (typeof val === "object") {
+this.buildFilePropertiesMap(val, files);
+}
+}
+}
-}
-return _get;
-}()
-/**
-* Pre-processes metadata and uploads all file properties
-* to storage in *bulk*, then performs a string replacement of
-* all file properties -\> the resulting ipfs uri. This is
-* called internally by `uploadMetadataBatch`.
-*
-* @internal
-*
-* @returns - The processed metadata with properties pointing at ipfs in place of `File | Buffer`
-* @param metadatas
-* @param options
-*/
+return files;
+}
+/**
+* FOR TESTING ONLY
+* @internal
+* @param data -
+* @param contractAddress -
+* @param signerAddress -
+*/
-}, {
-key: "batchUploadProperties",
-value: function () {
-var _batchUploadProperties = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee8(metadatas, options) {
-var _this2 = this;
-var sanitizedMetadatas, filesToUpload, _yield$this$uploader$4, cid, fileNames, cids, _iterator, _step, filename;
-return _regeneratorRuntime().wrap(function _callee8$(_context8) {
-while (1) {
-switch (_context8.prev = _context8.next) {
-case 0:
-// replace all active gateway url links with their raw ipfs hash
-sanitizedMetadatas = replaceGatewayUrlWithHash(metadatas, "ipfs://", this.gatewayUrl); // extract any binary file to upload
-filesToUpload = sanitizedMetadatas.flatMap(function (m) {
-return _this2.buildFilePropertiesMap(m, []);
-}); // if no binary files to upload, return the metadata
-if (!(filesToUpload.length === 0)) {
-_context8.next = 4;
-break;
-}
-return _context8.abrupt("return", sanitizedMetadatas);
-case 4:
-_context8.next = 6;
-return this.uploader.uploadBatchWithCid(filesToUpload, undefined, undefined, undefined, options);
-case 6:
-_yield$this$uploader$4 = _context8.sent;
-cid = _yield$this$uploader$4.cid;
-fileNames = _yield$this$uploader$4.fileNames;
-cids = []; // recurse ordered array
-_iterator = _createForOfIteratorHelper(fileNames);
-try {
-for (_iterator.s(); !(_step = _iterator.n()).done;) {
-filename = _step.value;
-cids.push("".concat(cid, "/").concat(filename));
-} // replace all files with their ipfs hash
-} catch (err) {
-_iterator.e(err);
-} finally {
-_iterator.f();
-}
-return _context8.abrupt("return", replaceFilePropertiesWithHashes(sanitizedMetadatas, cids));
-case 13:
-case "end":
-return _context8.stop();
-}
-}
-}, _callee8, this);
-}));
-function batchUploadProperties(_x18, _x19) {
-return _batchUploadProperties.apply(this, arguments);
+async uploadSingle(data, contractAddress, signerAddress) {
+// TODO move down to IStorageUpload
+const token = await this.uploader.getUploadToken(contractAddress || "");
+const metadata = {
+name: "CONSOLE-TS-SDK-".concat(contractAddress),
+keyvalues: {
+sdk: "typescript",
+contractAddress,
+signerAddress
+}
+};
+const formData = new FormData__default["default"]();
+const filepath = "files"; // Root directory
-return batchUploadProperties;
-}()
-/**
-* This function recursively traverses an object and hashes any
-* `Buffer` or `File` objects into the returned map.
-*
-* @param object - the Json Object
-* @param files - The running array of files or buffer to upload
-* @returns - The final map of all hashes to files
-*/
+formData.append("file", data, filepath);
+formData.append("pinataMetadata", JSON.stringify(metadata));
+formData.append("pinataOptions", JSON.stringify({
+wrapWithDirectory: false
+}));
+const res = await fetch__default["default"](PINATA_IPFS_URL, {
+method: "POST",
+headers: {
+Authorization: "Bearer ".concat(token),
+...formData.getHeaders()
+},
+body: formData.getBuffer()
+});
-}, {
-key: "buildFilePropertiesMap",
-value: function buildFilePropertiesMap(object) {
-var _this3 = this;
-var files = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
-if (Array.isArray(object)) {
-object.forEach(function (element) {
-_this3.buildFilePropertiesMap(element, files);
-});
-} else if (object) {
-var values = Object.values(object);
-for (var _i = 0, _values = values; _i < _values.length; _i++) {
-var val = _values[_i];
-if (isFileInstance(val) || isBufferInstance(val)) {
-files.push(val);
-} else if (_typeof(val) === "object") {
-this.buildFilePropertiesMap(val, files);
-}
-}
-}
-return files;
+if (!res.ok) {
+throw new Error("Failed to upload to IPFS [status code = ".concat(res.status, "]"));
+}
-/**
-* FOR TESTING ONLY
-* @internal
-* @param data -
-* @param contractAddress -
-* @param signerAddress -
-*/
-}, {
-key: "uploadSingle",
-value: function () {
-var _uploadSingle = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee9(data, contractAddress, signerAddress) {
-var token, metadata, formData, filepath, res, body;
-return _regeneratorRuntime().wrap(function _callee9$(_context9) {
-while (1) {
-switch (_context9.prev = _context9.next) {
-case 0:
-_context9.next = 2;
-return this.uploader.getUploadToken(contractAddress || "");
+const body = await res.json();
+return body.IpfsHash;
+}
-case 2:
-token = _context9.sent;
-metadata = {
-name: "CONSOLE-TS-SDK-".concat(contractAddress),
-keyvalues: {
-sdk: "typescript",
-contractAddress: contractAddress,
-signerAddress: signerAddress
-}
-};
-formData = new FormData__default["default"]();
-filepath = "files"; // Root directory
+}
-formData.append("file", data, filepath);
-formData.append("pinataMetadata", JSON.stringify(metadata));
-formData.append("pinataOptions", JSON.stringify({
-wrapWithDirectory: false
-}));
-_context9.next = 11;
-return fetch__default["default"](PINATA_IPFS_URL, {
-method: "POST",
-headers: _objectSpread2({
-Authorization: "Bearer ".concat(token)
-}, formData.getHeaders()),
-body: formData.getBuffer()
-});
-case 11:
-res = _context9.sent;
-if (res.ok) {
-_context9.next = 14;
-break;
-}
-throw new Error("Failed to upload to IPFS [status code = ".concat(res.status, "]"));
-case 14:
-_context9.next = 16;
-return res.json();
-case 16:
-body = _context9.sent;
-return _context9.abrupt("return", body.IpfsHash);
-case 18:
-case "end":
-return _context9.stop();
-}
-}
-}, _callee9, this);
-}));
-function uploadSingle(_x20, _x21, _x22) {
-return _uploadSingle.apply(this, arguments);
-}
-return uploadSingle;
-}()
-}]);
-return IpfsStorage;
-}();
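The IpfsStorage surface (upload, uploadBatch, get, getRaw, uploadMetadata, uploadMetadataBatch) is likewise behavior-compatible across the two versions, including the fallback that rotates through PUBLIC_GATEWAYS when a gateway request fails. A short consumer sketch (illustrative only; the metadata values are placeholders):

// Sketch: upload JSON metadata, then read it back through the gateway.
async function exampleRoundTrip() {
  const storage = new IpfsStorage(); // defaults to DEFAULT_IPFS_GATEWAY
  const uri = await storage.uploadMetadata({ name: "Name", description: "Description" });
  const metadata = await storage.get(uri); // ipfs:// hashes resolved to gateway URLs
  return { uri, metadata };
}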
/** | ||
@@ -1588,6 +670,4 @@ * Fetch and upload files to IPFS or any other storage. | ||
*/ | ||
var RemoteStorage = /*#__PURE__*/function () { | ||
function RemoteStorage(storage) { | ||
_classCallCheck(this, RemoteStorage); | ||
class RemoteStorage { | ||
constructor(storage) { | ||
_defineProperty(this, "storage", void 0); | ||
@@ -1613,191 +693,75 @@ | ||
  _createClass(RemoteStorage, [{
    key: "fetch",
    value: function () {
      var _fetch = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee(hash) {
        return _regeneratorRuntime().wrap(function _callee$(_context) {
          while (1) {
            switch (_context.prev = _context.next) {
              case 0:
                return _context.abrupt("return", this.storage.get(hash));
              case 1:
              case "end":
                return _context.stop();
            }
          }
        }, _callee, this);
      }));
      function fetch(_x) {
        return _fetch.apply(this, arguments);
      }
      return fetch;
    }()
    /**
     * Upload any data to an IPFS directory. We'll handle all the details for you, including
     * pinning your files and making sure that you get the fastest upload speeds.
     *
     * @example
     * ```javascript
     * // File upload
     * const files = [
     *   fs.readFileSync("file1.png"),
     *   fs.readFileSync("file2.png"),
     * ]
     * const result = await sdk.storage.upload(files);
     * // uri for each uploaded file will look something like: ipfs://<hash>/0
     *
     * // JSON metadata upload
     * const jsonMetadata = {
     *   name: "Name",
     *   description: "Description",
     * }
     * const result = await sdk.storage.upload(jsonMetadata);
     *
     * // Upload progress (browser only)
     * const result = await sdk.storage.upload(files, {
     *   onProgress: (event: UploadProgressEvent) => {
     *     console.log(`Downloaded ${event.progress} / ${event.total}`);
     *   },
     * });
     * ```
     *
     * @param data - An array of file data or an array of JSON metadata to upload to IPFS
     * @param options - Optional. Upload progress callback.
     * @returns The IPFS hash of the directory that holds all the uploaded data
     */
  }, {
    key: "upload",
    value: function () {
      var _upload = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee2(data, options) {
        var allFiles, allObjects;
        return _regeneratorRuntime().wrap(function _callee2$(_context2) {
          while (1) {
            switch (_context2.prev = _context2.next) {
              case 0:
                if (Array.isArray(data)) {
                  _context2.next = 6;
                  break;
                }
                if (!(isFileInstance(data) || isBufferInstance(data) || data.name && data.data && isBufferInstance(data.data))) {
                  _context2.next = 5;
                  break;
                }
                return _context2.abrupt("return", this.uploadBatch([data], options));
              case 5:
                return _context2.abrupt("return", this.uploadMetadataBatch([data], options));
              case 6:
                allFiles = data.filter(function (item) {
                  return isFileInstance(item) || isBufferInstance(item) || item.name && item.data && isBufferInstance(item.data);
                });
                allObjects = data.filter(function (item) {
                  return !isFileInstance(item) && !isBufferInstance(item);
                });
                if (!(allFiles.length === data.length)) {
                  _context2.next = 12;
                  break;
                }
                return _context2.abrupt("return", this.uploadBatch(data, options));
              case 12:
                if (!(allObjects.length === data.length)) {
                  _context2.next = 16;
                  break;
                }
                return _context2.abrupt("return", this.uploadMetadataBatch(data, options));
              case 16:
                throw new Error("Data to upload must be either all files or all JSON objects");
              case 17:
              case "end":
                return _context2.stop();
            }
          }
        }, _callee2, this);
      }));
      function upload(_x2, _x3) {
        return _upload.apply(this, arguments);
      }
      return upload;
    }()
  }, {
    key: "uploadBatch",
    value: function () {
      var _uploadBatch = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee3(files, options) {
        return _regeneratorRuntime().wrap(function _callee3$(_context3) {
          while (1) {
            switch (_context3.prev = _context3.next) {
              case 0:
                _context3.next = 2;
                return this.storage.uploadBatch(files, undefined, undefined, undefined, options);
              case 2:
                return _context3.abrupt("return", _context3.sent);
              case 3:
              case "end":
                return _context3.stop();
            }
          }
        }, _callee3, this);
      }));
      function uploadBatch(_x4, _x5) {
        return _uploadBatch.apply(this, arguments);
      }
      return uploadBatch;
    }()
  }, {
    key: "uploadMetadataBatch",
    value: function () {
      var _uploadMetadataBatch = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee4(metadatas, options) {
        return _regeneratorRuntime().wrap(function _callee4$(_context4) {
          while (1) {
            switch (_context4.prev = _context4.next) {
              case 0:
                _context4.next = 2;
                return this.storage.uploadMetadataBatch(metadatas, undefined, undefined, undefined, options);
              case 2:
                return _context4.abrupt("return", _context4.sent);
              case 3:
              case "end":
                return _context4.stop();
            }
          }
        }, _callee4, this);
      }));
      function uploadMetadataBatch(_x6, _x7) {
        return _uploadMetadataBatch.apply(this, arguments);
      }
      return uploadMetadataBatch;
    }()
  }]);
  return RemoteStorage;
}();
var isBrowser = function isBrowser() {
  return typeof window !== "undefined";
};
var fileOrBufferUnion = isBrowser() ? [zod.z["instanceof"](File), zod.z.string()] : [zod.z["instanceof"](Buffer), zod.z.string()];
var FileBufferOrStringSchema = zod.z.union(fileOrBufferUnion);
  async fetch(hash) {
    return this.storage.get(hash);
  }
  /**
   * Upload any data to an IPFS directory. We'll handle all the details for you, including
   * pinning your files and making sure that you get the fastest upload speeds.
   *
   * @example
   * ```javascript
   * // File upload
   * const files = [
   *   fs.readFileSync("file1.png"),
   *   fs.readFileSync("file2.png"),
   * ]
   * const result = await sdk.storage.upload(files);
   * // uri for each uploaded file will look something like: ipfs://<hash>/0
   *
   * // JSON metadata upload
   * const jsonMetadata = {
   *   name: "Name",
   *   description: "Description",
   * }
   * const result = await sdk.storage.upload(jsonMetadata);
   *
   * // Upload progress (browser only)
   * const result = await sdk.storage.upload(files, {
   *   onProgress: (event: UploadProgressEvent) => {
   *     console.log(`Downloaded ${event.progress} / ${event.total}`);
   *   },
   * });
   * ```
   *
   * @param data - An array of file data or an array of JSON metadata to upload to IPFS
   * @param options - Optional. Upload progress callback.
   * @returns The IPFS hash of the directory that holds all the uploaded data
   */
  async upload(data, options) {
    if (!Array.isArray(data)) {
      if (isFileInstance(data) || isBufferInstance(data) || data.name && data.data && isBufferInstance(data.data)) {
        return this.uploadBatch([data], options);
      } else {
        return this.uploadMetadataBatch([data], options);
      }
    }
    const allFiles = data.filter(item => isFileInstance(item) || isBufferInstance(item) || item.name && item.data && isBufferInstance(item.data));
    const allObjects = data.filter(item => !isFileInstance(item) && !isBufferInstance(item));
    if (allFiles.length === data.length) {
      return this.uploadBatch(data, options);
    } else if (allObjects.length === data.length) {
      return this.uploadMetadataBatch(data, options);
    } else {
      throw new Error("Data to upload must be either all files or all JSON objects");
    }
  }
  async uploadBatch(files, options) {
    return await this.storage.uploadBatch(files, undefined, undefined, undefined, options);
  }
  async uploadMetadataBatch(metadatas, options) {
    return await this.storage.uploadMetadataBatch(metadatas, undefined, undefined, undefined, options);
  }
}
const isBrowser = () => typeof window !== "undefined";
const fileOrBufferUnion = isBrowser() ? [zod.z.instanceof(File), zod.z.string()] : [zod.z.instanceof(Buffer), zod.z.string()];
const FileBufferOrStringSchema = zod.z.union(fileOrBufferUnion);
exports.FileBufferOrStringSchema = FileBufferOrStringSchema; | ||
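/**
 * A minimal sketch of using the exported schema, assuming a node environment
 * (where the union is `Buffer | string`; in the browser it is `File | string`):
 * @example
 * ```javascript
 * FileBufferOrStringSchema.parse("ipfs://QmFoo/0");    // ok, returns the string
 * FileBufferOrStringSchema.parse(Buffer.from("data")); // ok under node
 * FileBufferOrStringSchema.parse(42);                  // throws a ZodError
 * ```
 */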
@@ -1804,0 +768,0 @@ exports.IpfsStorage = IpfsStorage; |
@@ -29,522 +29,6 @@ 'use strict'; | ||
function ownKeys(object, enumerableOnly) { | ||
var keys = Object.keys(object); | ||
if (Object.getOwnPropertySymbols) { | ||
var symbols = Object.getOwnPropertySymbols(object); | ||
enumerableOnly && (symbols = symbols.filter(function (sym) { | ||
return Object.getOwnPropertyDescriptor(object, sym).enumerable; | ||
})), keys.push.apply(keys, symbols); | ||
} | ||
return keys; | ||
} | ||
function _objectSpread2(target) { | ||
for (var i = 1; i < arguments.length; i++) { | ||
var source = null != arguments[i] ? arguments[i] : {}; | ||
i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { | ||
_defineProperty(target, key, source[key]); | ||
}) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { | ||
Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); | ||
}); | ||
} | ||
return target; | ||
} | ||
function _typeof(obj) { | ||
"@babel/helpers - typeof"; | ||
return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { | ||
return typeof obj; | ||
} : function (obj) { | ||
return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; | ||
}, _typeof(obj); | ||
} | ||
function _arrayLikeToArray(arr, len) { | ||
if (len == null || len > arr.length) len = arr.length; | ||
for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; | ||
return arr2; | ||
} | ||
function _unsupportedIterableToArray(o, minLen) { | ||
if (!o) return; | ||
if (typeof o === "string") return _arrayLikeToArray(o, minLen); | ||
var n = Object.prototype.toString.call(o).slice(8, -1); | ||
if (n === "Object" && o.constructor) n = o.constructor.name; | ||
if (n === "Map" || n === "Set") return Array.from(o); | ||
if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); | ||
} | ||
function _createForOfIteratorHelper(o, allowArrayLike) { | ||
var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; | ||
if (!it) { | ||
if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { | ||
if (it) o = it; | ||
var i = 0; | ||
var F = function () {}; | ||
return { | ||
s: F, | ||
n: function () { | ||
if (i >= o.length) return { | ||
done: true | ||
}; | ||
return { | ||
done: false, | ||
value: o[i++] | ||
}; | ||
}, | ||
e: function (e) { | ||
throw e; | ||
}, | ||
f: F | ||
}; | ||
} | ||
throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); | ||
} | ||
var normalCompletion = true, | ||
didErr = false, | ||
err; | ||
return { | ||
s: function () { | ||
it = it.call(o); | ||
}, | ||
n: function () { | ||
var step = it.next(); | ||
normalCompletion = step.done; | ||
return step; | ||
}, | ||
e: function (e) { | ||
didErr = true; | ||
err = e; | ||
}, | ||
f: function () { | ||
try { | ||
if (!normalCompletion && it.return != null) it.return(); | ||
} finally { | ||
if (didErr) throw err; | ||
} | ||
} | ||
}; | ||
} | ||
function _regeneratorRuntime() { | ||
/*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/facebook/regenerator/blob/main/LICENSE */ | ||
_regeneratorRuntime = function () { | ||
return exports; | ||
}; | ||
var exports = {}, | ||
Op = Object.prototype, | ||
hasOwn = Op.hasOwnProperty, | ||
$Symbol = "function" == typeof Symbol ? Symbol : {}, | ||
iteratorSymbol = $Symbol.iterator || "@@iterator", | ||
asyncIteratorSymbol = $Symbol.asyncIterator || "@@asyncIterator", | ||
toStringTagSymbol = $Symbol.toStringTag || "@@toStringTag"; | ||
function define(obj, key, value) { | ||
return Object.defineProperty(obj, key, { | ||
value: value, | ||
enumerable: !0, | ||
configurable: !0, | ||
writable: !0 | ||
}), obj[key]; | ||
} | ||
try { | ||
define({}, ""); | ||
} catch (err) { | ||
define = function (obj, key, value) { | ||
return obj[key] = value; | ||
}; | ||
} | ||
function wrap(innerFn, outerFn, self, tryLocsList) { | ||
var protoGenerator = outerFn && outerFn.prototype instanceof Generator ? outerFn : Generator, | ||
generator = Object.create(protoGenerator.prototype), | ||
context = new Context(tryLocsList || []); | ||
return generator._invoke = function (innerFn, self, context) { | ||
var state = "suspendedStart"; | ||
return function (method, arg) { | ||
if ("executing" === state) throw new Error("Generator is already running"); | ||
if ("completed" === state) { | ||
if ("throw" === method) throw arg; | ||
return doneResult(); | ||
} | ||
for (context.method = method, context.arg = arg;;) { | ||
var delegate = context.delegate; | ||
if (delegate) { | ||
var delegateResult = maybeInvokeDelegate(delegate, context); | ||
if (delegateResult) { | ||
if (delegateResult === ContinueSentinel) continue; | ||
return delegateResult; | ||
} | ||
} | ||
if ("next" === context.method) context.sent = context._sent = context.arg;else if ("throw" === context.method) { | ||
if ("suspendedStart" === state) throw state = "completed", context.arg; | ||
context.dispatchException(context.arg); | ||
} else "return" === context.method && context.abrupt("return", context.arg); | ||
state = "executing"; | ||
var record = tryCatch(innerFn, self, context); | ||
if ("normal" === record.type) { | ||
if (state = context.done ? "completed" : "suspendedYield", record.arg === ContinueSentinel) continue; | ||
return { | ||
value: record.arg, | ||
done: context.done | ||
}; | ||
} | ||
"throw" === record.type && (state = "completed", context.method = "throw", context.arg = record.arg); | ||
} | ||
}; | ||
}(innerFn, self, context), generator; | ||
} | ||
function tryCatch(fn, obj, arg) { | ||
try { | ||
return { | ||
type: "normal", | ||
arg: fn.call(obj, arg) | ||
}; | ||
} catch (err) { | ||
return { | ||
type: "throw", | ||
arg: err | ||
}; | ||
} | ||
} | ||
exports.wrap = wrap; | ||
var ContinueSentinel = {}; | ||
function Generator() {} | ||
function GeneratorFunction() {} | ||
function GeneratorFunctionPrototype() {} | ||
var IteratorPrototype = {}; | ||
define(IteratorPrototype, iteratorSymbol, function () { | ||
return this; | ||
}); | ||
var getProto = Object.getPrototypeOf, | ||
NativeIteratorPrototype = getProto && getProto(getProto(values([]))); | ||
NativeIteratorPrototype && NativeIteratorPrototype !== Op && hasOwn.call(NativeIteratorPrototype, iteratorSymbol) && (IteratorPrototype = NativeIteratorPrototype); | ||
var Gp = GeneratorFunctionPrototype.prototype = Generator.prototype = Object.create(IteratorPrototype); | ||
function defineIteratorMethods(prototype) { | ||
["next", "throw", "return"].forEach(function (method) { | ||
define(prototype, method, function (arg) { | ||
return this._invoke(method, arg); | ||
}); | ||
}); | ||
} | ||
function AsyncIterator(generator, PromiseImpl) { | ||
function invoke(method, arg, resolve, reject) { | ||
var record = tryCatch(generator[method], generator, arg); | ||
if ("throw" !== record.type) { | ||
var result = record.arg, | ||
value = result.value; | ||
return value && "object" == typeof value && hasOwn.call(value, "__await") ? PromiseImpl.resolve(value.__await).then(function (value) { | ||
invoke("next", value, resolve, reject); | ||
}, function (err) { | ||
invoke("throw", err, resolve, reject); | ||
}) : PromiseImpl.resolve(value).then(function (unwrapped) { | ||
result.value = unwrapped, resolve(result); | ||
}, function (error) { | ||
return invoke("throw", error, resolve, reject); | ||
}); | ||
} | ||
reject(record.arg); | ||
} | ||
var previousPromise; | ||
this._invoke = function (method, arg) { | ||
function callInvokeWithMethodAndArg() { | ||
return new PromiseImpl(function (resolve, reject) { | ||
invoke(method, arg, resolve, reject); | ||
}); | ||
} | ||
return previousPromise = previousPromise ? previousPromise.then(callInvokeWithMethodAndArg, callInvokeWithMethodAndArg) : callInvokeWithMethodAndArg(); | ||
}; | ||
} | ||
function maybeInvokeDelegate(delegate, context) { | ||
var method = delegate.iterator[context.method]; | ||
if (undefined === method) { | ||
if (context.delegate = null, "throw" === context.method) { | ||
if (delegate.iterator.return && (context.method = "return", context.arg = undefined, maybeInvokeDelegate(delegate, context), "throw" === context.method)) return ContinueSentinel; | ||
context.method = "throw", context.arg = new TypeError("The iterator does not provide a 'throw' method"); | ||
} | ||
return ContinueSentinel; | ||
} | ||
var record = tryCatch(method, delegate.iterator, context.arg); | ||
if ("throw" === record.type) return context.method = "throw", context.arg = record.arg, context.delegate = null, ContinueSentinel; | ||
var info = record.arg; | ||
return info ? info.done ? (context[delegate.resultName] = info.value, context.next = delegate.nextLoc, "return" !== context.method && (context.method = "next", context.arg = undefined), context.delegate = null, ContinueSentinel) : info : (context.method = "throw", context.arg = new TypeError("iterator result is not an object"), context.delegate = null, ContinueSentinel); | ||
} | ||
function pushTryEntry(locs) { | ||
var entry = { | ||
tryLoc: locs[0] | ||
}; | ||
1 in locs && (entry.catchLoc = locs[1]), 2 in locs && (entry.finallyLoc = locs[2], entry.afterLoc = locs[3]), this.tryEntries.push(entry); | ||
} | ||
function resetTryEntry(entry) { | ||
var record = entry.completion || {}; | ||
record.type = "normal", delete record.arg, entry.completion = record; | ||
} | ||
function Context(tryLocsList) { | ||
this.tryEntries = [{ | ||
tryLoc: "root" | ||
}], tryLocsList.forEach(pushTryEntry, this), this.reset(!0); | ||
} | ||
function values(iterable) { | ||
if (iterable) { | ||
var iteratorMethod = iterable[iteratorSymbol]; | ||
if (iteratorMethod) return iteratorMethod.call(iterable); | ||
if ("function" == typeof iterable.next) return iterable; | ||
if (!isNaN(iterable.length)) { | ||
var i = -1, | ||
next = function next() { | ||
for (; ++i < iterable.length;) if (hasOwn.call(iterable, i)) return next.value = iterable[i], next.done = !1, next; | ||
return next.value = undefined, next.done = !0, next; | ||
}; | ||
return next.next = next; | ||
} | ||
} | ||
return { | ||
next: doneResult | ||
}; | ||
} | ||
function doneResult() { | ||
return { | ||
value: undefined, | ||
done: !0 | ||
}; | ||
} | ||
return GeneratorFunction.prototype = GeneratorFunctionPrototype, define(Gp, "constructor", GeneratorFunctionPrototype), define(GeneratorFunctionPrototype, "constructor", GeneratorFunction), GeneratorFunction.displayName = define(GeneratorFunctionPrototype, toStringTagSymbol, "GeneratorFunction"), exports.isGeneratorFunction = function (genFun) { | ||
var ctor = "function" == typeof genFun && genFun.constructor; | ||
return !!ctor && (ctor === GeneratorFunction || "GeneratorFunction" === (ctor.displayName || ctor.name)); | ||
}, exports.mark = function (genFun) { | ||
return Object.setPrototypeOf ? Object.setPrototypeOf(genFun, GeneratorFunctionPrototype) : (genFun.__proto__ = GeneratorFunctionPrototype, define(genFun, toStringTagSymbol, "GeneratorFunction")), genFun.prototype = Object.create(Gp), genFun; | ||
}, exports.awrap = function (arg) { | ||
return { | ||
__await: arg | ||
}; | ||
}, defineIteratorMethods(AsyncIterator.prototype), define(AsyncIterator.prototype, asyncIteratorSymbol, function () { | ||
return this; | ||
}), exports.AsyncIterator = AsyncIterator, exports.async = function (innerFn, outerFn, self, tryLocsList, PromiseImpl) { | ||
void 0 === PromiseImpl && (PromiseImpl = Promise); | ||
var iter = new AsyncIterator(wrap(innerFn, outerFn, self, tryLocsList), PromiseImpl); | ||
return exports.isGeneratorFunction(outerFn) ? iter : iter.next().then(function (result) { | ||
return result.done ? result.value : iter.next(); | ||
}); | ||
}, defineIteratorMethods(Gp), define(Gp, toStringTagSymbol, "Generator"), define(Gp, iteratorSymbol, function () { | ||
return this; | ||
}), define(Gp, "toString", function () { | ||
return "[object Generator]"; | ||
}), exports.keys = function (object) { | ||
var keys = []; | ||
for (var key in object) keys.push(key); | ||
return keys.reverse(), function next() { | ||
for (; keys.length;) { | ||
var key = keys.pop(); | ||
if (key in object) return next.value = key, next.done = !1, next; | ||
} | ||
return next.done = !0, next; | ||
}; | ||
}, exports.values = values, Context.prototype = { | ||
constructor: Context, | ||
reset: function (skipTempReset) { | ||
if (this.prev = 0, this.next = 0, this.sent = this._sent = undefined, this.done = !1, this.delegate = null, this.method = "next", this.arg = undefined, this.tryEntries.forEach(resetTryEntry), !skipTempReset) for (var name in this) "t" === name.charAt(0) && hasOwn.call(this, name) && !isNaN(+name.slice(1)) && (this[name] = undefined); | ||
}, | ||
stop: function () { | ||
this.done = !0; | ||
var rootRecord = this.tryEntries[0].completion; | ||
if ("throw" === rootRecord.type) throw rootRecord.arg; | ||
return this.rval; | ||
}, | ||
dispatchException: function (exception) { | ||
if (this.done) throw exception; | ||
var context = this; | ||
function handle(loc, caught) { | ||
return record.type = "throw", record.arg = exception, context.next = loc, caught && (context.method = "next", context.arg = undefined), !!caught; | ||
} | ||
for (var i = this.tryEntries.length - 1; i >= 0; --i) { | ||
var entry = this.tryEntries[i], | ||
record = entry.completion; | ||
if ("root" === entry.tryLoc) return handle("end"); | ||
if (entry.tryLoc <= this.prev) { | ||
var hasCatch = hasOwn.call(entry, "catchLoc"), | ||
hasFinally = hasOwn.call(entry, "finallyLoc"); | ||
if (hasCatch && hasFinally) { | ||
if (this.prev < entry.catchLoc) return handle(entry.catchLoc, !0); | ||
if (this.prev < entry.finallyLoc) return handle(entry.finallyLoc); | ||
} else if (hasCatch) { | ||
if (this.prev < entry.catchLoc) return handle(entry.catchLoc, !0); | ||
} else { | ||
if (!hasFinally) throw new Error("try statement without catch or finally"); | ||
if (this.prev < entry.finallyLoc) return handle(entry.finallyLoc); | ||
} | ||
} | ||
} | ||
}, | ||
abrupt: function (type, arg) { | ||
for (var i = this.tryEntries.length - 1; i >= 0; --i) { | ||
var entry = this.tryEntries[i]; | ||
if (entry.tryLoc <= this.prev && hasOwn.call(entry, "finallyLoc") && this.prev < entry.finallyLoc) { | ||
var finallyEntry = entry; | ||
break; | ||
} | ||
} | ||
finallyEntry && ("break" === type || "continue" === type) && finallyEntry.tryLoc <= arg && arg <= finallyEntry.finallyLoc && (finallyEntry = null); | ||
var record = finallyEntry ? finallyEntry.completion : {}; | ||
return record.type = type, record.arg = arg, finallyEntry ? (this.method = "next", this.next = finallyEntry.finallyLoc, ContinueSentinel) : this.complete(record); | ||
}, | ||
complete: function (record, afterLoc) { | ||
if ("throw" === record.type) throw record.arg; | ||
return "break" === record.type || "continue" === record.type ? this.next = record.arg : "return" === record.type ? (this.rval = this.arg = record.arg, this.method = "return", this.next = "end") : "normal" === record.type && afterLoc && (this.next = afterLoc), ContinueSentinel; | ||
}, | ||
finish: function (finallyLoc) { | ||
for (var i = this.tryEntries.length - 1; i >= 0; --i) { | ||
var entry = this.tryEntries[i]; | ||
if (entry.finallyLoc === finallyLoc) return this.complete(entry.completion, entry.afterLoc), resetTryEntry(entry), ContinueSentinel; | ||
} | ||
}, | ||
catch: function (tryLoc) { | ||
for (var i = this.tryEntries.length - 1; i >= 0; --i) { | ||
var entry = this.tryEntries[i]; | ||
if (entry.tryLoc === tryLoc) { | ||
var record = entry.completion; | ||
if ("throw" === record.type) { | ||
var thrown = record.arg; | ||
resetTryEntry(entry); | ||
} | ||
return thrown; | ||
} | ||
} | ||
throw new Error("illegal catch attempt"); | ||
}, | ||
delegateYield: function (iterable, resultName, nextLoc) { | ||
return this.delegate = { | ||
iterator: values(iterable), | ||
resultName: resultName, | ||
nextLoc: nextLoc | ||
}, "next" === this.method && (this.arg = undefined), ContinueSentinel; | ||
} | ||
}, exports; | ||
} | ||
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { | ||
try { | ||
var info = gen[key](arg); | ||
var value = info.value; | ||
} catch (error) { | ||
reject(error); | ||
return; | ||
} | ||
if (info.done) { | ||
resolve(value); | ||
} else { | ||
Promise.resolve(value).then(_next, _throw); | ||
} | ||
} | ||
function _asyncToGenerator(fn) { | ||
return function () { | ||
var self = this, | ||
args = arguments; | ||
return new Promise(function (resolve, reject) { | ||
var gen = fn.apply(self, args); | ||
function _next(value) { | ||
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); | ||
} | ||
function _throw(err) { | ||
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); | ||
} | ||
_next(undefined); | ||
}); | ||
}; | ||
} | ||
function _classCallCheck(instance, Constructor) { | ||
if (!(instance instanceof Constructor)) { | ||
throw new TypeError("Cannot call a class as a function"); | ||
} | ||
} | ||
function _defineProperties(target, props) { | ||
for (var i = 0; i < props.length; i++) { | ||
var descriptor = props[i]; | ||
descriptor.enumerable = descriptor.enumerable || false; | ||
descriptor.configurable = true; | ||
if ("value" in descriptor) descriptor.writable = true; | ||
Object.defineProperty(target, descriptor.key, descriptor); | ||
} | ||
} | ||
function _createClass(Constructor, protoProps, staticProps) { | ||
if (protoProps) _defineProperties(Constructor.prototype, protoProps); | ||
if (staticProps) _defineProperties(Constructor, staticProps); | ||
Object.defineProperty(Constructor, "prototype", { | ||
writable: false | ||
}); | ||
return Constructor; | ||
} | ||
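/**
 * The helpers above are what Babel emits so that `async`/`await`, `class`,
 * and `for...of` syntax can run on engines without native support. Roughly,
 * a source sketch like the following is rewritten at build time into the
 * `_asyncToGenerator(_regeneratorRuntime().mark(...))` plumbing seen here
 * (shape only, for orientation):
 * @example
 * ```javascript
 * // source before transpilation:
 * async function getJson(url) {
 *   const res = await fetch(url);
 *   return res.json();
 * }
 * // compiles to (approximately):
 * // var getJson = _asyncToGenerator(_regeneratorRuntime().mark(function _callee(url) { ... }));
 * ```
 */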
/** | ||
* @internal | ||
*/ | ||
var DEFAULT_IPFS_GATEWAY = "https://gateway.ipfscdn.io/ipfs/"; | ||
const DEFAULT_IPFS_GATEWAY = "https://gateway.ipfscdn.io/ipfs/"; | ||
/** | ||
@@ -554,3 +38,3 @@ * @internal | ||
var PUBLIC_GATEWAYS = ["https://gateway.ipfscdn.io/ipfs/", "https://cloudflare-ipfs.com/ipfs/", "https://ipfs.io/ipfs/"]; | ||
const PUBLIC_GATEWAYS = ["https://gateway.ipfscdn.io/ipfs/", "https://cloudflare-ipfs.com/ipfs/", "https://ipfs.io/ipfs/"]; | ||
/** | ||
@@ -560,3 +44,3 @@ * @internal | ||
var TW_IPFS_SERVER_URL = "https://upload.nftlabs.co"; | ||
const TW_IPFS_SERVER_URL = "https://upload.nftlabs.co"; | ||
/** | ||
@@ -566,3 +50,3 @@ * @internal | ||
var PINATA_IPFS_URL = "https://api.pinata.cloud/pinning/pinFileToIPFS"; | ||
const PINATA_IPFS_URL = "https://api.pinata.cloud/pinning/pinFileToIPFS"; | ||
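/**
 * A minimal sketch of how these constants fit together (`QmFoo` is a
 * placeholder, not a real CID): stored URIs keep the canonical `ipfs://`
 * scheme, and reads swap the scheme for whichever gateway is currently active.
 * @example
 * ```javascript
 * const hash = "ipfs://QmFoo/0";
 * const url = hash.replace("ipfs://", DEFAULT_IPFS_GATEWAY);
 * // -> "https://gateway.ipfscdn.io/ipfs/QmFoo/0"
 * ```
 */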
@@ -589,9 +73,9 @@ function isFileInstance(data) { | ||
function replaceFilePropertiesWithHashes(object, cids) { | ||
var keys = Object.keys(object); | ||
const keys = Object.keys(object); | ||
for (var key in keys) { | ||
var val = object[keys[key]]; | ||
var isFile = isFileInstance(val) || isBufferInstance(val); | ||
for (const key in keys) { | ||
const val = object[keys[key]]; | ||
const isFile = isFileInstance(val) || isBufferInstance(val); | ||
if (_typeof(val) === "object" && !isFile) { | ||
if (typeof val === "object" && !isFile) { | ||
replaceFilePropertiesWithHashes(val, cids); | ||
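/**
 * A minimal sketch of what `replaceFilePropertiesWithHashes` does to a
 * metadata object, assuming the `cids` array lists uploads in the same order
 * the traversal finds the file properties (`QmFoo` is a placeholder CID):
 * @example
 * ```javascript
 * const metadata = { name: "Token", image: Buffer.from("...") };
 * replaceFilePropertiesWithHashes([metadata], ["QmFoo/0"]);
 * // metadata.image now points at the uploaded file, e.g. "ipfs://QmFoo/0"
 * ```
 */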
@@ -623,11 +107,11 @@ continue; | ||
var keys = Object.keys(object); | ||
const keys = Object.keys(object); | ||
for (var key in keys) { | ||
var val = object[keys[key]]; | ||
for (const key in keys) { | ||
const val = object[keys[key]]; | ||
object[keys[key]] = resolveGatewayUrl(val, scheme, gatewayUrl); | ||
if (Array.isArray(val)) { | ||
object[keys[key]] = val.map(function (el) { | ||
if (_typeof(el) === "object") { | ||
object[keys[key]] = val.map(el => { | ||
if (typeof el === "object") { | ||
return replaceHashWithGatewayUrl(el, scheme, gatewayUrl); | ||
@@ -640,3 +124,3 @@ } else { | ||
if (_typeof(val) === "object") { | ||
if (typeof val === "object") { | ||
replaceHashWithGatewayUrl(val, scheme, gatewayUrl); | ||
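/**
 * A minimal sketch of `replaceHashWithGatewayUrl`, which recursively rewrites
 * every `ipfs://` URI in an object (arrays included) to the active gateway
 * (`QmFoo` is a placeholder CID):
 * @example
 * ```javascript
 * const json = { image: "ipfs://QmFoo/0", properties: { file: "ipfs://QmFoo/1" } };
 * replaceHashWithGatewayUrl(json, "ipfs://", "https://gateway.ipfscdn.io/ipfs/");
 * // -> { image: "https://gateway.ipfscdn.io/ipfs/QmFoo/0", ... }
 * ```
 */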
@@ -661,13 +145,13 @@ } | ||
var keys = Object.keys(object); | ||
const keys = Object.keys(object); | ||
for (var key in keys) { | ||
var val = object[keys[key]]; | ||
for (const key in keys) { | ||
const val = object[keys[key]]; | ||
object[keys[key]] = toIPFSHash(val, scheme, gatewayUrl); | ||
if (Array.isArray(val)) { | ||
object[keys[key]] = val.map(function (el) { | ||
var isFile = isFileInstance(el) || isBufferInstance(el); | ||
object[keys[key]] = val.map(el => { | ||
const isFile = isFileInstance(el) || isBufferInstance(el); | ||
if (_typeof(el) === "object" && !isFile) { | ||
if (typeof el === "object" && !isFile) { | ||
return replaceGatewayUrlWithHash(el, scheme, gatewayUrl); | ||
@@ -680,5 +164,5 @@ } else { | ||
var isFile = isFileInstance(val) || isBufferInstance(val); | ||
const isFile = isFileInstance(val) || isBufferInstance(val); | ||
if (_typeof(val) === "object" && !isFile) { | ||
if (typeof val === "object" && !isFile) { | ||
replaceGatewayUrlWithHash(val, scheme, gatewayUrl); | ||
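/**
 * `replaceGatewayUrlWithHash` is the inverse walk: any URL pointing at the
 * configured gateway is canonicalized back to an `ipfs://` URI before the
 * object is persisted (placeholder CID again):
 * @example
 * ```javascript
 * const metadata = { image: "https://gateway.ipfscdn.io/ipfs/QmFoo/0" };
 * replaceGatewayUrlWithHash(metadata, "ipfs://", "https://gateway.ipfscdn.io/ipfs/");
 * // -> { image: "ipfs://QmFoo/0" }
 * ```
 */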
@@ -728,274 +212,178 @@ } | ||
var PinataUploader = /*#__PURE__*/function () {
  function PinataUploader() {
    _classCallCheck(this, PinataUploader);
  }
  _createClass(PinataUploader, [{
    key: "getUploadToken",
    value:
    /**
     * Fetches a one-time-use upload token that can be used to upload
     * a file to storage.
     *
     * @returns - The one time use token that can be passed to the Pinata API.
     */
    function () {
      var _getUploadToken = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee(contractAddress) {
        var headers, res, body;
        return _regeneratorRuntime().wrap(function _callee$(_context) {
          while (1) {
            switch (_context.prev = _context.next) {
              case 0:
                headers = {
                  "X-App-Name": "CONSOLE-TS-SDK-".concat(contractAddress)
                };
                _context.next = 3;
                return fetch__default["default"]("".concat(TW_IPFS_SERVER_URL, "/grant"), {
                  method: "GET",
                  headers: headers
                });
              case 3:
                res = _context.sent;
                if (res.ok) {
                  _context.next = 6;
                  break;
                }
                throw new Error("Failed to get upload token");
              case 6:
                _context.next = 8;
                return res.text();
              case 8:
                body = _context.sent;
                return _context.abrupt("return", body);
              case 10:
              case "end":
                return _context.stop();
            }
          }
        }, _callee);
      }));
      function getUploadToken(_x) {
        return _getUploadToken.apply(this, arguments);
      }
      return getUploadToken;
    }()
  }, {
    key: "uploadBatchWithCid",
    value: function () {
      var _uploadBatchWithCid = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee2(files) {
        var fileStartNumber,
          contractAddress,
          signerAddress,
          options,
          token,
          formData,
          _this$buildFormData,
          data,
          fileNames,
          res,
          body,
          cid,
          _args2 = arguments;
        return _regeneratorRuntime().wrap(function _callee2$(_context2) {
          while (1) {
            switch (_context2.prev = _context2.next) {
              case 0:
                fileStartNumber = _args2.length > 1 && _args2[1] !== undefined ? _args2[1] : 0;
                contractAddress = _args2.length > 2 ? _args2[2] : undefined;
                signerAddress = _args2.length > 3 ? _args2[3] : undefined;
                options = _args2.length > 4 ? _args2[4] : undefined;
                _context2.next = 6;
                return this.getUploadToken(contractAddress || "");
              case 6:
                token = _context2.sent;
                formData = new FormData__default["default"]();
                _this$buildFormData = this.buildFormData(formData, files, fileStartNumber, contractAddress, signerAddress), data = _this$buildFormData.data, fileNames = _this$buildFormData.fileNames;
                if (!(typeof window === "undefined")) {
                  _context2.next = 25;
                  break;
                }
                if (options !== null && options !== void 0 && options.onProgress) {
                  console.warn("The onProgress option is only supported in the browser");
                }
                _context2.next = 13;
                return fetch__default["default"](PINATA_IPFS_URL, {
                  method: "POST",
                  headers: _objectSpread2({
                    Authorization: "Bearer ".concat(token)
                  }, data.getHeaders()),
                  body: data.getBuffer()
                });
              case 13:
                res = _context2.sent;
                _context2.next = 16;
                return res.json();
              case 16:
                body = _context2.sent;
                if (res.ok) {
                  _context2.next = 19;
                  break;
                }
                throw new Error("Failed to upload files to IPFS");
              case 19:
                cid = body.IpfsHash;
                if (cid) {
                  _context2.next = 22;
                  break;
                }
                throw new Error("Failed to upload files to IPFS");
              case 22:
                return _context2.abrupt("return", {
                  cid: cid,
                  fileNames: fileNames
                });
              case 25:
                return _context2.abrupt("return", new Promise(function (resolve, reject) {
                  var xhr = new XMLHttpRequest();
                  xhr.open("POST", PINATA_IPFS_URL);
                  xhr.setRequestHeader("Authorization", "Bearer ".concat(token));
                  xhr.onloadend = function () {
                    if (xhr.status !== 200) {
                      throw new Error("Failed to upload files to IPFS");
                    }
                    var cid = JSON.parse(xhr.responseText).IpfsHash;
                    if (!cid) {
                      throw new Error("Failed to upload files to IPFS");
                    }
                    resolve({
                      cid: cid,
                      fileNames: fileNames
                    });
                  };
                  xhr.onerror = function (err) {
                    reject(err);
                  };
                  if (xhr.upload) {
                    xhr.upload.onprogress = function (event) {
                      if (options !== null && options !== void 0 && options.onProgress) {
                        options === null || options === void 0 ? void 0 : options.onProgress({
                          progress: event.loaded,
                          total: event.total
                        });
                      }
                    };
                  }
                  xhr.send(data);
                }));
              case 26:
              case "end":
                return _context2.stop();
            }
          }
        }, _callee2, this);
      }));
      function uploadBatchWithCid(_x2) {
        return _uploadBatchWithCid.apply(this, arguments);
      }
      return uploadBatchWithCid;
    }()
  }, {
    key: "buildFormData",
    value: function buildFormData(data, files) {
      var fileStartNumber = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
      var contractAddress = arguments.length > 3 ? arguments[3] : undefined;
      var signerAddress = arguments.length > 4 ? arguments[4] : undefined;
      var metadata = {
        name: "CONSOLE-TS-SDK-".concat(contractAddress),
        keyvalues: {
          sdk: "typescript",
          contractAddress: contractAddress,
          signerAddress: signerAddress
        }
      };
      var fileNames = [];
      files.forEach(function (file, i) {
        var fileName = "";
        var fileData = file; // if it is a file, we passthrough the file extensions,
        // if it is a buffer or string, the filename would be fileStartNumber + index
        // if it is a buffer or string with names, the filename would be the name
        if (isFileInstance(file)) {
          var extensions = "";
          if (file.name) {
            var extensionStartIndex = file.name.lastIndexOf(".");
            if (extensionStartIndex > -1) {
              extensions = file.name.substring(extensionStartIndex);
            }
          }
          fileName = "".concat(i + fileStartNumber).concat(extensions);
        } else if (isBufferInstance(file) || typeof file === "string") {
          fileName = "".concat(i + fileStartNumber);
        } else if (file && file.name && file !== null && file !== void 0 && file.data) {
          fileData = file === null || file === void 0 ? void 0 : file.data;
          fileName = "".concat(file.name);
        } else {
          // default behavior
          fileName = "".concat(i + fileStartNumber);
        }
        var filepath = "files/".concat(fileName);
        if (fileNames.indexOf(fileName) > -1) {
          throw new Error("DUPLICATE_FILE_NAME_ERROR: File name ".concat(fileName, " was passed for more than one file."));
        }
        fileNames.push(fileName);
        if (typeof window === "undefined") {
          data.append("file", fileData, {
            filepath: filepath
          });
        } else {
          // browser does blob things, filepath is parsed differently on browser vs node.
          // pls pinata?
          data.append("file", new Blob([fileData]), filepath);
        }
      });
      data.append("pinataMetadata", JSON.stringify(metadata));
      return {
        data: data,
        fileNames: fileNames
      };
    }
  }]);
  return PinataUploader;
}();
class PinataUploader {
  /**
   * Fetches a one-time-use upload token that can be used to upload
   * a file to storage.
   *
   * @returns - The one time use token that can be passed to the Pinata API.
   */
  async getUploadToken(contractAddress) {
    const headers = {
      "X-App-Name": "CONSOLE-TS-SDK-".concat(contractAddress)
    };
    const res = await fetch__default["default"]("".concat(TW_IPFS_SERVER_URL, "/grant"), {
      method: "GET",
      headers
    });
    if (!res.ok) {
      throw new Error("Failed to get upload token");
    }
    const body = await res.text();
    return body;
  }
  async uploadBatchWithCid(files) {
    let fileStartNumber = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
    let contractAddress = arguments.length > 2 ? arguments[2] : undefined;
    let signerAddress = arguments.length > 3 ? arguments[3] : undefined;
    let options = arguments.length > 4 ? arguments[4] : undefined;
    const token = await this.getUploadToken(contractAddress || "");
    const formData = new FormData__default["default"]();
    const {
      data,
      fileNames
    } = this.buildFormData(formData, files, fileStartNumber, contractAddress, signerAddress);
    if (typeof window === "undefined") {
      if (options !== null && options !== void 0 && options.onProgress) {
        console.warn("The onProgress option is only supported in the browser");
      }
      const res = await fetch__default["default"](PINATA_IPFS_URL, {
        method: "POST",
        headers: {
          Authorization: "Bearer ".concat(token),
          ...data.getHeaders()
        },
        body: data.getBuffer()
      });
      const body = await res.json();
      if (!res.ok) {
        throw new Error("Failed to upload files to IPFS");
      }
      const cid = body.IpfsHash;
      if (!cid) {
        throw new Error("Failed to upload files to IPFS");
      }
      return {
        cid,
        fileNames
      };
    } else {
      return new Promise((resolve, reject) => {
        const xhr = new XMLHttpRequest();
        xhr.open("POST", PINATA_IPFS_URL);
        xhr.setRequestHeader("Authorization", "Bearer ".concat(token));
        xhr.onloadend = () => {
          if (xhr.status !== 200) {
            throw new Error("Failed to upload files to IPFS");
          }
          const cid = JSON.parse(xhr.responseText).IpfsHash;
          if (!cid) {
            throw new Error("Failed to upload files to IPFS");
          }
          resolve({
            cid,
            fileNames
          });
        };
        xhr.onerror = err => {
          reject(err);
        };
        if (xhr.upload) {
          xhr.upload.onprogress = event => {
            if (options !== null && options !== void 0 && options.onProgress) {
              options === null || options === void 0 ? void 0 : options.onProgress({
                progress: event.loaded,
                total: event.total
              });
            }
          };
        }
        xhr.send(data);
      });
    }
  }
  buildFormData(data, files) {
    let fileStartNumber = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
    let contractAddress = arguments.length > 3 ? arguments[3] : undefined;
    let signerAddress = arguments.length > 4 ? arguments[4] : undefined;
    const metadata = {
      name: "CONSOLE-TS-SDK-".concat(contractAddress),
      keyvalues: {
        sdk: "typescript",
        contractAddress,
        signerAddress
      }
    };
    const fileNames = [];
    files.forEach((file, i) => {
      let fileName = "";
      let fileData = file; // if it is a file, we passthrough the file extensions,
      // if it is a buffer or string, the filename would be fileStartNumber + index
      // if it is a buffer or string with names, the filename would be the name
      if (isFileInstance(file)) {
        let extensions = "";
        if (file.name) {
          const extensionStartIndex = file.name.lastIndexOf(".");
          if (extensionStartIndex > -1) {
            extensions = file.name.substring(extensionStartIndex);
          }
        }
        fileName = "".concat(i + fileStartNumber).concat(extensions);
      } else if (isBufferInstance(file) || typeof file === "string") {
        fileName = "".concat(i + fileStartNumber);
      } else if (file && file.name && file !== null && file !== void 0 && file.data) {
        fileData = file === null || file === void 0 ? void 0 : file.data;
        fileName = "".concat(file.name);
      } else {
        // default behavior
        fileName = "".concat(i + fileStartNumber);
      }
      const filepath = "files/".concat(fileName);
      if (fileNames.indexOf(fileName) > -1) {
        throw new Error("DUPLICATE_FILE_NAME_ERROR: File name ".concat(fileName, " was passed for more than one file."));
      }
      fileNames.push(fileName);
      if (typeof window === "undefined") {
        data.append("file", fileData, {
          filepath
        });
      } else {
        // browser does blob things, filepath is parsed differently on browser vs node.
        // pls pinata?
        data.append("file", new Blob([fileData]), filepath);
      }
    });
    data.append("pinataMetadata", JSON.stringify(metadata));
    return {
      data,
      fileNames
    };
  }
}
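/**
 * A minimal sketch of the file-naming rules `buildFormData` applies, assuming
 * a fileStartNumber of 0 (names are illustrative):
 * @example
 * ```javascript
 * // [File "avatar.png", Buffer, { name: "meta.json", data: Buffer }] becomes:
 * //   files/0.png     (File: index-based name, extension passed through)
 * //   files/1         (Buffer or string: just fileStartNumber + index)
 * //   files/meta.json (named buffer: uses the given name)
 * // passing the same name twice throws DUPLICATE_FILE_NAME_ERROR
 * ```
 */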
/** | ||
@@ -1007,3 +395,3 @@ * IPFS Storage implementation, accepts custom IPFS gateways | ||
var IpfsStorage = /*#__PURE__*/function () { | ||
class IpfsStorage { | ||
/** | ||
@@ -1013,8 +401,6 @@ * {@inheritdoc IStorage.gatewayUrl} | ||
*/ | ||
function IpfsStorage() { | ||
var gatewayUrl = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : DEFAULT_IPFS_GATEWAY; | ||
var uploader = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : new PinataUploader(); | ||
constructor() { | ||
let gatewayUrl = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : DEFAULT_IPFS_GATEWAY; | ||
let uploader = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : new PinataUploader(); | ||
_classCallCheck(this, IpfsStorage); | ||
_defineProperty(this, "gatewayUrl", void 0); | ||
@@ -1030,555 +416,251 @@ | ||
  getNextPublicGateway() {
    const urlsToTry = PUBLIC_GATEWAYS.filter(url => !this.failedUrls.includes(url)).filter(url => url !== this.gatewayUrl);
    if (urlsToTry.length > 0) {
      return urlsToTry[0];
    } else {
      this.failedUrls = [];
      return undefined;
    }
  }
  /**
   * Upload a file to IPFS and return the hash
   * @remarks This method is a wrapper around {@link IStorage.upload}
   * @example
   * ```javascript
   * const file = './path/to/file.png'; // Can be a path or a File object such as a file from an input element.
   * const hash = await sdk.storage.upload(file);
   * ```
   *
   *
   */
  async upload(data, contractAddress, signerAddress, options) {
    const {
      cid,
      fileNames
    } = await this.uploader.uploadBatchWithCid([data], 0, contractAddress, signerAddress, options);
    const baseUri = "ipfs://".concat(cid, "/");
    return "".concat(baseUri).concat(fileNames[0]);
  }
  /**
   * {@inheritDoc IStorage.uploadBatch}
   */
  async uploadBatch(files) {
    let fileStartNumber = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
    let contractAddress = arguments.length > 2 ? arguments[2] : undefined;
    let signerAddress = arguments.length > 3 ? arguments[3] : undefined;
    let options = arguments.length > 4 ? arguments[4] : undefined;
    const {
      cid,
      fileNames
    } = await this.uploader.uploadBatchWithCid(files, fileStartNumber, contractAddress, signerAddress, options);
    const baseUri = "ipfs://".concat(cid, "/");
    const uris = fileNames.map(filename => "".concat(baseUri).concat(filename));
    return {
      baseUri,
      uris
    };
  }
  /**
   * {@inheritDoc IStorage.get}
   */
  async get(hash) {
    const res = await this._get(hash);
    const json = await res.json();
    return replaceHashWithGatewayUrl(json, "ipfs://", this.gatewayUrl);
  }
  /**
   * {@inheritDoc IStorage.getRaw}
   */
  async getRaw(hash) {
    const res = await this._get(hash);
    return await res.text();
  }
  /**
   * {@inheritDoc IStorage.uploadMetadata}
   */
  async uploadMetadata(metadata, contractAddress, signerAddress, options) {
    // since there's only single object, always use the first index
    const {
      uris
    } = await this.uploadMetadataBatch([metadata], 0, contractAddress, signerAddress, options);
    return uris[0];
  }
  /**
   * {@inheritDoc IStorage.uploadMetadataBatch}
   */
  async uploadMetadataBatch(metadatas, fileStartNumber, contractAddress, signerAddress, options) {
    const metadataToUpload = (await this.batchUploadProperties(metadatas, options)).map(m => JSON.stringify(m));
    const {
      cid,
      fileNames
    } = await this.uploader.uploadBatchWithCid(metadataToUpload, fileStartNumber, contractAddress, signerAddress);
    const baseUri = "ipfs://".concat(cid, "/");
    const uris = fileNames.map(filename => "".concat(baseUri).concat(filename));
    return {
      baseUri,
      uris
    };
  }
  /** *************************
   * PRIVATE FUNCTIONS
   *************************/
  async _get(hash) {
    let uri = hash;
    if (hash) {
      uri = resolveGatewayUrl(hash, "ipfs://", this.gatewayUrl);
    }
    const result = await fetch__default["default"](uri);
    if (!result.ok && result.status === 500) {
      throw new Error("Error fetching ".concat(uri, " - Status code ").concat(result.status));
    }
    if (!result.ok && result.status !== 404) {
      const nextUrl = this.getNextPublicGateway();
      if (nextUrl) {
        this.failedUrls.push(this.gatewayUrl);
        this.gatewayUrl = nextUrl;
        return this._get(hash);
      } else {
        throw new Error("Error fetching ".concat(uri, " - Status code ").concat(result.status));
      }
    }
    return result;
  }
  /**
   * Pre-processes metadata and uploads all file properties
   * to storage in *bulk*, then performs a string replacement of
   * all file properties -\> the resulting ipfs uri. This is
   * called internally by `uploadMetadataBatch`.
   *
   * @internal
   *
   * @returns - The processed metadata with properties pointing at ipfs in place of `File | Buffer`
   * @param metadatas
   * @param options
   */
  async batchUploadProperties(metadatas, options) {
    // replace all active gateway url links with their raw ipfs hash
    const sanitizedMetadatas = replaceGatewayUrlWithHash(metadatas, "ipfs://", this.gatewayUrl); // extract any binary file to upload
    const filesToUpload = sanitizedMetadatas.flatMap(m => this.buildFilePropertiesMap(m, [])); // if no binary files to upload, return the metadata
    if (filesToUpload.length === 0) {
      return sanitizedMetadatas;
    } // otherwise upload those files
    const {
      cid,
      fileNames
    } = await this.uploader.uploadBatchWithCid(filesToUpload, undefined, undefined, undefined, options);
    const cids = []; // recurse ordered array
    for (const filename of fileNames) {
      cids.push("".concat(cid, "/").concat(filename));
    } // replace all files with their ipfs hash
    return replaceFilePropertiesWithHashes(sanitizedMetadatas, cids);
  }
  /**
   * This function recursively traverses an object and hashes any
   * `Buffer` or `File` objects into the returned map.
   *
   * @param object - the Json Object
   * @param files - The running array of files or buffer to upload
   * @returns - The final map of all hashes to files
   */
  buildFilePropertiesMap(object) {
    let files = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
    if (Array.isArray(object)) {
      object.forEach(element => {
        this.buildFilePropertiesMap(element, files);
      });
    } else if (object) {
      const values = Object.values(object);
      for (const val of values) {
        if (isFileInstance(val) || isBufferInstance(val)) {
          files.push(val);
        } else if (typeof val === "object") {
          this.buildFilePropertiesMap(val, files);
        }
      }
    }
    return files;
  }
  /**
   * FOR TESTING ONLY
   * @internal
   * @param data -
   * @param contractAddress -
   * @param signerAddress -
   */
  _createClass(IpfsStorage, [{
    key: "getNextPublicGateway",
    value: function getNextPublicGateway() {
      var _this = this;
      var urlsToTry = PUBLIC_GATEWAYS.filter(function (url) {
        return !_this.failedUrls.includes(url);
      }).filter(function (url) {
        return url !== _this.gatewayUrl;
      });
      if (urlsToTry.length > 0) {
        return urlsToTry[0];
      } else {
        this.failedUrls = [];
        return undefined;
      }
    }
    /**
     * Upload a file to IPFS and return the hash
     * @remarks This method is a wrapper around {@link IStorage.upload}
     * @example
     * ```javascript
     * const file = './path/to/file.png'; // Can be a path or a File object such as a file from an input element.
     * const hash = await sdk.storage.upload(file);
     * ```
     *
     *
     */
  }, {
    key: "upload",
    value: function () {
      var _upload = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee(data, contractAddress, signerAddress, options) {
        var _yield$this$uploader$, cid, fileNames, baseUri;
        return _regeneratorRuntime().wrap(function _callee$(_context) {
          while (1) {
            switch (_context.prev = _context.next) {
              case 0:
                _context.next = 2;
                return this.uploader.uploadBatchWithCid([data], 0, contractAddress, signerAddress, options);
              case 2:
                _yield$this$uploader$ = _context.sent;
                cid = _yield$this$uploader$.cid;
                fileNames = _yield$this$uploader$.fileNames;
                baseUri = "ipfs://".concat(cid, "/");
                return _context.abrupt("return", "".concat(baseUri).concat(fileNames[0]));
              case 7:
              case "end":
                return _context.stop();
            }
          }
        }, _callee, this);
      }));
      function upload(_x, _x2, _x3, _x4) {
        return _upload.apply(this, arguments);
      }
      return upload;
    }()
    /**
     * {@inheritDoc IStorage.uploadBatch}
     */
  }, {
    key: "uploadBatch",
    value: function () {
      var _uploadBatch = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee2(files) {
        var fileStartNumber,
          contractAddress,
          signerAddress,
          options,
          _yield$this$uploader$2,
          cid,
          fileNames,
          baseUri,
          uris,
          _args2 = arguments;
        return _regeneratorRuntime().wrap(function _callee2$(_context2) {
          while (1) {
            switch (_context2.prev = _context2.next) {
              case 0:
                fileStartNumber = _args2.length > 1 && _args2[1] !== undefined ? _args2[1] : 0;
                contractAddress = _args2.length > 2 ? _args2[2] : undefined;
                signerAddress = _args2.length > 3 ? _args2[3] : undefined;
                options = _args2.length > 4 ? _args2[4] : undefined;
                _context2.next = 6;
                return this.uploader.uploadBatchWithCid(files, fileStartNumber, contractAddress, signerAddress, options);
              case 6:
                _yield$this$uploader$2 = _context2.sent;
                cid = _yield$this$uploader$2.cid;
                fileNames = _yield$this$uploader$2.fileNames;
                baseUri = "ipfs://".concat(cid, "/");
                uris = fileNames.map(function (filename) {
                  return "".concat(baseUri).concat(filename);
                });
                return _context2.abrupt("return", {
                  baseUri: baseUri,
                  uris: uris
                });
              case 12:
              case "end":
                return _context2.stop();
            }
          }
        }, _callee2, this);
      }));
      function uploadBatch(_x5) {
        return _uploadBatch.apply(this, arguments);
      }
      return uploadBatch;
    }()
    /**
     * {@inheritDoc IStorage.get}
     */
  }, {
    key: "get",
    value: function () {
      var _get2 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee3(hash) {
        var res, json;
        return _regeneratorRuntime().wrap(function _callee3$(_context3) {
          while (1) {
            switch (_context3.prev = _context3.next) {
              case 0:
                _context3.next = 2;
                return this._get(hash);
              case 2:
                res = _context3.sent;
                _context3.next = 5;
                return res.json();
              case 5:
                json = _context3.sent;
                return _context3.abrupt("return", replaceHashWithGatewayUrl(json, "ipfs://", this.gatewayUrl));
              case 7:
              case "end":
                return _context3.stop();
            }
          }
        }, _callee3, this);
      }));
      function get(_x6) {
        return _get2.apply(this, arguments);
      }
      return get;
    }()
    /**
     * {@inheritDoc IStorage.getRaw}
     */
  }, {
    key: "getRaw",
    value: function () {
      var _getRaw = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee4(hash) {
        var res;
        return _regeneratorRuntime().wrap(function _callee4$(_context4) {
          while (1) {
            switch (_context4.prev = _context4.next) {
              case 0:
                _context4.next = 2;
                return this._get(hash);
              case 2:
                res = _context4.sent;
                _context4.next = 5;
                return res.text();
              case 5:
                return _context4.abrupt("return", _context4.sent);
              case 6:
              case "end":
                return _context4.stop();
            }
          }
        }, _callee4, this);
      }));
      function getRaw(_x7) {
        return _getRaw.apply(this, arguments);
      }
      return getRaw;
    }()
    /**
     * {@inheritDoc IStorage.uploadMetadata}
     */
  }, {
    key: "uploadMetadata",
    value: function () {
      var _uploadMetadata = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee5(metadata, contractAddress, signerAddress, options) {
        var _yield$this$uploadMet, uris;
        return _regeneratorRuntime().wrap(function _callee5$(_context5) {
          while (1) {
            switch (_context5.prev = _context5.next) {
              case 0:
                _context5.next = 2;
                return this.uploadMetadataBatch([metadata], 0, contractAddress, signerAddress, options);
              case 2:
                _yield$this$uploadMet = _context5.sent;
                uris = _yield$this$uploadMet.uris;
                return _context5.abrupt("return", uris[0]);
              case 5:
              case "end":
                return _context5.stop();
            }
          }
        }, _callee5, this);
      }));
      function uploadMetadata(_x8, _x9, _x10, _x11) {
        return _uploadMetadata.apply(this, arguments);
      }
      return uploadMetadata;
    }()
    /**
     * {@inheritDoc IStorage.uploadMetadataBatch}
     */
  }, {
    key: "uploadMetadataBatch",
    value: function () {
      var _uploadMetadataBatch = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee6(metadatas, fileStartNumber, contractAddress, signerAddress, options) {
        var metadataToUpload, _yield$this$uploader$3, cid, fileNames, baseUri, uris;
        return _regeneratorRuntime().wrap(function _callee6$(_context6) {
          while (1) {
            switch (_context6.prev = _context6.next) {
              case 0:
                _context6.next = 2;
                return this.batchUploadProperties(metadatas, options);
              case 2:
                metadataToUpload = _context6.sent.map(function (m) {
                  return JSON.stringify(m);
                });
                _context6.next = 5;
                return this.uploader.uploadBatchWithCid(metadataToUpload, fileStartNumber, contractAddress, signerAddress);
              case 5:
                _yield$this$uploader$3 = _context6.sent;
                cid = _yield$this$uploader$3.cid;
                fileNames = _yield$this$uploader$3.fileNames;
                baseUri = "ipfs://".concat(cid, "/");
                uris = fileNames.map(function (filename) {
                  return "".concat(baseUri).concat(filename);
                });
                return _context6.abrupt("return", {
                  baseUri: baseUri,
                  uris: uris
                });
              case 11:
              case "end":
                return _context6.stop();
            }
          }
        }, _callee6, this);
      }));
      function uploadMetadataBatch(_x12, _x13, _x14, _x15, _x16) {
        return _uploadMetadataBatch.apply(this, arguments);
      }
      return uploadMetadataBatch;
    }()
    /** *************************
     * PRIVATE FUNCTIONS
     *************************/
  }, {
    key: "_get",
    value: function () {
      var _get3 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee7(hash) {
        var uri, result, nextUrl;
        return _regeneratorRuntime().wrap(function _callee7$(_context7) {
          while (1) {
            switch (_context7.prev = _context7.next) {
              case 0:
                uri = hash;
                if (hash) {
                  uri = resolveGatewayUrl(hash, "ipfs://", this.gatewayUrl);
                }
                _context7.next = 4;
                return fetch__default["default"](uri);
              case 4:
                result = _context7.sent;
                if (!(!result.ok && result.status === 500)) {
                  _context7.next = 7;
                  break;
                }
                throw new Error("Error fetching ".concat(uri, " - Status code ").concat(result.status));
              case 7:
                if (!(!result.ok && result.status !== 404)) {
                  _context7.next = 16;
                  break;
                }
                nextUrl = this.getNextPublicGateway();
                if (!nextUrl) {
                  _context7.next = 15;
                  break;
                }
                this.failedUrls.push(this.gatewayUrl);
                this.gatewayUrl = nextUrl;
                return _context7.abrupt("return", this._get(hash));
              case 15:
                throw new Error("Error fetching ".concat(uri, " - Status code ").concat(result.status));
              case 16:
                return _context7.abrupt("return", result);
              case 17:
              case "end":
                return _context7.stop();
            }
          }
        }, _callee7, this);
      }));
      function _get(_x17) {
        return _get3.apply(this, arguments);
      }
      return _get;
    }()
    /**
     * Pre-processes metadata and uploads all file properties
     * to storage in *bulk*, then performs a string replacement of
     * all file properties -\> the resulting ipfs uri. This is
     * called internally by `uploadMetadataBatch`.
     *
     * @internal
     *
     * @returns - The processed metadata with properties pointing at ipfs in place of `File | Buffer`
     * @param metadatas
     * @param options
     */
  }, {
    key: "batchUploadProperties",
    value: function () {
      var _batchUploadProperties = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee8(metadatas, options) {
        var _this2 = this;
        var sanitizedMetadatas, filesToUpload, _yield$this$uploader$4, cid, fileNames, cids, _iterator, _step, filename;
        return _regeneratorRuntime().wrap(function _callee8$(_context8) {
          while (1) {
            switch (_context8.prev = _context8.next) {
              case 0:
                // replace all active gateway url links with their raw ipfs hash
                sanitizedMetadatas = replaceGatewayUrlWithHash(metadatas, "ipfs://", this.gatewayUrl); // extract any binary file to upload
                filesToUpload = sanitizedMetadatas.flatMap(function (m) {
                  return _this2.buildFilePropertiesMap(m, []);
                }); // if no binary files to upload, return the metadata
                if (!(filesToUpload.length === 0)) {
                  _context8.next = 4;
                  break;
                }
                return _context8.abrupt("return", sanitizedMetadatas);
              case 4:
                _context8.next = 6;
                return this.uploader.uploadBatchWithCid(filesToUpload, undefined, undefined, undefined, options);
              case 6:
                _yield$this$uploader$4 = _context8.sent;
                cid = _yield$this$uploader$4.cid;
                fileNames = _yield$this$uploader$4.fileNames;
                cids = []; // recurse ordered array
                _iterator = _createForOfIteratorHelper(fileNames);
                try {
                  for (_iterator.s(); !(_step = _iterator.n()).done;) {
                    filename = _step.value;
                    cids.push("".concat(cid, "/").concat(filename));
                  } // replace all files with their ipfs hash
                } catch (err) {
_iterator.e(err); | ||
} finally { | ||
_iterator.f(); | ||
} | ||
return _context8.abrupt("return", replaceFilePropertiesWithHashes(sanitizedMetadatas, cids)); | ||
case 13: | ||
case "end": | ||
return _context8.stop(); | ||
} | ||
} | ||
}, _callee8, this); | ||
})); | ||
function batchUploadProperties(_x18, _x19) { | ||
return _batchUploadProperties.apply(this, arguments); | ||
async uploadSingle(data, contractAddress, signerAddress) { | ||
// TODO move down to IStorageUpload | ||
const token = await this.uploader.getUploadToken(contractAddress || ""); | ||
const metadata = { | ||
name: "CONSOLE-TS-SDK-".concat(contractAddress), | ||
keyvalues: { | ||
sdk: "typescript", | ||
contractAddress, | ||
signerAddress | ||
} | ||
}; | ||
const formData = new FormData__default["default"](); | ||
const filepath = "files"; // Root directory | ||
return batchUploadProperties; | ||
}() | ||
/** | ||
* This function recurisely traverses an object and hashes any | ||
* `Buffer` or `File` objects into the returned map. | ||
* | ||
* @param object - the Json Object | ||
* @param files - The running array of files or buffer to upload | ||
* @returns - The final map of all hashes to files | ||
*/ | ||
formData.append("file", data, filepath); | ||
formData.append("pinataMetadata", JSON.stringify(metadata)); | ||
formData.append("pinataOptions", JSON.stringify({ | ||
wrapWithDirectory: false | ||
})); | ||
const res = await fetch__default["default"](PINATA_IPFS_URL, { | ||
method: "POST", | ||
headers: { | ||
Authorization: "Bearer ".concat(token), | ||
...formData.getHeaders() | ||
}, | ||
body: formData.getBuffer() | ||
}); | ||
}, { | ||
key: "buildFilePropertiesMap", | ||
value: function buildFilePropertiesMap(object) { | ||
var _this3 = this; | ||
var files = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : []; | ||
if (Array.isArray(object)) { | ||
object.forEach(function (element) { | ||
_this3.buildFilePropertiesMap(element, files); | ||
}); | ||
} else if (object) { | ||
var values = Object.values(object); | ||
for (var _i = 0, _values = values; _i < _values.length; _i++) { | ||
var val = _values[_i]; | ||
if (isFileInstance(val) || isBufferInstance(val)) { | ||
files.push(val); | ||
} else if (_typeof(val) === "object") { | ||
this.buildFilePropertiesMap(val, files); | ||
} | ||
} | ||
} | ||
return files; | ||
if (!res.ok) { | ||
throw new Error("Failed to upload to IPFS [status code = ".concat(res.status, "]")); | ||
} | ||
/** | ||
* FOR TESTING ONLY | ||
* @internal | ||
* @param data - | ||
* @param contractAddress - | ||
* @param signerAddress - | ||
*/ | ||
}, { | ||
key: "uploadSingle", | ||
value: function () { | ||
var _uploadSingle = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee9(data, contractAddress, signerAddress) { | ||
var token, metadata, formData, filepath, res, body; | ||
return _regeneratorRuntime().wrap(function _callee9$(_context9) { | ||
while (1) { | ||
switch (_context9.prev = _context9.next) { | ||
case 0: | ||
_context9.next = 2; | ||
return this.uploader.getUploadToken(contractAddress || ""); | ||
const body = await res.json(); | ||
return body.IpfsHash; | ||
} | ||
case 2: | ||
token = _context9.sent; | ||
metadata = { | ||
name: "CONSOLE-TS-SDK-".concat(contractAddress), | ||
keyvalues: { | ||
sdk: "typescript", | ||
contractAddress: contractAddress, | ||
signerAddress: signerAddress | ||
} | ||
}; | ||
formData = new FormData__default["default"](); | ||
filepath = "files"; // Root directory | ||
} | ||
formData.append("file", data, filepath); | ||
formData.append("pinataMetadata", JSON.stringify(metadata)); | ||
formData.append("pinataOptions", JSON.stringify({ | ||
wrapWithDirectory: false | ||
})); | ||
_context9.next = 11; | ||
return fetch__default["default"](PINATA_IPFS_URL, { | ||
method: "POST", | ||
headers: _objectSpread2({ | ||
Authorization: "Bearer ".concat(token) | ||
}, formData.getHeaders()), | ||
body: formData.getBuffer() | ||
}); | ||
case 11: | ||
res = _context9.sent; | ||
if (res.ok) { | ||
_context9.next = 14; | ||
break; | ||
} | ||
throw new Error("Failed to upload to IPFS [status code = ".concat(res.status, "]")); | ||
case 14: | ||
_context9.next = 16; | ||
return res.json(); | ||
case 16: | ||
body = _context9.sent; | ||
return _context9.abrupt("return", body.IpfsHash); | ||
case 18: | ||
case "end": | ||
return _context9.stop(); | ||
} | ||
} | ||
}, _callee9, this); | ||
})); | ||
function uploadSingle(_x20, _x21, _x22) { | ||
return _uploadSingle.apply(this, arguments); | ||
} | ||
return uploadSingle; | ||
}() | ||
}]); | ||
return IpfsStorage; | ||
}(); | ||
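/**
 * The three methods above form the metadata pre-processing pipeline:
 * `buildFilePropertiesMap` walks each metadata object depth-first and collects every
 * `File`/`Buffer` value, `batchUploadProperties` pins them all in a single
 * `uploadBatchWithCid` call, and `replaceFilePropertiesWithHashes` swaps each binary
 * value for its `ipfs://<cid>/<index>` URI. A rough usage sketch from the consumer
 * side (the output CID is illustrative; assumes Node.js and the default Pinata uploader):
 *
 * @example
 * ```javascript
 * const { IpfsStorage } = require("@thirdweb-dev/storage");
 *
 * async function main() {
 *   const storage = new IpfsStorage();
 *   const metadata = {
 *     name: "My NFT",
 *     image: Buffer.from("fake png bytes"), // collected by buildFilePropertiesMap
 *     properties: { banner: Buffer.from("more bytes") }, // nested values are found too
 *   };
 *   // Both buffers are pinned in one batch, then string-replaced in the JSON
 *   // with ipfs://<cid>/<index> URIs before the metadata itself is uploaded.
 *   const { uris } = await storage.uploadMetadataBatch([metadata]);
 *   console.log(uris[0]); // something like ipfs://<cid>/0
 * }
 *
 * main().catch(console.error);
 * ```
 */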
/**
@@ -1588,6 +670,4 @@ * Fetch and upload files to IPFS or any other storage.
 */
class RemoteStorage {
  constructor(storage) {
    _defineProperty(this, "storage", void 0);
@@ -1613,191 +693,75 @@
  async fetch(hash) {
    return this.storage.get(hash);
  }
  /**
   * Upload any data to an IPFS directory. We'll handle all the details for you, including
   * pinning your files and making sure that you get the fastest upload speeds.
   *
   * @example
   * ```javascript
   * // File upload
   * const files = [
   *   fs.readFileSync("file1.png"),
   *   fs.readFileSync("file2.png"),
   * ]
   * const result = await sdk.storage.upload(files);
   * // uri for each uploaded file will look something like: ipfs://<hash>/0
   *
   * // JSON metadata upload
   * const jsonMetadata = {
   *   name: "Name",
   *   description: "Description",
   * }
   * const result = await sdk.storage.upload(jsonMetadata);
   *
   * // Upload progress (browser only)
   * const result = await sdk.storage.upload(files, {
   *   onProgress: (event: UploadProgressEvent) => {
   *     console.log(`Downloaded ${event.progress} / ${event.total}`);
   *   },
   * });
   * ```
   *
   * @param data - An array of file data or an array of JSON metadata to upload to IPFS
   * @param options - Optional. Upload progress callback.
   * @returns The IPFS hash of the directory that holds all the uploaded data
   */
  async upload(data, options) {
    if (!Array.isArray(data)) {
      if (isFileInstance(data) || isBufferInstance(data) || data.name && data.data && isBufferInstance(data.data)) {
        return this.uploadBatch([data], options);
      } else {
        return this.uploadMetadataBatch([data], options);
      }
    }
    const allFiles = data.filter(item => isFileInstance(item) || isBufferInstance(item) || item.name && item.data && isBufferInstance(item.data));
    const allObjects = data.filter(item => !isFileInstance(item) && !isBufferInstance(item));
    if (allFiles.length === data.length) {
      return this.uploadBatch(data, options);
    } else if (allObjects.length === data.length) {
      return this.uploadMetadataBatch(data, options);
    } else {
      throw new Error("Data to upload must be either all files or all JSON objects");
    }
  }
  async uploadBatch(files, options) {
    return await this.storage.uploadBatch(files, undefined, undefined, undefined, options);
  }
  async uploadMetadataBatch(metadatas, options) {
    return await this.storage.uploadMetadataBatch(metadatas, undefined, undefined, undefined, options);
  }
}
const isBrowser = () => typeof window !== "undefined";
const fileOrBufferUnion = isBrowser() ? [zod.z.instanceof(File), zod.z.string()] : [zod.z.instanceof(Buffer), zod.z.string()];
const FileBufferOrStringSchema = zod.z.union(fileOrBufferUnion);
exports.FileBufferOrStringSchema = FileBufferOrStringSchema;
@@ -1804,0 +768,0 @@ exports.IpfsStorage = IpfsStorage;
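/**
 * The class-based `upload` keeps the same dispatch rule as the transpiled version it
 * replaces: a single item is wrapped in an array, a homogeneous array is routed to
 * `uploadBatch` or `uploadMetadataBatch`, and a mixed array throws. A sketch of the
 * three paths, assuming `sdk.storage` is the SDK's `RemoteStorage` instance as in the
 * JSDoc example above:
 *
 * @example
 * ```javascript
 * const fs = require("fs");
 *
 * async function demo(sdk) {
 *   const files = [fs.readFileSync("file1.png"), fs.readFileSync("file2.png")];
 *   await sdk.storage.upload(files); // all buffers -> uploadBatch
 *   await sdk.storage.upload([{ name: "A" }, { name: "B" }]); // all JSON -> uploadMetadataBatch
 *   await sdk.storage.upload([files[0], { name: "A" }]); // mixed
 *   // -> Error: "Data to upload must be either all files or all JSON objects"
 * }
 * ```
 */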
@@ -20,522 +20,6 @@ import fetch from 'cross-fetch';
// The 500+ lines removed in this hunk are the same inlined Babel helpers shown
// removed from the CJS bundle above (ownKeys, _objectSpread2, _typeof, the
// regenerator runtime, _asyncToGenerator, _classCallCheck, _createClass).
/**
 * @internal
 */
const DEFAULT_IPFS_GATEWAY = "https://gateway.ipfscdn.io/ipfs/";
/**
@@ -545,3 +29,3 @@ * @internal
const PUBLIC_GATEWAYS = ["https://gateway.ipfscdn.io/ipfs/", "https://cloudflare-ipfs.com/ipfs/", "https://ipfs.io/ipfs/"];
/**
@@ -551,3 +35,3 @@ * @internal
const TW_IPFS_SERVER_URL = "https://upload.nftlabs.co";
/**
@@ -557,3 +41,3 @@ * @internal
const PINATA_IPFS_URL = "https://api.pinata.cloud/pinning/pinFileToIPFS";
@@ -580,9 +64,9 @@ function isFileInstance(data) {
function replaceFilePropertiesWithHashes(object, cids) {
  const keys = Object.keys(object);
  for (const key in keys) {
    const val = object[keys[key]];
    const isFile = isFileInstance(val) || isBufferInstance(val);
    if (typeof val === "object" && !isFile) {
      replaceFilePropertiesWithHashes(val, cids);
@@ -614,11 +98,11 @@ continue;
  const keys = Object.keys(object);
  for (const key in keys) {
    const val = object[keys[key]];
    object[keys[key]] = resolveGatewayUrl(val, scheme, gatewayUrl);
    if (Array.isArray(val)) {
      object[keys[key]] = val.map(el => {
        if (typeof el === "object") {
          return replaceHashWithGatewayUrl(el, scheme, gatewayUrl);
@@ -631,3 +115,3 @@ } else {
    if (typeof val === "object") {
      replaceHashWithGatewayUrl(val, scheme, gatewayUrl);
@@ -652,13 +136,13 @@ }
  const keys = Object.keys(object);
  for (const key in keys) {
    const val = object[keys[key]];
    object[keys[key]] = toIPFSHash(val, scheme, gatewayUrl);
    if (Array.isArray(val)) {
      object[keys[key]] = val.map(el => {
        const isFile = isFileInstance(el) || isBufferInstance(el);
        if (typeof el === "object" && !isFile) {
          return replaceGatewayUrlWithHash(el, scheme, gatewayUrl);
@@ -671,5 +155,5 @@ } else {
    const isFile = isFileInstance(val) || isBufferInstance(val);
    if (typeof val === "object" && !isFile) {
      replaceGatewayUrlWithHash(val, scheme, gatewayUrl);
@@ -719,274 +203,178 @@ }
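/**
 * These traversal helpers are inverses of one another: on reads,
 * `replaceHashWithGatewayUrl` rewrites `ipfs://` URIs to the currently active gateway;
 * on writes, `replaceGatewayUrlWithHash` strips the gateway back off so only canonical
 * hashes are pinned. They are internal (not exported), so this is a minimal standalone
 * mirror of the per-value string swap they perform, with a made-up hash:
 *
 * @example
 * ```javascript
 * // Minimal mirror of the scheme <-> gateway swap; not the SDK's own export.
 * const GATEWAY = "https://gateway.ipfscdn.io/ipfs/";
 * const toGatewayUrl = (uri) => uri.replace("ipfs://", GATEWAY);
 * const toIpfsHash = (url) => url.replace(GATEWAY, "ipfs://");
 *
 * console.log(toGatewayUrl("ipfs://QmExampleHash/0"));
 * // https://gateway.ipfscdn.io/ipfs/QmExampleHash/0
 * console.log(toIpfsHash("https://gateway.ipfscdn.io/ipfs/QmExampleHash/0"));
 * // ipfs://QmExampleHash/0
 * ```
 */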
class PinataUploader {
  /**
   * Fetches a one-time-use upload token that can be used to upload
   * a file to storage.
   *
   * @returns - The one time use token that can be passed to the Pinata API.
   */
  async getUploadToken(contractAddress) {
    const headers = {
      "X-App-Name": "CONSOLE-TS-SDK-".concat(contractAddress)
    };
    const res = await fetch("".concat(TW_IPFS_SERVER_URL, "/grant"), {
      method: "GET",
      headers
    });
    if (!res.ok) {
      throw new Error("Failed to get upload token");
    }
    const body = await res.text();
    return body;
  }
  async uploadBatchWithCid(files) {
    let fileStartNumber = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
    let contractAddress = arguments.length > 2 ? arguments[2] : undefined;
    let signerAddress = arguments.length > 3 ? arguments[3] : undefined;
    let options = arguments.length > 4 ? arguments[4] : undefined;
    const token = await this.getUploadToken(contractAddress || "");
    const formData = new FormData();
    const {
      data,
      fileNames
    } = this.buildFormData(formData, files, fileStartNumber, contractAddress, signerAddress);
    if (typeof window === "undefined") {
      if (options !== null && options !== void 0 && options.onProgress) {
        console.warn("The onProgress option is only supported in the browser");
      }
      const res = await fetch(PINATA_IPFS_URL, {
        method: "POST",
        headers: {
          Authorization: "Bearer ".concat(token),
          ...data.getHeaders()
        },
        body: data.getBuffer()
      });
      const body = await res.json();
      if (!res.ok) {
        throw new Error("Failed to upload files to IPFS");
      }
      const cid = body.IpfsHash;
      if (!cid) {
        throw new Error("Failed to upload files to IPFS");
      }
      return {
        cid,
        fileNames
      };
    } else {
      return new Promise((resolve, reject) => {
        const xhr = new XMLHttpRequest();
        xhr.open("POST", PINATA_IPFS_URL);
        xhr.setRequestHeader("Authorization", "Bearer ".concat(token));
        xhr.onloadend = () => {
          if (xhr.status !== 200) {
            throw new Error("Failed to upload files to IPFS");
          }
          const cid = JSON.parse(xhr.responseText).IpfsHash;
          if (!cid) {
            throw new Error("Failed to upload files to IPFS");
          }
          resolve({
            cid,
            fileNames
          });
        };
        xhr.onerror = err => {
          reject(err);
        };
        if (xhr.upload) {
          xhr.upload.onprogress = event => {
            if (options !== null && options !== void 0 && options.onProgress) {
              options === null || options === void 0 ? void 0 : options.onProgress({
                progress: event.loaded,
                total: event.total
              });
            }
          };
        }
        xhr.send(data);
      });
    }
  }
  buildFormData(data, files) {
    let fileStartNumber = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
    let contractAddress = arguments.length > 3 ? arguments[3] : undefined;
    let signerAddress = arguments.length > 4 ? arguments[4] : undefined;
    const metadata = {
      name: "CONSOLE-TS-SDK-".concat(contractAddress),
      keyvalues: {
        sdk: "typescript",
        contractAddress,
        signerAddress
      }
    };
    const fileNames = [];
    files.forEach((file, i) => {
      let fileName = "";
      let fileData = file; // if it is a file, we passthrough the file extensions,
      // if it is a buffer or string, the filename would be fileStartNumber + index
      // if it is a buffer or string with names, the filename would be the name
      if (isFileInstance(file)) {
        let extensions = "";
        if (file.name) {
          const extensionStartIndex = file.name.lastIndexOf(".");
          if (extensionStartIndex > -1) {
            extensions = file.name.substring(extensionStartIndex);
          }
        }
        fileName = "".concat(i + fileStartNumber).concat(extensions);
      } else if (isBufferInstance(file) || typeof file === "string") {
        fileName = "".concat(i + fileStartNumber);
      } else if (file && file.name && file !== null && file !== void 0 && file.data) {
        fileData = file === null || file === void 0 ? void 0 : file.data;
        fileName = "".concat(file.name);
      } else {
        // default behavior
        fileName = "".concat(i + fileStartNumber);
      }
      const filepath = "files/".concat(fileName);
      if (fileNames.indexOf(fileName) > -1) {
        throw new Error("DUPLICATE_FILE_NAME_ERROR: File name ".concat(fileName, " was passed for more than one file."));
      }
      fileNames.push(fileName);
      if (typeof window === "undefined") {
        data.append("file", fileData, {
          filepath
        });
      } else {
        // browser does blob things, filepath is parsed differently on browser vs node.
        // pls pinata?
        data.append("file", new Blob([fileData]), filepath);
      }
    });
    data.append("pinataMetadata", JSON.stringify(metadata));
    return {
      data,
      fileNames
    };
  }
}
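/**
 * The naming rules in `buildFormData` decide the path each entry gets inside the pinned
 * directory: `File` objects keep their extension, buffers and strings are numbered from
 * `fileStartNumber`, explicit `{ name, data }` objects use their own name, and a repeated
 * name triggers the `DUPLICATE_FILE_NAME_ERROR`. `PinataUploader` is an internal class,
 * so calling it directly as below is purely illustrative (Node.js, using the same
 * `form-data` package the bundle itself imports):
 *
 * @example
 * ```javascript
 * const FormData = require("form-data");
 *
 * // Hypothetical direct use of the internal uploader, just to show the naming.
 * const uploader = new PinataUploader();
 * const { fileNames } = uploader.buildFormData(new FormData(), [
 *   Buffer.from("first"), // numbered from fileStartNumber -> "0"
 *   Buffer.from("second"), // -> "1"
 *   { name: "banner.gif", data: Buffer.from("gif bytes") }, // explicit name -> "banner.gif"
 * ]);
 * console.log(fileNames); // ["0", "1", "banner.gif"]
 * ```
 */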
/** | ||
@@ -998,3 +386,3 @@ * IPFS Storage implementation, accepts custom IPFS gateways | ||
var IpfsStorage = /*#__PURE__*/function () { | ||
class IpfsStorage { | ||
/** | ||
@@ -1004,8 +392,6 @@ * {@inheritdoc IStorage.gatewayUrl} | ||
*/ | ||
function IpfsStorage() { | ||
var gatewayUrl = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : DEFAULT_IPFS_GATEWAY; | ||
var uploader = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : new PinataUploader(); | ||
constructor() { | ||
let gatewayUrl = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : DEFAULT_IPFS_GATEWAY; | ||
let uploader = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : new PinataUploader(); | ||
_classCallCheck(this, IpfsStorage); | ||
_defineProperty(this, "gatewayUrl", void 0); | ||
@@ -1021,555 +407,251 @@ | ||
_createClass(IpfsStorage, [{ | ||
key: "getNextPublicGateway", | ||
value: function getNextPublicGateway() { | ||
var _this = this; | ||
getNextPublicGateway() { | ||
const urlsToTry = PUBLIC_GATEWAYS.filter(url => !this.failedUrls.includes(url)).filter(url => url !== this.gatewayUrl); | ||
var urlsToTry = PUBLIC_GATEWAYS.filter(function (url) { | ||
return !_this.failedUrls.includes(url); | ||
}).filter(function (url) { | ||
return url !== _this.gatewayUrl; | ||
}); | ||
if (urlsToTry.length > 0) { | ||
return urlsToTry[0]; | ||
} else { | ||
this.failedUrls = []; | ||
return undefined; | ||
} | ||
if (urlsToTry.length > 0) { | ||
return urlsToTry[0]; | ||
} else { | ||
this.failedUrls = []; | ||
return undefined; | ||
} | ||
/** | ||
* Upload a file to IPFS and return the hash | ||
* @remarks This method is a wrapper around {@link IStorage.upload} | ||
* @example | ||
* ```javascript | ||
* const file = './path/to/file.png'; // Can be a path or a File object such as a file from an input element. | ||
* const hash = await sdk.storage.upload(file); | ||
* ``` | ||
* | ||
* | ||
*/ | ||
} | ||
/** | ||
* Upload a file to IPFS and return the hash | ||
* @remarks This method is a wrapper around {@link IStorage.upload} | ||
* @example | ||
* ```javascript | ||
* const file = './path/to/file.png'; // Can be a path or a File object such as a file from an input element. | ||
* const hash = await sdk.storage.upload(file); | ||
* ``` | ||
* | ||
* | ||
*/ | ||
}, { | ||
key: "upload", | ||
value: function () { | ||
var _upload = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee(data, contractAddress, signerAddress, options) { | ||
var _yield$this$uploader$, cid, fileNames, baseUri; | ||
return _regeneratorRuntime().wrap(function _callee$(_context) { | ||
while (1) { | ||
switch (_context.prev = _context.next) { | ||
case 0: | ||
_context.next = 2; | ||
return this.uploader.uploadBatchWithCid([data], 0, contractAddress, signerAddress, options); | ||
async upload(data, contractAddress, signerAddress, options) { | ||
const { | ||
cid, | ||
fileNames | ||
    } = await this.uploader.uploadBatchWithCid([data], 0, contractAddress, signerAddress, options);
    const baseUri = "ipfs://".concat(cid, "/");
    return "".concat(baseUri).concat(fileNames[0]);
  }
  /**
   * {@inheritDoc IStorage.uploadBatch}
   */
  async uploadBatch(files) {
    let fileStartNumber = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
    let contractAddress = arguments.length > 2 ? arguments[2] : undefined;
    let signerAddress = arguments.length > 3 ? arguments[3] : undefined;
    let options = arguments.length > 4 ? arguments[4] : undefined;
    const {
      cid,
      fileNames
    } = await this.uploader.uploadBatchWithCid(files, fileStartNumber, contractAddress, signerAddress, options);
    const baseUri = "ipfs://".concat(cid, "/");
    const uris = fileNames.map(filename => "".concat(baseUri).concat(filename));
    return {
      baseUri,
      uris
    };
  }
  /**
   * {@inheritDoc IStorage.get}
   */
  async get(hash) {
    const res = await this._get(hash);
    const json = await res.json();
    return replaceHashWithGatewayUrl(json, "ipfs://", this.gatewayUrl);
  }
  /**
   * {@inheritDoc IStorage.getRaw}
   */
  async getRaw(hash) {
    const res = await this._get(hash);
    return await res.text();
  }
  /**
   * {@inheritDoc IStorage.uploadMetadata}
   */
  async uploadMetadata(metadata, contractAddress, signerAddress, options) {
    // since there's only a single object, always use the first index
    const {
      uris
    } = await this.uploadMetadataBatch([metadata], 0, contractAddress, signerAddress, options);
    return uris[0];
  }
  /**
   * {@inheritDoc IStorage.uploadMetadataBatch}
   */
  async uploadMetadataBatch(metadatas, fileStartNumber, contractAddress, signerAddress, options) {
    const metadataToUpload = (await this.batchUploadProperties(metadatas, options)).map(m => JSON.stringify(m));
    const {
      cid,
      fileNames
    } = await this.uploader.uploadBatchWithCid(metadataToUpload, fileStartNumber, contractAddress, signerAddress);
    const baseUri = "ipfs://".concat(cid, "/");
    const uris = fileNames.map(filename => "".concat(baseUri).concat(filename));
    return {
      baseUri,
      uris
    };
  }
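  // Illustrative usage sketch (not part of the compiled output above; the constructor
  // arguments and CID are placeholders): uploadBatch resolves to { baseUri, uris },
  // with files named from fileStartNumber (default 0) under one directory CID.
  //
  //   const storage = new IpfsStorage();
  //   const { baseUri, uris } = await storage.uploadBatch([
  //     Buffer.from("file one"),
  //     Buffer.from("file two"),
  //   ]);
  //   // baseUri -> "ipfs://<cid>/"
  //   // uris    -> ["ipfs://<cid>/0", "ipfs://<cid>/1"]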
  /** *************************
   * PRIVATE FUNCTIONS
   *************************/
  async _get(hash) {
    let uri = hash;
    if (hash) {
      uri = resolveGatewayUrl(hash, "ipfs://", this.gatewayUrl);
    }
    const result = await fetch(uri);
    if (!result.ok && result.status === 500) {
      throw new Error("Error fetching ".concat(uri, " - Status code ").concat(result.status));
    }
    if (!result.ok && result.status !== 404) {
      const nextUrl = this.getNextPublicGateway();
      if (nextUrl) {
        this.failedUrls.push(this.gatewayUrl);
        this.gatewayUrl = nextUrl;
        return this._get(hash);
      } else {
        throw new Error("Error fetching ".concat(uri, " - Status code ").concat(result.status));
      }
    }
    return result;
  }
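  // How the _get fallback above plays out (gateway hosts here are placeholders): a 500
  // throws immediately, a 404 falls through and the response is returned, and any other
  // failing status pushes the current gateway onto failedUrls and retries on the next
  // public gateway, so one call may walk the whole gateway list before giving up.
  //
  //   // gatewayUrl "https://gateway-a.example/ipfs/" answers 503
  //   // -> failedUrls: ["https://gateway-a.example/ipfs/"], retried via gateway-b
  //   const res = await storage._get("ipfs://<cid>/file.json");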
  /**
   * Pre-processes metadata and uploads all file properties
   * to storage in *bulk*, then performs a string replacement of
   * all file properties -\> the resulting ipfs uri. This is
   * called internally by `uploadMetadataBatch`.
   *
   * @internal
   *
   * @returns - The processed metadata with properties pointing at ipfs in place of `File | Buffer`
   * @param metadatas
   * @param options
   */
  async batchUploadProperties(metadatas, options) {
    // replace all active gateway url links with their raw ipfs hash
    const sanitizedMetadatas = replaceGatewayUrlWithHash(metadatas, "ipfs://", this.gatewayUrl);
    // extract any binary file to upload
    const filesToUpload = sanitizedMetadatas.flatMap(m => this.buildFilePropertiesMap(m, []));
    // if no binary files to upload, return the metadata
    if (filesToUpload.length === 0) {
      return sanitizedMetadatas;
    }
    // otherwise upload those files
    const {
      cid,
      fileNames
    } = await this.uploader.uploadBatchWithCid(filesToUpload, undefined, undefined, undefined, options);
    // recurse ordered array
    const cids = [];
    for (const filename of fileNames) {
      cids.push("".concat(cid, "/").concat(filename));
    }
    // replace all files with their ipfs hash
    return replaceFilePropertiesWithHashes(sanitizedMetadatas, cids);
  }
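  // What batchUploadProperties does to a metadata array, sketched with placeholder
  // values (the real CID and file names come from the uploader):
  //
  //   // in:  [{ name: "NFT #1", image: <Buffer> }]
  //   // 1. buildFilePropertiesMap collects the Buffer
  //   // 2. uploadBatchWithCid pins it, returning { cid, fileNames: ["0"] }
  //   // 3. replaceFilePropertiesWithHashes swaps the Buffer for its uri
  //   // out: [{ name: "NFT #1", image: "ipfs://<cid>/0" }]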
  /**
   * This function recursively traverses an object and hashes any
   * `Buffer` or `File` objects into the returned map.
   *
   * @param object - the Json Object
   * @param files - The running array of files or buffer to upload
   * @returns - The final map of all hashes to files
   */
  buildFilePropertiesMap(object) {
    let files = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
    if (Array.isArray(object)) {
      object.forEach(element => {
        this.buildFilePropertiesMap(element, files);
      });
    } else if (object) {
      const values = Object.values(object);
      for (const val of values) {
        if (isFileInstance(val) || isBufferInstance(val)) {
          files.push(val);
        } else if (typeof val === "object") {
          this.buildFilePropertiesMap(val, files);
        }
      }
    }
    return files;
  }
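  // Traversal sketch for buildFilePropertiesMap (hypothetical input): nested objects
  // and arrays are walked depth-first and every File/Buffer leaf is appended to
  // `files` in encounter order, which keeps the uploaded file order stable.
  //
  //   // buildFilePropertiesMap({ a: <Buffer1>, b: { c: [<Buffer2>] }, d: "text" }, [])
  //   // -> [<Buffer1>, <Buffer2>]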
  /**
   * FOR TESTING ONLY
   * @internal
   * @param data -
   * @param contractAddress -
   * @param signerAddress -
   */
  async uploadSingle(data, contractAddress, signerAddress) {
    // TODO move down to IStorageUpload
    const token = await this.uploader.getUploadToken(contractAddress || "");
    const metadata = {
      name: "CONSOLE-TS-SDK-".concat(contractAddress),
      keyvalues: {
        sdk: "typescript",
        contractAddress,
        signerAddress
      }
    };
    const formData = new FormData();
    const filepath = "files"; // Root directory
    formData.append("file", data, filepath);
    formData.append("pinataMetadata", JSON.stringify(metadata));
    formData.append("pinataOptions", JSON.stringify({
      wrapWithDirectory: false
    }));
    const res = await fetch(PINATA_IPFS_URL, {
      method: "POST",
      headers: {
        Authorization: "Bearer ".concat(token),
        ...formData.getHeaders()
      },
      body: formData.getBuffer()
    });
    if (!res.ok) {
      throw new Error("Failed to upload to IPFS [status code = ".concat(res.status, "]"));
    }
    const body = await res.json();
    return body.IpfsHash;
  }
}
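// Rough shape of a successful uploadSingle call (test-only per the doc comment above;
// the addresses and hash are placeholders): the method POSTs multipart form data to
// PINATA_IPFS_URL with wrapWithDirectory: false, so the returned IpfsHash addresses
// the pinned file itself rather than a wrapping directory.
//
//   const hash = await storage.uploadSingle(Buffer.from("{}"), "0x...", "0x...");
//   // hash -> "Qm..."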
formData.append("file", data, filepath); | ||
formData.append("pinataMetadata", JSON.stringify(metadata)); | ||
formData.append("pinataOptions", JSON.stringify({ | ||
wrapWithDirectory: false | ||
})); | ||
_context9.next = 11; | ||
return fetch(PINATA_IPFS_URL, { | ||
method: "POST", | ||
headers: _objectSpread2({ | ||
Authorization: "Bearer ".concat(token) | ||
}, formData.getHeaders()), | ||
body: formData.getBuffer() | ||
}); | ||
case 11: | ||
res = _context9.sent; | ||
if (res.ok) { | ||
_context9.next = 14; | ||
break; | ||
} | ||
throw new Error("Failed to upload to IPFS [status code = ".concat(res.status, "]")); | ||
case 14: | ||
_context9.next = 16; | ||
return res.json(); | ||
case 16: | ||
body = _context9.sent; | ||
return _context9.abrupt("return", body.IpfsHash); | ||
case 18: | ||
case "end": | ||
return _context9.stop(); | ||
} | ||
} | ||
}, _callee9, this); | ||
})); | ||
function uploadSingle(_x20, _x21, _x22) { | ||
return _uploadSingle.apply(this, arguments); | ||
} | ||
return uploadSingle; | ||
}() | ||
}]); | ||
return IpfsStorage; | ||
}(); | ||
@@ -1579,6 +661,4 @@
/**
 * Fetch and upload files to IPFS or any other storage.
 */
class RemoteStorage {
  constructor(storage) {
    _defineProperty(this, "storage", void 0);
@@ -1604,191 +684,75 @@
  async fetch(hash) {
    return this.storage.get(hash);
  }
  /**
   * Upload any data to an IPFS directory. We'll handle all the details for you, including
   * pinning your files and making sure that you get the fastest upload speeds.
   *
   * @example
   * ```javascript
   * // File upload
   * const files = [
   *   fs.readFileSync("file1.png"),
   *   fs.readFileSync("file2.png"),
   * ]
   * const result = await sdk.storage.upload(files);
   * // uri for each uploaded file will look like something like: ipfs://<hash>/0
   *
   * // JSON metadata upload
   * const jsonMetadata = {
   *   name: "Name",
   *   description: "Description",
   * }
   * const result = await sdk.storage.upload(jsonMetadata);
   *
   * // Upload progress (browser only)
   * const result = await sdk.storage.upload(files, {
   *   onProgress: (event: UploadProgressEvent) => {
   *     console.log(`Downloaded ${event.progress} / ${event.total}`);
   *   },
   * });
   * ```
   *
   * @param data - An array of file data or an array of JSON metadata to upload to IPFS
   * @param options - Optional. Upload progress callback.
   * @returns The IPFS hash of the directory that holds all the uploaded data
   */
  async upload(data, options) {
    if (!Array.isArray(data)) {
      if (isFileInstance(data) || isBufferInstance(data) || data.name && data.data && isBufferInstance(data.data)) {
        return this.uploadBatch([data], options);
      } else {
        return this.uploadMetadataBatch([data], options);
      }
    }
    const allFiles = data.filter(item => isFileInstance(item) || isBufferInstance(item) || item.name && item.data && isBufferInstance(item.data));
    const allObjects = data.filter(item => !isFileInstance(item) && !isBufferInstance(item));
    if (allFiles.length === data.length) {
      return this.uploadBatch(data, options);
    } else if (allObjects.length === data.length) {
      return this.uploadMetadataBatch(data, options);
    } else {
      throw new Error("Data to upload must be either all files or all JSON objects");
    }
  }
  async uploadBatch(files, options) {
    return await this.storage.uploadBatch(files, undefined, undefined, undefined, options);
  }
  async uploadMetadataBatch(metadatas, options) {
    return await this.storage.uploadMetadataBatch(metadatas, undefined, undefined, undefined, options);
  }
}
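// The upload guard above in practice (hypothetical inputs): mixed arrays are rejected
// outright rather than split into separate file and metadata uploads.
//
//   await sdk.storage.upload([fs.readFileSync("a.png"), { name: "Meta" }]);
//   // -> Error: "Data to upload must be either all files or all JSON objects"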
const isBrowser = () => typeof window !== "undefined";
const fileOrBufferUnion = isBrowser() ? [z.instanceof(File), z.string()] : [z.instanceof(Buffer), z.string()];
const FileBufferOrStringSchema = z.union(fileOrBufferUnion);
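// Quick illustration of FileBufferOrStringSchema (values are hypothetical): the union
// is built per environment, so it accepts a File in browsers, a Buffer in Node, and a
// string in either.
//
//   FileBufferOrStringSchema.parse("ipfs://<cid>/0");    // ok everywhere
//   FileBufferOrStringSchema.parse(Buffer.from("data")); // ok in Node
//   FileBufferOrStringSchema.parse(42);                  // throws a ZodError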
var RemoteStorage = /*#__PURE__*/function () {
  function RemoteStorage(storage) {
    _classCallCheck(this, RemoteStorage);
  _createClass(RemoteStorage, [{
    key: "fetch",
    value: function () {
      var _fetch = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee(hash) {
        return _regeneratorRuntime().wrap(function _callee$(_context) {
          while (1) {
            switch (_context.prev = _context.next) {
              case 0:
                return _context.abrupt("return", this.storage.get(hash));
              case 1:
              case "end":
                return _context.stop();
            }
          }
        }, _callee, this);
      }));
      function fetch(_x) {
        return _fetch.apply(this, arguments);
      }
      return fetch;
    }()
  }, {
    key: "upload",
    value: function () {
      var _upload = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee2(data, options) {
        var allFiles, allObjects;
        return _regeneratorRuntime().wrap(function _callee2$(_context2) {
          while (1) {
            switch (_context2.prev = _context2.next) {
              case 0:
                if (Array.isArray(data)) {
                  _context2.next = 6;
                  break;
                }
                if (!(isFileInstance(data) || isBufferInstance(data) || data.name && data.data && isBufferInstance(data.data))) {
                  _context2.next = 5;
                  break;
                }
                return _context2.abrupt("return", this.uploadBatch([data], options));
              case 5:
                return _context2.abrupt("return", this.uploadMetadataBatch([data], options));
              case 6:
                allFiles = data.filter(function (item) {
                  return isFileInstance(item) || isBufferInstance(item) || item.name && item.data && isBufferInstance(item.data);
                });
                allObjects = data.filter(function (item) {
                  return !isFileInstance(item) && !isBufferInstance(item);
                });
                if (!(allFiles.length === data.length)) {
                  _context2.next = 12;
                  break;
                }
                return _context2.abrupt("return", this.uploadBatch(data, options));
              case 12:
                if (!(allObjects.length === data.length)) {
                  _context2.next = 16;
                  break;
                }
                return _context2.abrupt("return", this.uploadMetadataBatch(data, options));
              case 16:
                throw new Error("Data to upload must be either all files or all JSON objects");
              case 17:
              case "end":
                return _context2.stop();
            }
          }
        }, _callee2, this);
      }));
      function upload(_x2, _x3) {
        return _upload.apply(this, arguments);
      }
      return upload;
    }()
  }, {
    key: "uploadBatch",
    value: function () {
      var _uploadBatch = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee3(files, options) {
        return _regeneratorRuntime().wrap(function _callee3$(_context3) {
          while (1) {
            switch (_context3.prev = _context3.next) {
              case 0:
                _context3.next = 2;
                return this.storage.uploadBatch(files, undefined, undefined, undefined, options);
              case 2:
                return _context3.abrupt("return", _context3.sent);
              case 3:
              case "end":
                return _context3.stop();
            }
          }
        }, _callee3, this);
      }));
      function uploadBatch(_x4, _x5) {
        return _uploadBatch.apply(this, arguments);
      }
      return uploadBatch;
    }()
  }, {
    key: "uploadMetadataBatch",
    value: function () {
      var _uploadMetadataBatch = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime().mark(function _callee4(metadatas, options) {
        return _regeneratorRuntime().wrap(function _callee4$(_context4) {
          while (1) {
            switch (_context4.prev = _context4.next) {
              case 0:
                _context4.next = 2;
                return this.storage.uploadMetadataBatch(metadatas, undefined, undefined, undefined, options);
              case 2:
                return _context4.abrupt("return", _context4.sent);
              case 3:
              case "end":
                return _context4.stop();
            }
          }
        }, _callee4, this);
      }));
      function uploadMetadataBatch(_x6, _x7) {
        return _uploadMetadataBatch.apply(this, arguments);
      }
      return uploadMetadataBatch;
    }()
  }]);
  return RemoteStorage;
}();
var isBrowser = function isBrowser() {
  return typeof window !== "undefined";
};
var fileOrBufferUnion = isBrowser() ? [z["instanceof"](File), z.string()] : [z["instanceof"](Buffer), z.string()];
var FileBufferOrStringSchema = z.union(fileOrBufferUnion);
export { FileBufferOrStringSchema, IpfsStorage, RemoteStorage, isBrowser, isBufferInstance, isFileInstance };
{
  "name": "@thirdweb-dev/storage",
  "version": "0.2.2",
  "version": "0.2.3-nightly-349b5c1",
  "main": "dist/thirdweb-dev-storage.cjs.js",
@@ -20,11 +20,5 @@ "module": "dist/thirdweb-dev-storage.esm.js",
  ],
  "preconstruct": {
    "entrypoints": [
      "index.ts"
    ]
  },
  "devDependencies": {
    "@babel/preset-env": "^7.18.10",
    "@babel/preset-typescript": "^7.18.6",
    "@preconstruct/cli": "^2.2.1",
    "@types/chai": "^4.3.3",
@@ -40,2 +34,3 @@ "@types/mocha": "^9.1.1",
    "tsc": "^2.0.4",
    "@preconstruct/cli": "^2.2.1",
    "typescript": "^4.7.4"
@@ -42,0 +37,0 @@ },
<p align="center">
  <br />
  <a href="https://thirdweb.com"><img src="https://github.com/thirdweb-dev/typescript-sdk/blob/main/logo.svg?raw=true" width="200" alt=""/></a>
  <a href="https://thirdweb.com"><img src="https://github.com/thirdweb-dev/js/blob/main/packages/sdk/logo.svg?raw=true" width="200" alt=""/></a>
  <br />
@@ -5,0 +5,0 @@ </p>
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package