react-server-dom-webpack
Comparing version 0.0.0-experimental-d3def4793-20240208 to 0.0.0-experimental-d4287258-20241217
/**
 * @license React
 * react-server-dom-webpack-client.browser.production.min.js
 * react-server-dom-webpack-client.browser.production.js
 *
@@ -11,643 +11,518 @@
 * Copyright (c) Meta Platforms, Inc. and affiliates.
'use strict'; | ||
var ReactDOM = require('react-dom'); | ||
function createStringDecoder() { | ||
return new TextDecoder(); | ||
} | ||
const decoderOptions = { | ||
stream: true | ||
}; | ||
function readPartialStringChunk(decoder, buffer) { | ||
return decoder.decode(buffer, decoderOptions); | ||
} | ||
function readFinalStringChunk(decoder, buffer) { | ||
return decoder.decode(buffer); | ||
} | ||
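// Illustrative sketch (not part of either build): partial decodes keep the decoder's
// internal state across calls, and the final decode flushes it.
//   var exampleDecoder = createStringDecoder();
//   readPartialStringChunk(exampleDecoder, new TextEncoder().encode("hel")); // "hel", state retained
//   readFinalStringChunk(exampleDecoder, new TextEncoder().encode("lo"));    // "lo", state flushed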
// This is the parsed shape of the wire format which is why it is
// condensed to only the essential information
const ID = 0;
const CHUNKS = 1;
const NAME = 2; // export const ASYNC = 3;
// This logic is correct because we currently only include the 4th tuple member
// when the module is async. If that changes we will need to actually assert
// the value is true. We don't index into the 4th slot because Flow does not
// like the potential out of bounds access.
function isAsyncImport(metadata) {
  return metadata.length === 4;
}
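// Illustrative sketch: assuming the [id, chunks, name, async?] tuple shape described
// above, an async client reference could look like this (id and filenames invented).
//   var exampleClientRef = ["./app/Button.js", ["client1", "client1.chunk.js"], "default", 1];
//   isAsyncImport(exampleClientRef); // true, because only async modules carry the 4th slot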
"use strict"; | ||
var ReactDOM = require("react-dom"), | ||
decoderOptions = { stream: !0 }; | ||
function resolveClientReference(bundlerConfig, metadata) { | ||
if (bundlerConfig) { | ||
const moduleExports = bundlerConfig[metadata[ID]]; | ||
let resolvedModuleData = moduleExports[metadata[NAME]]; | ||
let name; | ||
if (resolvedModuleData) { | ||
// The potentially aliased name. | ||
name = resolvedModuleData.name; | ||
} else { | ||
// If we don't have this specific name, we might have the full module. | ||
resolvedModuleData = moduleExports['*']; | ||
if (!resolvedModuleData) { | ||
throw new Error('Could not find the module "' + metadata[ID] + '" in the React SSR Manifest. ' + 'This is probably a bug in the React Server Components bundler.'); | ||
} | ||
name = metadata[NAME]; | ||
var moduleExports = bundlerConfig[metadata[0]]; | ||
if ((bundlerConfig = moduleExports && moduleExports[metadata[2]])) | ||
moduleExports = bundlerConfig.name; | ||
else { | ||
bundlerConfig = moduleExports && moduleExports["*"]; | ||
if (!bundlerConfig) | ||
throw Error( | ||
'Could not find the module "' + | ||
metadata[0] + | ||
'" in the React Server Consumer Manifest. This is probably a bug in the React Server Components bundler.' | ||
); | ||
moduleExports = metadata[2]; | ||
} | ||
if (isAsyncImport(metadata)) { | ||
return [resolvedModuleData.id, resolvedModuleData.chunks, name, 1 | ||
/* async */ | ||
]; | ||
} else { | ||
return [resolvedModuleData.id, resolvedModuleData.chunks, name]; | ||
} | ||
return 4 === metadata.length | ||
? [bundlerConfig.id, bundlerConfig.chunks, moduleExports, 1] | ||
: [bundlerConfig.id, bundlerConfig.chunks, moduleExports]; | ||
} | ||
return metadata; | ||
} | ||
// If they're still pending they're a thenable. This map also exists | ||
// in Webpack but unfortunately it's not exposed so we have to | ||
// replicate it in user space. null means that it has already loaded. | ||
const chunkCache = new Map(); | ||
function resolveServerReference(bundlerConfig, id) { | ||
var name = "", | ||
resolvedModuleData = bundlerConfig[id]; | ||
if (resolvedModuleData) name = resolvedModuleData.name; | ||
else { | ||
var idx = id.lastIndexOf("#"); | ||
-1 !== idx && | ||
((name = id.slice(idx + 1)), | ||
(resolvedModuleData = bundlerConfig[id.slice(0, idx)])); | ||
if (!resolvedModuleData) | ||
throw Error( | ||
'Could not find the module "' + | ||
id + | ||
'" in the React Server Manifest. This is probably a bug in the React Server Components bundler.' | ||
); | ||
} | ||
return resolvedModuleData.async | ||
? [resolvedModuleData.id, resolvedModuleData.chunks, name, 1] | ||
: [resolvedModuleData.id, resolvedModuleData.chunks, name]; | ||
} | ||
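// Hypothetical example of the "#" convention handled above (manifest keys invented):
//   resolveServerReference(manifest, "file:///app/actions.js#addToCart")
// splits on the last "#", looks up manifest["file:///app/actions.js"], and returns
// [resolvedModuleData.id, resolvedModuleData.chunks, "addToCart"].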
var chunkCache = new Map(); | ||
function requireAsyncModule(id) { | ||
// We've already loaded all the chunks. We can require the module. | ||
const promise = __webpack_require__(id); | ||
if (typeof promise.then !== 'function') { | ||
// This wasn't a promise after all. | ||
var promise = __webpack_require__(id); | ||
if ("function" !== typeof promise.then || "fulfilled" === promise.status) | ||
return null; | ||
} else if (promise.status === 'fulfilled') { | ||
// This module was already resolved earlier. | ||
return null; | ||
} else { | ||
// Instrument the Promise to stash the result. | ||
promise.then(value => { | ||
const fulfilledThenable = promise; | ||
fulfilledThenable.status = 'fulfilled'; | ||
fulfilledThenable.value = value; | ||
}, reason => { | ||
const rejectedThenable = promise; | ||
rejectedThenable.status = 'rejected'; | ||
rejectedThenable.reason = reason; | ||
}); | ||
return promise; | ||
} | ||
promise.then( | ||
function (value) { | ||
promise.status = "fulfilled"; | ||
promise.value = value; | ||
}, | ||
function (reason) { | ||
promise.status = "rejected"; | ||
promise.reason = reason; | ||
} | ||
); | ||
return promise; | ||
} | ||
function ignoreReject() {
  // We rely on rejected promises to be handled by another listener.
}
// Start preloading the modules since we might need them soon.
// This function doesn't suspend.
function ignoreReject() {} | ||
function preloadModule(metadata) { | ||
const chunks = metadata[CHUNKS]; | ||
const promises = []; | ||
let i = 0; | ||
while (i < chunks.length) { | ||
const chunkId = chunks[i++]; | ||
const chunkFilename = chunks[i++]; | ||
const entry = chunkCache.get(chunkId); | ||
if (entry === undefined) { | ||
const thenable = loadChunk(chunkId, chunkFilename); | ||
promises.push(thenable); // $FlowFixMe[method-unbinding] | ||
const resolve = chunkCache.set.bind(chunkCache, chunkId, null); | ||
thenable.then(resolve, ignoreReject); | ||
chunkCache.set(chunkId, thenable); | ||
} else if (entry !== null) { | ||
promises.push(entry); | ||
} | ||
for (var chunks = metadata[1], promises = [], i = 0; i < chunks.length; ) { | ||
var chunkId = chunks[i++], | ||
chunkFilename = chunks[i++], | ||
entry = chunkCache.get(chunkId); | ||
void 0 === entry | ||
? (chunkMap.set(chunkId, chunkFilename), | ||
(chunkFilename = __webpack_chunk_load__(chunkId)), | ||
promises.push(chunkFilename), | ||
(entry = chunkCache.set.bind(chunkCache, chunkId, null)), | ||
chunkFilename.then(entry, ignoreReject), | ||
chunkCache.set(chunkId, chunkFilename)) | ||
: null !== entry && promises.push(entry); | ||
} | ||
if (isAsyncImport(metadata)) { | ||
if (promises.length === 0) { | ||
return requireAsyncModule(metadata[ID]); | ||
} else { | ||
return Promise.all(promises).then(() => { | ||
return requireAsyncModule(metadata[ID]); | ||
}); | ||
} | ||
} else if (promises.length > 0) { | ||
return Promise.all(promises); | ||
} else { | ||
return null; | ||
} | ||
} // Actually require the module or suspend if it's not yet ready. | ||
// Increase priority if necessary. | ||
return 4 === metadata.length | ||
? 0 === promises.length | ||
? requireAsyncModule(metadata[0]) | ||
: Promise.all(promises).then(function () { | ||
return requireAsyncModule(metadata[0]); | ||
}) | ||
: 0 < promises.length | ||
? Promise.all(promises) | ||
: null; | ||
} | ||
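// Note on the data consumed above: metadata[1] is a flat list of [chunkId, chunkFilename]
// pairs, e.g. (invented) ["client2", "client2.chunk.js", "client5", "client5.chunk.js"].
// preloadModule returns a thenable to wait on when anything is still loading, or null
// when everything needed is already cached, after which requireModule can be called.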
function requireModule(metadata) { | ||
let moduleExports = __webpack_require__(metadata[ID]); | ||
if (isAsyncImport(metadata)) { | ||
if (typeof moduleExports.then !== 'function') ; else if (moduleExports.status === 'fulfilled') { | ||
// This Promise should've been instrumented by preloadModule. | ||
var moduleExports = __webpack_require__(metadata[0]); | ||
if (4 === metadata.length && "function" === typeof moduleExports.then) | ||
if ("fulfilled" === moduleExports.status) | ||
moduleExports = moduleExports.value; | ||
} else { | ||
throw moduleExports.reason; | ||
} | ||
} | ||
if (metadata[NAME] === '*') { | ||
// This is a placeholder value that represents that the caller imported this | ||
// as a CommonJS module as is. | ||
return moduleExports; | ||
} | ||
if (metadata[NAME] === '') { | ||
// This is a placeholder value that represents that the caller accessed the | ||
// default property of this if it was an ESM interop module. | ||
return moduleExports.__esModule ? moduleExports.default : moduleExports; | ||
} | ||
return moduleExports[metadata[NAME]]; | ||
else throw moduleExports.reason; | ||
return "*" === metadata[2] | ||
? moduleExports | ||
: "" === metadata[2] | ||
? moduleExports.__esModule | ||
? moduleExports.default | ||
: moduleExports | ||
: moduleExports[metadata[2]]; | ||
} | ||
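// Summary of the export-name convention branched on above:
//   name "*" -> the whole module exports object (imported as a CommonJS namespace)
//   name ""  -> the default export when the module is ESM interop, else the exports object
//   any other name -> moduleExports[name]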
const chunkMap = new Map(); | ||
/** | ||
* We patch the chunk filename function in webpack to insert our own resolution | ||
* of chunks that come from Flight and may not be known to the webpack runtime | ||
*/ | ||
const webpackGetChunkFilename = __webpack_require__.u; | ||
var chunkMap = new Map(), | ||
webpackGetChunkFilename = __webpack_require__.u; | ||
__webpack_require__.u = function (chunkId) { | ||
const flightChunk = chunkMap.get(chunkId); | ||
if (flightChunk !== undefined) { | ||
return flightChunk; | ||
} | ||
return webpackGetChunkFilename(chunkId); | ||
var flightChunk = chunkMap.get(chunkId); | ||
return void 0 !== flightChunk | ||
? flightChunk | ||
: webpackGetChunkFilename(chunkId); | ||
}; | ||
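// Hypothetical example of the patched resolution (chunk ids and filenames invented):
//   chunkMap.set("client7", "static/chunks/client7-abc123.js");
//   __webpack_require__.u("client7"); // "static/chunks/client7-abc123.js" from the Flight map
//   __webpack_require__.u("main");    // falls through to webpackGetChunkFilename("main")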
function loadChunk(chunkId, filename) { | ||
chunkMap.set(chunkId, filename); | ||
return __webpack_chunk_load__(chunkId); | ||
var ReactDOMSharedInternals = | ||
ReactDOM.__DOM_INTERNALS_DO_NOT_USE_OR_WARN_USERS_THEY_CANNOT_UPGRADE, | ||
REACT_ELEMENT_TYPE = Symbol.for("react.transitional.element"), | ||
REACT_LAZY_TYPE = Symbol.for("react.lazy"), | ||
REACT_POSTPONE_TYPE = Symbol.for("react.postpone"), | ||
MAYBE_ITERATOR_SYMBOL = Symbol.iterator; | ||
function getIteratorFn(maybeIterable) { | ||
if (null === maybeIterable || "object" !== typeof maybeIterable) return null; | ||
maybeIterable = | ||
(MAYBE_ITERATOR_SYMBOL && maybeIterable[MAYBE_ITERATOR_SYMBOL]) || | ||
maybeIterable["@@iterator"]; | ||
return "function" === typeof maybeIterable ? maybeIterable : null; | ||
} | ||
const ReactDOMSharedInternals = ReactDOM.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED; | ||
// This client file is in the shared folder because it applies to both SSR and browser contexts. | ||
const ReactDOMCurrentDispatcher = ReactDOMSharedInternals.Dispatcher; | ||
function dispatchHint(code, model) { | ||
const dispatcher = ReactDOMCurrentDispatcher.current; | ||
if (dispatcher) { | ||
switch (code) { | ||
case 'D': | ||
{ | ||
const refined = refineModel(code, model); | ||
const href = refined; | ||
dispatcher.prefetchDNS(href); | ||
return; | ||
} | ||
case 'C': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preconnect(href); | ||
} else { | ||
const href = refined[0]; | ||
const crossOrigin = refined[1]; | ||
dispatcher.preconnect(href, crossOrigin); | ||
} | ||
return; | ||
} | ||
case 'L': | ||
{ | ||
const refined = refineModel(code, model); | ||
const href = refined[0]; | ||
const as = refined[1]; | ||
if (refined.length === 3) { | ||
const options = refined[2]; | ||
dispatcher.preload(href, as, options); | ||
} else { | ||
dispatcher.preload(href, as); | ||
} | ||
return; | ||
} | ||
case 'm': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preloadModule(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preloadModule(href, options); | ||
} | ||
return; | ||
} | ||
case 'S': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitStyle(href); | ||
} else { | ||
const href = refined[0]; | ||
const precedence = refined[1] === 0 ? undefined : refined[1]; | ||
const options = refined.length === 3 ? refined[2] : undefined; | ||
dispatcher.preinitStyle(href, precedence, options); | ||
} | ||
return; | ||
} | ||
case 'X': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitScript(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preinitScript(href, options); | ||
} | ||
return; | ||
} | ||
case 'M': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitModuleScript(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preinitModuleScript(href, options); | ||
} | ||
return; | ||
} | ||
} | ||
} | ||
} // Flow is having trouble refining the HintModels so we help it a bit. | ||
// This should be compiled out in the production build. | ||
function refineModel(code, model) { | ||
return model; | ||
var ASYNC_ITERATOR = Symbol.asyncIterator, | ||
isArrayImpl = Array.isArray, | ||
getPrototypeOf = Object.getPrototypeOf, | ||
ObjectPrototype = Object.prototype, | ||
knownServerReferences = new WeakMap(); | ||
function serializeNumber(number) { | ||
return Number.isFinite(number) | ||
? 0 === number && -Infinity === 1 / number | ||
? "$-0" | ||
: number | ||
: Infinity === number | ||
? "$Infinity" | ||
: -Infinity === number | ||
? "$-Infinity" | ||
: "$NaN"; | ||
} | ||
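// Worked examples of the special number encodings above (illustrative only):
//   serializeNumber(42)        // 42, finite numbers pass through as plain JSON numbers
//   serializeNumber(-0)        // "$-0"
//   serializeNumber(Infinity)  // "$Infinity"
//   serializeNumber(-Infinity) // "$-Infinity"
//   serializeNumber(NaN)       // "$NaN"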
// ATTENTION | ||
// When adding new symbols to this file, | ||
// Please consider also adding to 'react-devtools-shared/src/backend/ReactSymbols' | ||
// The Symbol used to tag the ReactElement-like types. | ||
const REACT_ELEMENT_TYPE = Symbol.for('react.element'); | ||
const REACT_LAZY_TYPE = Symbol.for('react.lazy'); | ||
const REACT_POSTPONE_TYPE = Symbol.for('react.postpone'); | ||
const MAYBE_ITERATOR_SYMBOL = Symbol.iterator; | ||
const FAUX_ITERATOR_SYMBOL = '@@iterator'; | ||
function getIteratorFn(maybeIterable) { | ||
if (maybeIterable === null || typeof maybeIterable !== 'object') { | ||
return null; | ||
function processReply( | ||
root, | ||
formFieldPrefix, | ||
temporaryReferences, | ||
resolve, | ||
reject | ||
) { | ||
function serializeTypedArray(tag, typedArray) { | ||
typedArray = new Blob([ | ||
new Uint8Array( | ||
typedArray.buffer, | ||
typedArray.byteOffset, | ||
typedArray.byteLength | ||
) | ||
]); | ||
var blobId = nextPartId++; | ||
null === formData && (formData = new FormData()); | ||
formData.append(formFieldPrefix + blobId, typedArray); | ||
return "$" + tag + blobId.toString(16); | ||
} | ||
const maybeIterator = MAYBE_ITERATOR_SYMBOL && maybeIterable[MAYBE_ITERATOR_SYMBOL] || maybeIterable[FAUX_ITERATOR_SYMBOL]; | ||
if (typeof maybeIterator === 'function') { | ||
return maybeIterator; | ||
function serializeBinaryReader(reader) { | ||
function progress(entry) { | ||
entry.done | ||
? ((entry = nextPartId++), | ||
data.append(formFieldPrefix + entry, new Blob(buffer)), | ||
data.append( | ||
formFieldPrefix + streamId, | ||
'"$o' + entry.toString(16) + '"' | ||
), | ||
data.append(formFieldPrefix + streamId, "C"), | ||
pendingParts--, | ||
0 === pendingParts && resolve(data)) | ||
: (buffer.push(entry.value), | ||
reader.read(new Uint8Array(1024)).then(progress, reject)); | ||
} | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++, | ||
buffer = []; | ||
reader.read(new Uint8Array(1024)).then(progress, reject); | ||
return "$r" + streamId.toString(16); | ||
} | ||
return null; | ||
} | ||
const isArrayImpl = Array.isArray; // eslint-disable-next-line no-redeclare | ||
function isArray(a) { | ||
return isArrayImpl(a); | ||
} | ||
const getPrototypeOf = Object.getPrototypeOf; | ||
const ObjectPrototype = Object.prototype; | ||
const knownServerReferences = new WeakMap(); // Serializable values | ||
// Thenable<ReactServerValue> | ||
// function serializeByValueID(id: number): string { | ||
// return '$' + id.toString(16); | ||
// } | ||
function serializePromiseID(id) { | ||
return '$@' + id.toString(16); | ||
} | ||
function serializeServerReferenceID(id) { | ||
return '$F' + id.toString(16); | ||
} | ||
function serializeSymbolReference(name) { | ||
return '$S' + name; | ||
} | ||
function serializeFormDataReference(id) { | ||
// Why K? F is "Function". D is "Date". What else? | ||
return '$K' + id.toString(16); | ||
} | ||
function serializeNumber(number) { | ||
if (Number.isFinite(number)) { | ||
if (number === 0 && 1 / number === -Infinity) { | ||
return '$-0'; | ||
} else { | ||
return number; | ||
function serializeReader(reader) { | ||
function progress(entry) { | ||
if (entry.done) | ||
data.append(formFieldPrefix + streamId, "C"), | ||
pendingParts--, | ||
0 === pendingParts && resolve(data); | ||
else | ||
try { | ||
var partJSON = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, partJSON); | ||
reader.read().then(progress, reject); | ||
} catch (x) { | ||
reject(x); | ||
} | ||
} | ||
} else { | ||
if (number === Infinity) { | ||
return '$Infinity'; | ||
} else if (number === -Infinity) { | ||
return '$-Infinity'; | ||
} else { | ||
return '$NaN'; | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++; | ||
reader.read().then(progress, reject); | ||
return "$R" + streamId.toString(16); | ||
} | ||
function serializeReadableStream(stream) { | ||
try { | ||
var binaryReader = stream.getReader({ mode: "byob" }); | ||
} catch (x) { | ||
return serializeReader(stream.getReader()); | ||
} | ||
return serializeBinaryReader(binaryReader); | ||
} | ||
} | ||
function serializeUndefined() { | ||
return '$undefined'; | ||
} | ||
function serializeDateFromDateJSON(dateJSON) { | ||
// JSON.stringify automatically calls Date.prototype.toJSON which calls toISOString. | ||
// We need only tack on a $D prefix. | ||
return '$D' + dateJSON; | ||
} | ||
function serializeBigInt(n) { | ||
return '$n' + n.toString(10); | ||
} | ||
function serializeMapID(id) { | ||
return '$Q' + id.toString(16); | ||
} | ||
function serializeSetID(id) { | ||
return '$W' + id.toString(16); | ||
} | ||
function escapeStringValue(value) { | ||
if (value[0] === '$') { | ||
// We need to escape $ prefixed strings since we use those to encode | ||
// references to IDs and as special symbol values. | ||
return '$' + value; | ||
} else { | ||
return value; | ||
function serializeAsyncIterable(iterable, iterator) { | ||
function progress(entry) { | ||
if (entry.done) { | ||
if (void 0 === entry.value) | ||
data.append(formFieldPrefix + streamId, "C"); | ||
else | ||
try { | ||
var partJSON = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, "C" + partJSON); | ||
} catch (x) { | ||
reject(x); | ||
return; | ||
} | ||
pendingParts--; | ||
0 === pendingParts && resolve(data); | ||
} else | ||
try { | ||
var partJSON$22 = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, partJSON$22); | ||
iterator.next().then(progress, reject); | ||
} catch (x$23) { | ||
reject(x$23); | ||
} | ||
} | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++; | ||
iterable = iterable === iterator; | ||
iterator.next().then(progress, reject); | ||
return "$" + (iterable ? "x" : "X") + streamId.toString(16); | ||
} | ||
} | ||
function processReply(root, formFieldPrefix, resolve, reject) { | ||
let nextPartId = 1; | ||
let pendingParts = 0; | ||
let formData = null; | ||
function resolveToJSON(key, value) { | ||
const parent = this; // Make sure that `parent[key]` wasn't JSONified before `value` was passed to us | ||
if (value === null) { | ||
return null; | ||
} | ||
if (typeof value === 'object') { | ||
// $FlowFixMe[method-unbinding] | ||
if (typeof value.then === 'function') { | ||
// We assume that any object with a .then property is a "Thenable" type, | ||
// or a Promise type. Either of which can be represented by a Promise. | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to stream this value. | ||
formData = new FormData(); | ||
} | ||
if (null === value) return null; | ||
if ("object" === typeof value) { | ||
switch (value.$$typeof) { | ||
case REACT_ELEMENT_TYPE: | ||
if (void 0 !== temporaryReferences && -1 === key.indexOf(":")) { | ||
var parentReference = writtenObjects.get(this); | ||
if (void 0 !== parentReference) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), | ||
"$T" | ||
); | ||
} | ||
throw Error( | ||
"React Element cannot be passed to Server Functions from the Client without a temporary reference set. Pass a TemporaryReferenceSet to the options." | ||
); | ||
case REACT_LAZY_TYPE: | ||
parentReference = value._payload; | ||
var init = value._init; | ||
null === formData && (formData = new FormData()); | ||
pendingParts++; | ||
try { | ||
var resolvedModel = init(parentReference), | ||
lazyId = nextPartId++, | ||
partJSON = serializeModel(resolvedModel, lazyId); | ||
formData.append(formFieldPrefix + lazyId, partJSON); | ||
return "$" + lazyId.toString(16); | ||
} catch (x) { | ||
if ( | ||
"object" === typeof x && | ||
null !== x && | ||
"function" === typeof x.then | ||
) { | ||
pendingParts++; | ||
var lazyId$24 = nextPartId++; | ||
parentReference = function () { | ||
try { | ||
var partJSON$25 = serializeModel(value, lazyId$24), | ||
data$26 = formData; | ||
data$26.append(formFieldPrefix + lazyId$24, partJSON$25); | ||
pendingParts--; | ||
0 === pendingParts && resolve(data$26); | ||
} catch (reason) { | ||
reject(reason); | ||
} | ||
}; | ||
x.then(parentReference, parentReference); | ||
return "$" + lazyId$24.toString(16); | ||
} | ||
reject(x); | ||
return null; | ||
} finally { | ||
pendingParts--; | ||
} | ||
} | ||
if ("function" === typeof value.then) { | ||
null === formData && (formData = new FormData()); | ||
pendingParts++; | ||
const promiseId = nextPartId++; | ||
const thenable = value; | ||
thenable.then(partValue => { | ||
const partJSON = JSON.stringify(partValue, resolveToJSON); // $FlowFixMe[incompatible-type] We know it's not null because we assigned it above. | ||
const data = formData; // eslint-disable-next-line react-internal/safe-string-coercion | ||
data.append(formFieldPrefix + promiseId, partJSON); | ||
pendingParts--; | ||
if (pendingParts === 0) { | ||
resolve(data); | ||
var promiseId = nextPartId++; | ||
value.then(function (partValue) { | ||
try { | ||
var partJSON$28 = serializeModel(partValue, promiseId); | ||
partValue = formData; | ||
partValue.append(formFieldPrefix + promiseId, partJSON$28); | ||
pendingParts--; | ||
0 === pendingParts && resolve(partValue); | ||
} catch (reason) { | ||
reject(reason); | ||
} | ||
}, reason => { | ||
// In the future we could consider serializing this as an error | ||
// that throws on the server instead. | ||
reject(reason); | ||
}); | ||
return serializePromiseID(promiseId); | ||
}, reject); | ||
return "$@" + promiseId.toString(16); | ||
} | ||
if (isArray(value)) { | ||
// $FlowFixMe[incompatible-return] | ||
return value; | ||
} // TODO: Should we use Object.prototype.toString.call() to test for cross-realm objects?
parentReference = writtenObjects.get(value); | ||
if (void 0 !== parentReference) | ||
if (modelRoot === value) modelRoot = null; | ||
else return parentReference; | ||
else | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference && | ||
((key = parentReference + ":" + key), | ||
writtenObjects.set(value, key), | ||
void 0 !== temporaryReferences && | ||
temporaryReferences.set(key, value))); | ||
if (isArrayImpl(value)) return value; | ||
if (value instanceof FormData) { | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to use rich objects as its values. | ||
formData = new FormData(); | ||
} | ||
const data = formData; | ||
const refId = nextPartId++; // Copy all the form fields with a prefix for this reference. | ||
// These must come first in the form order because we assume that all the | ||
// fields are available before this is referenced. | ||
const prefix = formFieldPrefix + refId + '_'; // $FlowFixMe[prop-missing]: FormData has forEach. | ||
value.forEach((originalValue, originalKey) => { | ||
data.append(prefix + originalKey, originalValue); | ||
null === formData && (formData = new FormData()); | ||
var data$32 = formData; | ||
key = nextPartId++; | ||
var prefix = formFieldPrefix + key + "_"; | ||
value.forEach(function (originalValue, originalKey) { | ||
data$32.append(prefix + originalKey, originalValue); | ||
}); | ||
return serializeFormDataReference(refId); | ||
return "$K" + key.toString(16); | ||
} | ||
if (value instanceof Map) { | ||
const partJSON = JSON.stringify(Array.from(value), resolveToJSON); | ||
if (formData === null) { | ||
formData = new FormData(); | ||
} | ||
const mapId = nextPartId++; | ||
formData.append(formFieldPrefix + mapId, partJSON); | ||
return serializeMapID(mapId); | ||
if (value instanceof Map) | ||
return ( | ||
(key = nextPartId++), | ||
(parentReference = serializeModel(Array.from(value), key)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$Q" + key.toString(16) | ||
); | ||
if (value instanceof Set) | ||
return ( | ||
(key = nextPartId++), | ||
(parentReference = serializeModel(Array.from(value), key)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$W" + key.toString(16) | ||
); | ||
if (value instanceof ArrayBuffer) | ||
return ( | ||
(key = new Blob([value])), | ||
(parentReference = nextPartId++), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + parentReference, key), | ||
"$A" + parentReference.toString(16) | ||
); | ||
if (value instanceof Int8Array) return serializeTypedArray("O", value); | ||
if (value instanceof Uint8Array) return serializeTypedArray("o", value); | ||
if (value instanceof Uint8ClampedArray) | ||
return serializeTypedArray("U", value); | ||
if (value instanceof Int16Array) return serializeTypedArray("S", value); | ||
if (value instanceof Uint16Array) return serializeTypedArray("s", value); | ||
if (value instanceof Int32Array) return serializeTypedArray("L", value); | ||
if (value instanceof Uint32Array) return serializeTypedArray("l", value); | ||
if (value instanceof Float32Array) return serializeTypedArray("G", value); | ||
if (value instanceof Float64Array) return serializeTypedArray("g", value); | ||
if (value instanceof BigInt64Array) | ||
return serializeTypedArray("M", value); | ||
if (value instanceof BigUint64Array) | ||
return serializeTypedArray("m", value); | ||
if (value instanceof DataView) return serializeTypedArray("V", value); | ||
if ("function" === typeof Blob && value instanceof Blob) | ||
return ( | ||
null === formData && (formData = new FormData()), | ||
(key = nextPartId++), | ||
formData.append(formFieldPrefix + key, value), | ||
"$B" + key.toString(16) | ||
); | ||
if ((key = getIteratorFn(value))) | ||
return ( | ||
(parentReference = key.call(value)), | ||
parentReference === value | ||
? ((key = nextPartId++), | ||
(parentReference = serializeModel( | ||
Array.from(parentReference), | ||
key | ||
)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$i" + key.toString(16)) | ||
: Array.from(parentReference) | ||
); | ||
if ( | ||
"function" === typeof ReadableStream && | ||
value instanceof ReadableStream | ||
) | ||
return serializeReadableStream(value); | ||
key = value[ASYNC_ITERATOR]; | ||
if ("function" === typeof key) | ||
return serializeAsyncIterable(value, key.call(value)); | ||
key = getPrototypeOf(value); | ||
if ( | ||
key !== ObjectPrototype && | ||
(null === key || null !== getPrototypeOf(key)) | ||
) { | ||
if (void 0 === temporaryReferences) | ||
throw Error( | ||
"Only plain objects, and a few built-ins, can be passed to Server Functions. Classes or null prototypes are not supported." | ||
); | ||
return "$T"; | ||
} | ||
if (value instanceof Set) { | ||
const partJSON = JSON.stringify(Array.from(value), resolveToJSON); | ||
if (formData === null) { | ||
formData = new FormData(); | ||
} | ||
const setId = nextPartId++; | ||
formData.append(formFieldPrefix + setId, partJSON); | ||
return serializeSetID(setId); | ||
} | ||
const iteratorFn = getIteratorFn(value); | ||
if (iteratorFn) { | ||
return Array.from(value); | ||
} // Verify that this is a simple plain object. | ||
const proto = getPrototypeOf(value); | ||
if (proto !== ObjectPrototype && (proto === null || getPrototypeOf(proto) !== null)) { | ||
throw new Error('Only plain objects, and a few built-ins, can be passed to Server Actions. ' + 'Classes or null prototypes are not supported.'); | ||
} | ||
return value; | ||
} | ||
if (typeof value === 'string') { | ||
// TODO: Maybe too clever. If we support URL there's no similar trick. | ||
if (value[value.length - 1] === 'Z') { | ||
// Possibly a Date, whose toJSON automatically calls toISOString | ||
// $FlowFixMe[incompatible-use] | ||
const originalValue = parent[key]; | ||
if (originalValue instanceof Date) { | ||
return serializeDateFromDateJSON(value); | ||
} | ||
} | ||
return escapeStringValue(value); | ||
if ("string" === typeof value) { | ||
if ("Z" === value[value.length - 1] && this[key] instanceof Date) | ||
return "$D" + value; | ||
key = "$" === value[0] ? "$" + value : value; | ||
return key; | ||
} | ||
if (typeof value === 'boolean') { | ||
return value; | ||
if ("boolean" === typeof value) return value; | ||
if ("number" === typeof value) return serializeNumber(value); | ||
if ("undefined" === typeof value) return "$undefined"; | ||
if ("function" === typeof value) { | ||
parentReference = knownServerReferences.get(value); | ||
if (void 0 !== parentReference) | ||
return ( | ||
(key = JSON.stringify(parentReference, resolveToJSON)), | ||
null === formData && (formData = new FormData()), | ||
(parentReference = nextPartId++), | ||
formData.set(formFieldPrefix + parentReference, key), | ||
"$F" + parentReference.toString(16) | ||
); | ||
if ( | ||
void 0 !== temporaryReferences && | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference) | ||
) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), "$T" | ||
); | ||
throw Error( | ||
"Client Functions cannot be passed directly to Server Functions. Only Functions passed from the Server can be passed back again." | ||
); | ||
} | ||
if (typeof value === 'number') { | ||
return serializeNumber(value); | ||
if ("symbol" === typeof value) { | ||
if ( | ||
void 0 !== temporaryReferences && | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference) | ||
) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), "$T" | ||
); | ||
throw Error( | ||
"Symbols cannot be passed to a Server Function without a temporary reference set. Pass a TemporaryReferenceSet to the options." | ||
); | ||
} | ||
if (typeof value === 'undefined') { | ||
return serializeUndefined(); | ||
} | ||
if (typeof value === 'function') { | ||
const metaData = knownServerReferences.get(value); | ||
if (metaData !== undefined) { | ||
const metaDataJSON = JSON.stringify(metaData, resolveToJSON); | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to stream this value. | ||
formData = new FormData(); | ||
} // The reference to this function came from the same client so we can pass it back. | ||
const refId = nextPartId++; // eslint-disable-next-line react-internal/safe-string-coercion | ||
formData.set(formFieldPrefix + refId, metaDataJSON); | ||
return serializeServerReferenceID(refId); | ||
} | ||
throw new Error('Client Functions cannot be passed directly to Server Functions. ' + 'Only Functions passed from the Server can be passed back again.'); | ||
} | ||
if (typeof value === 'symbol') { | ||
// $FlowFixMe[incompatible-type] `description` might be undefined | ||
const name = value.description; | ||
if (Symbol.for(name) !== value) { | ||
throw new Error('Only global symbols received from Symbol.for(...) can be passed to Server Functions. ' + ("The symbol Symbol.for(" + // $FlowFixMe[incompatible-type] `description` might be undefined | ||
value.description + ") cannot be found among global symbols.")); | ||
} | ||
return serializeSymbolReference(name); | ||
} | ||
if (typeof value === 'bigint') { | ||
return serializeBigInt(value); | ||
} | ||
throw new Error("Type " + typeof value + " is not supported as an argument to a Server Function."); | ||
} // $FlowFixMe[incompatible-type] it's not going to be undefined because we'll encode it. | ||
const json = JSON.stringify(root, resolveToJSON); | ||
if (formData === null) { | ||
// If it's a simple data structure, we just use plain JSON. | ||
resolve(json); | ||
} else { | ||
// Otherwise, we use FormData to let us stream in the result. | ||
formData.set(formFieldPrefix + '0', json); | ||
if (pendingParts === 0) { | ||
// $FlowFixMe[incompatible-call] this has already been refined. | ||
resolve(formData); | ||
} | ||
if ("bigint" === typeof value) return "$n" + value.toString(10); | ||
throw Error( | ||
"Type " + | ||
typeof value + | ||
" is not supported as an argument to a Server Function." | ||
); | ||
} | ||
function serializeModel(model, id) { | ||
"object" === typeof model && | ||
null !== model && | ||
((id = "$" + id.toString(16)), | ||
writtenObjects.set(model, id), | ||
void 0 !== temporaryReferences && temporaryReferences.set(id, model)); | ||
modelRoot = model; | ||
return JSON.stringify(model, resolveToJSON); | ||
} | ||
var nextPartId = 1, | ||
pendingParts = 0, | ||
formData = null, | ||
writtenObjects = new WeakMap(), | ||
modelRoot = root, | ||
json = serializeModel(root, 0); | ||
null === formData | ||
? resolve(json) | ||
: (formData.set(formFieldPrefix + "0", json), | ||
0 === pendingParts && resolve(formData)); | ||
return function () { | ||
0 < pendingParts && | ||
((pendingParts = 0), | ||
null === formData ? resolve(json) : resolve(formData)); | ||
}; | ||
} | ||
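// Minimal usage sketch for processReply, based on the code above (the callback behavior
// is inferred, not documented API): resolve receives a plain JSON string for simple data,
// or a FormData body once every pending part has been appended.
//   processReply(args, "1_", temporaryReferenceSet,
//     function (body) { /* POST the JSON string or FormData to the action endpoint */ },
//     function (error) { /* serialization failed */ });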
function registerServerReference(proxy, reference) { | ||
knownServerReferences.set(proxy, reference); | ||
} // $FlowFixMe[method-unbinding] | ||
function createServerReference(id, callServer) { | ||
const proxy = function () { | ||
// $FlowFixMe[method-unbinding] | ||
const args = Array.prototype.slice.call(arguments); | ||
return callServer(id, args); | ||
}; | ||
registerServerReference(proxy, { | ||
id, | ||
bound: null | ||
}); | ||
return proxy; | ||
} | ||
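// Hypothetical usage of createServerReference (id and transport invented): the proxy
// forwards every call to the host-provided callServer with the reference id and args.
//   var addToCart = createServerReference("app/actions#addToCart", callServer);
//   addToCart(itemId, 2); // callServer("app/actions#addToCart", [itemId, 2])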
const ROW_ID = 0; | ||
const ROW_TAG = 1; | ||
const ROW_LENGTH = 2; | ||
const ROW_CHUNK_BY_NEWLINE = 3; | ||
const ROW_CHUNK_BY_LENGTH = 4; | ||
const PENDING = 'pending'; | ||
const BLOCKED = 'blocked'; | ||
const CYCLIC = 'cyclic'; | ||
const RESOLVED_MODEL = 'resolved_model'; | ||
const RESOLVED_MODULE = 'resolved_module'; | ||
const INITIALIZED = 'fulfilled'; | ||
const ERRORED = 'rejected'; // Dev-only | ||
// $FlowFixMe[missing-this-annot] | ||
function Chunk(status, value, reason, response) { | ||
function createBoundServerReference(metaData, callServer) { | ||
function action() { | ||
var args = Array.prototype.slice.call(arguments); | ||
return bound | ||
? "fulfilled" === bound.status | ||
? callServer(id, bound.value.concat(args)) | ||
: Promise.resolve(bound).then(function (boundArgs) { | ||
return callServer(id, boundArgs.concat(args)); | ||
}) | ||
: callServer(id, args); | ||
} | ||
var id = metaData.id, | ||
bound = metaData.bound; | ||
registerServerReference(action, { id: id, bound: bound }); | ||
return action; | ||
} | ||
function ReactPromise(status, value, reason, response) { | ||
this.status = status; | ||
@@ -657,78 +532,41 @@
this.value = value;
this._response = response; | ||
} // We subclass Promise.prototype so that we get other methods like .catch | ||
Chunk.prototype = Object.create(Promise.prototype); // TODO: This doesn't return a new Promise chain unlike the real .then | ||
Chunk.prototype.then = function (resolve, reject) { | ||
const chunk = this; // If we have resolved content, we try to initialize it first which | ||
// might put us back into one of the other states. | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
} | ||
ReactPromise.prototype = Object.create(Promise.prototype); | ||
ReactPromise.prototype.then = function (resolve, reject) { | ||
switch (this.status) { | ||
case "resolved_model": | ||
initializeModelChunk(this); | ||
break; | ||
case RESOLVED_MODULE: | ||
initializeModuleChunk(chunk); | ||
case "resolved_module": | ||
initializeModuleChunk(this); | ||
} | ||
switch (this.status) { | ||
case "fulfilled": | ||
resolve(this.value); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
resolve(chunk.value); | ||
case "pending": | ||
case "blocked": | ||
resolve && | ||
(null === this.value && (this.value = []), this.value.push(resolve)); | ||
reject && | ||
(null === this.reason && (this.reason = []), this.reason.push(reject)); | ||
break; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
if (resolve) { | ||
if (chunk.value === null) { | ||
chunk.value = []; | ||
} | ||
chunk.value.push(resolve); | ||
} | ||
if (reject) { | ||
if (chunk.reason === null) { | ||
chunk.reason = []; | ||
} | ||
chunk.reason.push(reject); | ||
} | ||
break; | ||
default: | ||
reject(chunk.reason); | ||
break; | ||
reject && reject(this.reason); | ||
} | ||
}; | ||
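// State machine used by these chunk promises (the old Chunk and the new ReactPromise
// behave the same way): "pending"/"blocked" queue listeners on value/reason,
// "resolved_model"/"resolved_module" initialize lazily on first read, and the chunk
// finally settles to "fulfilled" or "rejected".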
function readChunk(chunk) { | ||
// If we have resolved content, we try to initialize it first which | ||
// might put us back into one of the other states. | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
case "resolved_model": | ||
initializeModelChunk(chunk); | ||
break; | ||
case RESOLVED_MODULE: | ||
case "resolved_module": | ||
initializeModuleChunk(chunk); | ||
break; | ||
} // The status might have changed after initialization. | ||
} | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
case "fulfilled": | ||
return chunk.value; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
// eslint-disable-next-line no-throw-literal | ||
case "pending": | ||
case "blocked": | ||
throw chunk; | ||
default: | ||
@@ -738,1051 +576,1078 @@
throw chunk.reason;
} | ||
function getRoot(response) { | ||
const chunk = getChunk(response, 0); | ||
return chunk; | ||
} | ||
function createPendingChunk(response) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(PENDING, null, null, response); | ||
return new ReactPromise("pending", null, null, response); | ||
} | ||
function createBlockedChunk(response) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(BLOCKED, null, null, response); | ||
} | ||
function createErrorChunk(response, error) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(ERRORED, null, error, response); | ||
} | ||
function wakeChunk(listeners, value) { | ||
for (let i = 0; i < listeners.length; i++) { | ||
const listener = listeners[i]; | ||
listener(value); | ||
} | ||
for (var i = 0; i < listeners.length; i++) (0, listeners[i])(value); | ||
} | ||
function wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners) { | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
case "fulfilled": | ||
wakeChunk(resolveListeners, chunk.value); | ||
break; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
chunk.value = resolveListeners; | ||
chunk.reason = rejectListeners; | ||
case "pending": | ||
case "blocked": | ||
if (chunk.value) | ||
for (var i = 0; i < resolveListeners.length; i++) | ||
chunk.value.push(resolveListeners[i]); | ||
else chunk.value = resolveListeners; | ||
if (chunk.reason) { | ||
if (rejectListeners) | ||
for ( | ||
resolveListeners = 0; | ||
resolveListeners < rejectListeners.length; | ||
resolveListeners++ | ||
) | ||
chunk.reason.push(rejectListeners[resolveListeners]); | ||
} else chunk.reason = rejectListeners; | ||
break; | ||
case ERRORED: | ||
if (rejectListeners) { | ||
wakeChunk(rejectListeners, chunk.reason); | ||
} | ||
break; | ||
case "rejected": | ||
rejectListeners && wakeChunk(rejectListeners, chunk.reason); | ||
} | ||
} | ||
function triggerErrorOnChunk(chunk, error) { | ||
if (chunk.status !== PENDING && chunk.status !== BLOCKED) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" !== chunk.status && "blocked" !== chunk.status) | ||
chunk.reason.error(error); | ||
else { | ||
var listeners = chunk.reason; | ||
chunk.status = "rejected"; | ||
chunk.reason = error; | ||
null !== listeners && wakeChunk(listeners, error); | ||
} | ||
const listeners = chunk.reason; | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
if (listeners !== null) { | ||
wakeChunk(listeners, error); | ||
} | ||
} | ||
function createResolvedModelChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(RESOLVED_MODEL, value, null, response); | ||
function createResolvedIteratorResultChunk(response, value, done) { | ||
return new ReactPromise( | ||
"resolved_model", | ||
(done ? '{"done":true,"value":' : '{"done":false,"value":') + value + "}", | ||
null, | ||
response | ||
); | ||
} | ||
function createResolvedModuleChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(RESOLVED_MODULE, value, null, response); | ||
function resolveIteratorResultChunk(chunk, value, done) { | ||
resolveModelChunk( | ||
chunk, | ||
(done ? '{"done":true,"value":' : '{"done":false,"value":') + value + "}" | ||
); | ||
} | ||
function createInitializedTextChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(INITIALIZED, value, null, response); | ||
} | ||
function createInitializedBufferChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(INITIALIZED, value, null, response); | ||
} | ||
function resolveModelChunk(chunk, value) { | ||
if (chunk.status !== PENDING) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" !== chunk.status) chunk.reason.enqueueModel(value); | ||
else { | ||
var resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "resolved_model"; | ||
chunk.value = value; | ||
null !== resolveListeners && | ||
(initializeModelChunk(chunk), | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners)); | ||
} | ||
const resolveListeners = chunk.value; | ||
const rejectListeners = chunk.reason; | ||
const resolvedChunk = chunk; | ||
resolvedChunk.status = RESOLVED_MODEL; | ||
resolvedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
// It's unfortunate that we're reading this eagerly if
// we already have listeners attached since they might no
// longer be rendered or might not be the highest pri.
initializeModelChunk(resolvedChunk); // The status might have changed after initialization. | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
} | ||
function resolveModuleChunk(chunk, value) { | ||
if (chunk.status !== PENDING && chunk.status !== BLOCKED) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" === chunk.status || "blocked" === chunk.status) { | ||
var resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "resolved_module"; | ||
chunk.value = value; | ||
null !== resolveListeners && | ||
(initializeModuleChunk(chunk), | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners)); | ||
} | ||
const resolveListeners = chunk.value; | ||
const rejectListeners = chunk.reason; | ||
const resolvedChunk = chunk; | ||
resolvedChunk.status = RESOLVED_MODULE; | ||
resolvedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
initializeModuleChunk(resolvedChunk); | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
} | ||
let initializingChunk = null; | ||
let initializingChunkBlockedModel = null; | ||
var initializingHandler = null; | ||
function initializeModelChunk(chunk) { | ||
const prevChunk = initializingChunk; | ||
const prevBlocked = initializingChunkBlockedModel; | ||
initializingChunk = chunk; | ||
initializingChunkBlockedModel = null; | ||
const resolvedModel = chunk.value; // We go to the CYCLIC state until we've fully resolved this. | ||
// We do this before parsing in case we try to initialize the same chunk | ||
// while parsing the model. Such as in a cyclic reference. | ||
const cyclicChunk = chunk; | ||
cyclicChunk.status = CYCLIC; | ||
cyclicChunk.value = null; | ||
cyclicChunk.reason = null; | ||
var prevHandler = initializingHandler; | ||
initializingHandler = null; | ||
var resolvedModel = chunk.value; | ||
chunk.status = "blocked"; | ||
chunk.value = null; | ||
chunk.reason = null; | ||
try { | ||
const value = parseModel(chunk._response, resolvedModel); | ||
if (initializingChunkBlockedModel !== null && initializingChunkBlockedModel.deps > 0) { | ||
initializingChunkBlockedModel.value = value; // We discovered new dependencies on modules that are not yet resolved. | ||
// We have to go the BLOCKED state until they're resolved. | ||
const blockedChunk = chunk; | ||
blockedChunk.status = BLOCKED; | ||
blockedChunk.value = null; | ||
blockedChunk.reason = null; | ||
} else { | ||
const resolveListeners = cyclicChunk.value; | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
wakeChunk(resolveListeners, value); | ||
var value = JSON.parse(resolvedModel, chunk._response._fromJSON), | ||
resolveListeners = chunk.value; | ||
null !== resolveListeners && | ||
((chunk.value = null), | ||
(chunk.reason = null), | ||
wakeChunk(resolveListeners, value)); | ||
if (null !== initializingHandler) { | ||
if (initializingHandler.errored) throw initializingHandler.value; | ||
if (0 < initializingHandler.deps) { | ||
initializingHandler.value = value; | ||
initializingHandler.chunk = chunk; | ||
return; | ||
} | ||
} | ||
chunk.status = "fulfilled"; | ||
chunk.value = value; | ||
} catch (error) { | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
(chunk.status = "rejected"), (chunk.reason = error); | ||
} finally { | ||
initializingChunk = prevChunk; | ||
initializingChunkBlockedModel = prevBlocked; | ||
initializingHandler = prevHandler; | ||
} | ||
} | ||
function initializeModuleChunk(chunk) { | ||
try { | ||
const value = requireModule(chunk.value); | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = value; | ||
var value = requireModule(chunk.value); | ||
chunk.status = "fulfilled"; | ||
chunk.value = value; | ||
} catch (error) { | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
(chunk.status = "rejected"), (chunk.reason = error); | ||
} | ||
} // Report that any missing chunks in the model is now going to throw this | ||
// error upon read. Also notify any pending promises. | ||
} | ||
function reportGlobalError(response, error) { | ||
response._chunks.forEach(chunk => { | ||
// If this chunk was already resolved or errored, it won't | ||
// trigger an error but if it wasn't then we need to | ||
// because we won't be getting any new data to resolve it. | ||
if (chunk.status === PENDING) { | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
response._chunks.forEach(function (chunk) { | ||
"pending" === chunk.status && triggerErrorOnChunk(chunk, error); | ||
}); | ||
} | ||
function createElement(type, key, props) { | ||
const element = { | ||
// This tag allows us to uniquely identify this as a React Element | ||
$$typeof: REACT_ELEMENT_TYPE, | ||
// Built-in properties that belong on the element | ||
type: type, | ||
key: key, | ||
ref: null, | ||
props: props, | ||
// Record the component responsible for creating this element. | ||
_owner: null | ||
}; | ||
return element; | ||
} | ||
function createLazyChunkWrapper(chunk) { | ||
const lazyType = { | ||
$$typeof: REACT_LAZY_TYPE, | ||
_payload: chunk, | ||
_init: readChunk | ||
}; | ||
return lazyType; | ||
return { $$typeof: REACT_LAZY_TYPE, _payload: chunk, _init: readChunk }; | ||
} | ||
function getChunk(response, id) { | ||
const chunks = response._chunks; | ||
let chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunk = createPendingChunk(response); | ||
chunks.set(id, chunk); | ||
} | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk || ((chunk = createPendingChunk(response)), chunks.set(id, chunk)); | ||
return chunk; | ||
} | ||
function createModelResolver(chunk, parentObject, key, cyclic) { | ||
let blocked; | ||
if (initializingChunkBlockedModel) { | ||
blocked = initializingChunkBlockedModel; | ||
if (!cyclic) { | ||
blocked.deps++; | ||
function waitForReference( | ||
referencedChunk, | ||
parentObject, | ||
key, | ||
response, | ||
map, | ||
path | ||
) { | ||
function fulfill(value) { | ||
for (var i = 1; i < path.length; i++) { | ||
for (; value.$$typeof === REACT_LAZY_TYPE; ) | ||
if (((value = value._payload), value === handler.chunk)) | ||
value = handler.value; | ||
else if ("fulfilled" === value.status) value = value.value; | ||
else { | ||
path.splice(0, i - 1); | ||
value.then(fulfill, reject); | ||
return; | ||
} | ||
value = value[path[i]]; | ||
} | ||
} else { | ||
blocked = initializingChunkBlockedModel = { | ||
deps: cyclic ? 0 : 1, | ||
value: null | ||
i = map(response, value, parentObject, key); | ||
parentObject[key] = i; | ||
"" === key && null === handler.value && (handler.value = i); | ||
if ( | ||
parentObject[0] === REACT_ELEMENT_TYPE && | ||
"object" === typeof handler.value && | ||
null !== handler.value && | ||
handler.value.$$typeof === REACT_ELEMENT_TYPE | ||
) | ||
switch (((value = handler.value), key)) { | ||
case "3": | ||
value.props = i; | ||
} | ||
handler.deps--; | ||
0 === handler.deps && | ||
((i = handler.chunk), | ||
null !== i && | ||
"blocked" === i.status && | ||
((value = i.value), | ||
(i.status = "fulfilled"), | ||
(i.value = handler.value), | ||
null !== value && wakeChunk(value, handler.value))); | ||
} | ||
function reject(error) { | ||
if (!handler.errored) { | ||
handler.errored = !0; | ||
handler.value = error; | ||
var chunk = handler.chunk; | ||
null !== chunk && | ||
"blocked" === chunk.status && | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
} | ||
if (initializingHandler) { | ||
var handler = initializingHandler; | ||
handler.deps++; | ||
} else | ||
handler = initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: null, | ||
deps: 1, | ||
errored: !1 | ||
}; | ||
} | ||
return value => { | ||
parentObject[key] = value; | ||
blocked.deps--; | ||
if (blocked.deps === 0) { | ||
if (chunk.status !== BLOCKED) { | ||
return; | ||
referencedChunk.then(fulfill, reject); | ||
return null; | ||
} | ||
function loadServerReference(response, metaData, parentObject, key) { | ||
if (!response._serverReferenceConfig) | ||
return createBoundServerReference(metaData, response._callServer); | ||
var serverReference = resolveServerReference( | ||
response._serverReferenceConfig, | ||
metaData.id | ||
); | ||
if ((response = preloadModule(serverReference))) | ||
metaData.bound && (response = Promise.all([response, metaData.bound])); | ||
else if (metaData.bound) response = Promise.resolve(metaData.bound); | ||
else return requireModule(serverReference); | ||
if (initializingHandler) { | ||
var handler = initializingHandler; | ||
handler.deps++; | ||
} else | ||
handler = initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: null, | ||
deps: 1, | ||
errored: !1 | ||
}; | ||
response.then( | ||
function () { | ||
var resolvedValue = requireModule(serverReference); | ||
if (metaData.bound) { | ||
var boundArgs = metaData.bound.value.slice(0); | ||
boundArgs.unshift(null); | ||
resolvedValue = resolvedValue.bind.apply(resolvedValue, boundArgs); | ||
} | ||
const resolveListeners = chunk.value; | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = blocked.value; | ||
if (resolveListeners !== null) { | ||
wakeChunk(resolveListeners, blocked.value); | ||
parentObject[key] = resolvedValue; | ||
"" === key && null === handler.value && (handler.value = resolvedValue); | ||
if ( | ||
parentObject[0] === REACT_ELEMENT_TYPE && | ||
"object" === typeof handler.value && | ||
null !== handler.value && | ||
handler.value.$$typeof === REACT_ELEMENT_TYPE | ||
) | ||
switch (((boundArgs = handler.value), key)) { | ||
case "3": | ||
boundArgs.props = resolvedValue; | ||
} | ||
handler.deps--; | ||
0 === handler.deps && | ||
((resolvedValue = handler.chunk), | ||
null !== resolvedValue && | ||
"blocked" === resolvedValue.status && | ||
((boundArgs = resolvedValue.value), | ||
(resolvedValue.status = "fulfilled"), | ||
(resolvedValue.value = handler.value), | ||
null !== boundArgs && wakeChunk(boundArgs, handler.value))); | ||
}, | ||
function (error) { | ||
if (!handler.errored) { | ||
handler.errored = !0; | ||
handler.value = error; | ||
var chunk = handler.chunk; | ||
null !== chunk && | ||
"blocked" === chunk.status && | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
} | ||
}; | ||
); | ||
return null; | ||
} | ||
function createModelReject(chunk) { | ||
return error => triggerErrorOnChunk(chunk, error); | ||
} | ||
function createServerReferenceProxy(response, metaData) { | ||
const callServer = response._callServer; | ||
const proxy = function () { | ||
// $FlowFixMe[method-unbinding] | ||
const args = Array.prototype.slice.call(arguments); | ||
const p = metaData.bound; | ||
if (!p) { | ||
return callServer(metaData.id, args); | ||
} | ||
if (p.status === INITIALIZED) { | ||
const bound = p.value; | ||
return callServer(metaData.id, bound.concat(args)); | ||
} // Since this is a fake Promise whose .then doesn't chain, we have to wrap it. | ||
// TODO: Remove the wrapper once that's fixed. | ||
return Promise.resolve(p).then(function (bound) { | ||
return callServer(metaData.id, bound.concat(args)); | ||
}); | ||
}; | ||
registerServerReference(proxy, metaData); | ||
return proxy; | ||
} | ||
function getOutlinedModel(response, id) { | ||
const chunk = getChunk(response, id); | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
function getOutlinedModel(response, reference, parentObject, key, map) { | ||
reference = reference.split(":"); | ||
var id = parseInt(reference[0], 16); | ||
id = getChunk(response, id); | ||
switch (id.status) { | ||
case "resolved_model": | ||
initializeModelChunk(id); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
{ | ||
return chunk.value; | ||
case "resolved_module": | ||
initializeModuleChunk(id); | ||
} | ||
switch (id.status) { | ||
case "fulfilled": | ||
var value = id.value; | ||
for (id = 1; id < reference.length; id++) { | ||
for (; value.$$typeof === REACT_LAZY_TYPE; ) | ||
if (((value = value._payload), "fulfilled" === value.status)) | ||
value = value.value; | ||
else | ||
return waitForReference( | ||
value, | ||
parentObject, | ||
key, | ||
response, | ||
map, | ||
reference.slice(id - 1) | ||
); | ||
value = value[reference[id]]; | ||
} | ||
// We always encode it first in the stream so it won't be pending. | ||
return map(response, value, parentObject, key); | ||
case "pending": | ||
case "blocked": | ||
return waitForReference(id, parentObject, key, response, map, reference); | ||
default: | ||
throw chunk.reason; | ||
return ( | ||
initializingHandler | ||
? ((initializingHandler.errored = !0), | ||
(initializingHandler.value = id.reason)) | ||
: (initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: id.reason, | ||
deps: 0, | ||
errored: !0 | ||
}), | ||
null | ||
); | ||
} | ||
} | ||
function createMap(response, model) { | ||
return new Map(model); | ||
} | ||
function createSet(response, model) { | ||
return new Set(model); | ||
} | ||
function createBlob(response, model) { | ||
return new Blob(model.slice(1), { type: model[0] }); | ||
} | ||
function createFormData(response, model) { | ||
response = new FormData(); | ||
for (var i = 0; i < model.length; i++) | ||
response.append(model[i][0], model[i][1]); | ||
return response; | ||
} | ||
function extractIterator(response, model) { | ||
return model[Symbol.iterator](); | ||
} | ||
function createModel(response, model) { | ||
return model; | ||
} | ||
function parseModelString(response, parentObject, key, value) { | ||
if (value[0] === '$') { | ||
if (value === '$') { | ||
// A very common symbol. | ||
return REACT_ELEMENT_TYPE; | ||
} | ||
if ("$" === value[0]) { | ||
if ("$" === value) | ||
return ( | ||
null !== initializingHandler && | ||
"0" === key && | ||
(initializingHandler = { | ||
parent: initializingHandler, | ||
chunk: null, | ||
value: null, | ||
deps: 0, | ||
errored: !1 | ||
}), | ||
REACT_ELEMENT_TYPE | ||
); | ||
switch (value[1]) { | ||
case '$': | ||
{ | ||
// This was an escaped string value. | ||
return value.slice(1); | ||
} | ||
case 'L': | ||
{ | ||
// Lazy node | ||
const id = parseInt(value.slice(2), 16); | ||
const chunk = getChunk(response, id); // We create a React.lazy wrapper around any lazy values. | ||
// When passed into React, we'll know how to suspend on this. | ||
return createLazyChunkWrapper(chunk); | ||
} | ||
case '@': | ||
{ | ||
// Promise | ||
const id = parseInt(value.slice(2), 16); | ||
const chunk = getChunk(response, id); | ||
return chunk; | ||
} | ||
case 'S': | ||
{ | ||
// Symbol | ||
return Symbol.for(value.slice(2)); | ||
} | ||
case 'F': | ||
{ | ||
// Server Reference | ||
const id = parseInt(value.slice(2), 16); | ||
const metadata = getOutlinedModel(response, id); | ||
return createServerReferenceProxy(response, metadata); | ||
} | ||
case 'Q': | ||
{ | ||
// Map | ||
const id = parseInt(value.slice(2), 16); | ||
const data = getOutlinedModel(response, id); | ||
return new Map(data); | ||
} | ||
case 'W': | ||
{ | ||
// Set | ||
const id = parseInt(value.slice(2), 16); | ||
const data = getOutlinedModel(response, id); | ||
return new Set(data); | ||
} | ||
case 'I': | ||
{ | ||
// $Infinity | ||
return Infinity; | ||
} | ||
case '-': | ||
{ | ||
// $-0 or $-Infinity | ||
if (value === '$-0') { | ||
return -0; | ||
} else { | ||
return -Infinity; | ||
} | ||
} | ||
case 'N': | ||
{ | ||
// $NaN | ||
return NaN; | ||
} | ||
case 'u': | ||
{ | ||
// matches "$undefined" | ||
// Special encoding for `undefined` which can't be serialized as JSON otherwise. | ||
return undefined; | ||
} | ||
case 'D': | ||
{ | ||
// Date | ||
return new Date(Date.parse(value.slice(2))); | ||
} | ||
case 'n': | ||
{ | ||
// BigInt | ||
return BigInt(value.slice(2)); | ||
} | ||
case "$": | ||
return value.slice(1); | ||
case "L": | ||
return ( | ||
(parentObject = parseInt(value.slice(2), 16)), | ||
(response = getChunk(response, parentObject)), | ||
createLazyChunkWrapper(response) | ||
); | ||
case "@": | ||
if (2 === value.length) return new Promise(function () {}); | ||
parentObject = parseInt(value.slice(2), 16); | ||
return getChunk(response, parentObject); | ||
case "S": | ||
return Symbol.for(value.slice(2)); | ||
case "F": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel( | ||
response, | ||
value, | ||
parentObject, | ||
key, | ||
loadServerReference | ||
) | ||
); | ||
case "T": | ||
parentObject = "$" + value.slice(2); | ||
response = response._tempRefs; | ||
if (null == response) | ||
throw Error( | ||
"Missing a temporary reference set but the RSC response returned a temporary reference. Pass a temporaryReference option with the set that was used with the reply." | ||
); | ||
return response.get(parentObject); | ||
case "Q": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createMap) | ||
); | ||
case "W": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createSet) | ||
); | ||
case "B": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createBlob) | ||
); | ||
case "K": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createFormData) | ||
); | ||
case "Z": | ||
return resolveErrorProd(); | ||
case "i": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, extractIterator) | ||
); | ||
case "I": | ||
return Infinity; | ||
case "-": | ||
return "$-0" === value ? -0 : -Infinity; | ||
case "N": | ||
return NaN; | ||
case "u": | ||
return; | ||
case "D": | ||
return new Date(Date.parse(value.slice(2))); | ||
case "n": | ||
return BigInt(value.slice(2)); | ||
default: | ||
{ | ||
// We assume that anything else is a reference ID. | ||
const id = parseInt(value.slice(1), 16); | ||
const chunk = getChunk(response, id); | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
break; | ||
case RESOLVED_MODULE: | ||
initializeModuleChunk(chunk); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
const chunkValue = chunk.value; | ||
return chunkValue; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
const parentChunk = initializingChunk; | ||
chunk.then(createModelResolver(parentChunk, parentObject, key, chunk.status === CYCLIC), createModelReject(parentChunk)); | ||
return null; | ||
default: | ||
throw chunk.reason; | ||
} | ||
} | ||
return ( | ||
(value = value.slice(1)), | ||
getOutlinedModel(response, value, parentObject, key, createModel) | ||
); | ||
} | ||
} | ||
return value; | ||
} | ||
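// Illustrative sketch (not from either bundle): the "$"-prefixed encodings handled above | ||
// map back to values roughly as follows (payloads below are hypothetical): | ||
//   "$"                          -> REACT_ELEMENT_TYPE marker for an element tuple | ||
//   "$$text"                     -> the literal string "$text" (escaped) | ||
//   "$L1a"                       -> React.lazy wrapper around chunk 0x1a | ||
//   "$@1a"                       -> the chunk (a thenable) with id 0x1a | ||
//   "$Sreact.memo"               -> Symbol.for("react.memo") | ||
//   "$D2024-01-01T00:00:00.000Z" -> new Date("2024-01-01T00:00:00.000Z") | ||
//   "$n9007199254740993"         -> BigInt("9007199254740993") | ||
//   "$undefined" / "$NaN" / "$Infinity" / "$-0" / "$-Infinity" -> the corresponding values | ||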
function parseModelTuple(response, value) { | ||
const tuple = value; | ||
if (tuple[0] === REACT_ELEMENT_TYPE) { | ||
// TODO: Consider having React just directly accept these arrays as elements. | ||
// Or even change the ReactElement type to be an array. | ||
return createElement(tuple[1], tuple[2], tuple[3]); | ||
} | ||
return value; | ||
} | ||
function missingCall() { | ||
throw new Error('Trying to call a function from "use server" but the callServer option ' + 'was not implemented in your router runtime.'); | ||
throw Error( | ||
'Trying to call a function from "use server" but the callServer option was not implemented in your router runtime.' | ||
); | ||
} | ||
function createResponse(bundlerConfig, moduleLoading, callServer, nonce) { | ||
const chunks = new Map(); | ||
const response = { | ||
_bundlerConfig: bundlerConfig, | ||
_moduleLoading: moduleLoading, | ||
_callServer: callServer !== undefined ? callServer : missingCall, | ||
_nonce: nonce, | ||
_chunks: chunks, | ||
_stringDecoder: createStringDecoder(), | ||
_fromJSON: null, | ||
_rowState: 0, | ||
_rowID: 0, | ||
_rowTag: 0, | ||
_rowLength: 0, | ||
_buffer: [] | ||
}; // Don't inline this call because it causes closure to outline the call above. | ||
response._fromJSON = createFromJSONCallback(response); | ||
return response; | ||
function ResponseInstance( | ||
bundlerConfig, | ||
serverReferenceConfig, | ||
moduleLoading, | ||
callServer, | ||
encodeFormAction, | ||
nonce, | ||
temporaryReferences | ||
) { | ||
var chunks = new Map(); | ||
this._bundlerConfig = bundlerConfig; | ||
this._serverReferenceConfig = serverReferenceConfig; | ||
this._moduleLoading = moduleLoading; | ||
this._callServer = void 0 !== callServer ? callServer : missingCall; | ||
this._encodeFormAction = encodeFormAction; | ||
this._nonce = nonce; | ||
this._chunks = chunks; | ||
this._stringDecoder = new TextDecoder(); | ||
this._fromJSON = null; | ||
this._rowLength = this._rowTag = this._rowID = this._rowState = 0; | ||
this._buffer = []; | ||
this._tempRefs = temporaryReferences; | ||
this._fromJSON = createFromJSONCallback(this); | ||
} | ||
function resolveModel(response, id, model) { | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createResolvedModelChunk(response, model)); | ||
} else { | ||
resolveModelChunk(chunk, model); | ||
} | ||
function resolveBuffer(response, id, buffer) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk && "pending" !== chunk.status | ||
? chunk.reason.enqueueValue(buffer) | ||
: chunks.set(id, new ReactPromise("fulfilled", buffer, null, response)); | ||
} | ||
function resolveText(response, id, text) { | ||
const chunks = response._chunks; // We assume that we always reference large strings after they've been | ||
// emitted. | ||
chunks.set(id, createInitializedTextChunk(response, text)); | ||
function resolveModule(response, id, model) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
model = JSON.parse(model, response._fromJSON); | ||
var clientReference = resolveClientReference(response._bundlerConfig, model); | ||
if ((model = preloadModule(clientReference))) { | ||
if (chunk) { | ||
var blockedChunk = chunk; | ||
blockedChunk.status = "blocked"; | ||
} else | ||
(blockedChunk = new ReactPromise("blocked", null, null, response)), | ||
chunks.set(id, blockedChunk); | ||
model.then( | ||
function () { | ||
return resolveModuleChunk(blockedChunk, clientReference); | ||
}, | ||
function (error) { | ||
return triggerErrorOnChunk(blockedChunk, error); | ||
} | ||
); | ||
} else | ||
chunk | ||
? resolveModuleChunk(chunk, clientReference) | ||
: chunks.set( | ||
id, | ||
new ReactPromise("resolved_module", clientReference, null, response) | ||
); | ||
} | ||
function resolveBuffer(response, id, buffer) { | ||
const chunks = response._chunks; // We assume that we always reference buffers after they've been emitted. | ||
chunks.set(id, createInitializedBufferChunk(response, buffer)); | ||
function resolveStream(response, id, stream, controller) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk | ||
? "pending" === chunk.status && | ||
((response = chunk.value), | ||
(chunk.status = "fulfilled"), | ||
(chunk.value = stream), | ||
(chunk.reason = controller), | ||
null !== response && wakeChunk(response, chunk.value)) | ||
: chunks.set( | ||
id, | ||
new ReactPromise("fulfilled", stream, controller, response) | ||
); | ||
} | ||
function resolveModule(response, id, model) { | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
const clientReferenceMetadata = parseModel(response, model); | ||
const clientReference = resolveClientReference(response._bundlerConfig, clientReferenceMetadata); | ||
// For now we preload all modules as early as possible since it's likely | ||
// that we'll need them. | ||
const promise = preloadModule(clientReference); | ||
if (promise) { | ||
let blockedChunk; | ||
if (!chunk) { | ||
// Technically, we should just treat promise as the chunk in this | ||
// case. Because it'll just behave as any other promise. | ||
blockedChunk = createBlockedChunk(response); | ||
chunks.set(id, blockedChunk); | ||
} else { | ||
// This can't actually happen because we don't have any forward | ||
// references to modules. | ||
blockedChunk = chunk; | ||
blockedChunk.status = BLOCKED; | ||
function startReadableStream(response, id, type) { | ||
var controller = null; | ||
type = new ReadableStream({ | ||
type: type, | ||
start: function (c) { | ||
controller = c; | ||
} | ||
promise.then(() => resolveModuleChunk(blockedChunk, clientReference), error => triggerErrorOnChunk(blockedChunk, error)); | ||
} else { | ||
if (!chunk) { | ||
chunks.set(id, createResolvedModuleChunk(response, clientReference)); | ||
} else { | ||
// This can't actually happen because we don't have any forward | ||
// references to modules. | ||
resolveModuleChunk(chunk, clientReference); | ||
}); | ||
var previousBlockedChunk = null; | ||
resolveStream(response, id, type, { | ||
enqueueValue: function (value) { | ||
null === previousBlockedChunk | ||
? controller.enqueue(value) | ||
: previousBlockedChunk.then(function () { | ||
controller.enqueue(value); | ||
}); | ||
}, | ||
enqueueModel: function (json) { | ||
if (null === previousBlockedChunk) { | ||
var chunk = new ReactPromise("resolved_model", json, null, response); | ||
initializeModelChunk(chunk); | ||
"fulfilled" === chunk.status | ||
? controller.enqueue(chunk.value) | ||
: (chunk.then( | ||
function (v) { | ||
return controller.enqueue(v); | ||
}, | ||
function (e) { | ||
return controller.error(e); | ||
} | ||
), | ||
(previousBlockedChunk = chunk)); | ||
} else { | ||
chunk = previousBlockedChunk; | ||
var chunk$52 = createPendingChunk(response); | ||
chunk$52.then( | ||
function (v) { | ||
return controller.enqueue(v); | ||
}, | ||
function (e) { | ||
return controller.error(e); | ||
} | ||
); | ||
previousBlockedChunk = chunk$52; | ||
chunk.then(function () { | ||
previousBlockedChunk === chunk$52 && (previousBlockedChunk = null); | ||
resolveModelChunk(chunk$52, json); | ||
}); | ||
} | ||
}, | ||
close: function () { | ||
if (null === previousBlockedChunk) controller.close(); | ||
else { | ||
var blockedChunk = previousBlockedChunk; | ||
previousBlockedChunk = null; | ||
blockedChunk.then(function () { | ||
return controller.close(); | ||
}); | ||
} | ||
}, | ||
error: function (error) { | ||
if (null === previousBlockedChunk) controller.error(error); | ||
else { | ||
var blockedChunk = previousBlockedChunk; | ||
previousBlockedChunk = null; | ||
blockedChunk.then(function () { | ||
return controller.error(error); | ||
}); | ||
} | ||
} | ||
} | ||
}); | ||
} | ||
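// Illustrative note (not from either bundle): rows tagged "R" open a plain ReadableStream | ||
// and rows tagged "r" open a byte stream (type: "bytes"); later rows with the same id are | ||
// pushed through the controller wired up above (enqueueValue / enqueueModel), and a "C" | ||
// row closes the stream. | ||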
function resolveErrorProd(response, id, digest) { | ||
const error = new Error('An error occurred in the Server Components render. The specific message is omitted in production' + ' builds to avoid leaking sensitive details. A digest property is included on this error instance which' + ' may provide additional details about the nature of the error.'); | ||
error.stack = 'Error: ' + error.message; | ||
error.digest = digest; | ||
const errorWithDigest = error; | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createErrorChunk(response, errorWithDigest)); | ||
} else { | ||
triggerErrorOnChunk(chunk, errorWithDigest); | ||
} | ||
function asyncIterator() { | ||
return this; | ||
} | ||
function resolvePostponeProd(response, id) { | ||
const error = new Error('A Server Component was postponed. The reason is omitted in production' + ' builds to avoid leaking sensitive details.'); | ||
const postponeInstance = error; | ||
postponeInstance.$$typeof = REACT_POSTPONE_TYPE; | ||
postponeInstance.stack = 'Error: ' + error.message; | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createErrorChunk(response, postponeInstance)); | ||
} else { | ||
triggerErrorOnChunk(chunk, postponeInstance); | ||
} | ||
function createIterator(next) { | ||
next = { next: next }; | ||
next[ASYNC_ITERATOR] = asyncIterator; | ||
return next; | ||
} | ||
function startAsyncIterable(response, id, iterator) { | ||
var buffer = [], | ||
closed = !1, | ||
nextWriteIndex = 0, | ||
$jscomp$compprop0 = {}; | ||
$jscomp$compprop0 = | ||
(($jscomp$compprop0[ASYNC_ITERATOR] = function () { | ||
var nextReadIndex = 0; | ||
return createIterator(function (arg) { | ||
if (void 0 !== arg) | ||
throw Error( | ||
"Values cannot be passed to next() of AsyncIterables passed to Client Components." | ||
); | ||
if (nextReadIndex === buffer.length) { | ||
if (closed) | ||
return new ReactPromise( | ||
"fulfilled", | ||
{ done: !0, value: void 0 }, | ||
null, | ||
response | ||
); | ||
buffer[nextReadIndex] = createPendingChunk(response); | ||
} | ||
return buffer[nextReadIndex++]; | ||
}); | ||
}), | ||
$jscomp$compprop0); | ||
resolveStream( | ||
response, | ||
id, | ||
iterator ? $jscomp$compprop0[ASYNC_ITERATOR]() : $jscomp$compprop0, | ||
{ | ||
enqueueValue: function (value) { | ||
if (nextWriteIndex === buffer.length) | ||
buffer[nextWriteIndex] = new ReactPromise( | ||
"fulfilled", | ||
{ done: !1, value: value }, | ||
null, | ||
response | ||
); | ||
else { | ||
var chunk = buffer[nextWriteIndex], | ||
resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "fulfilled"; | ||
chunk.value = { done: !1, value: value }; | ||
null !== resolveListeners && | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
nextWriteIndex++; | ||
}, | ||
enqueueModel: function (value) { | ||
nextWriteIndex === buffer.length | ||
? (buffer[nextWriteIndex] = createResolvedIteratorResultChunk( | ||
response, | ||
value, | ||
!1 | ||
)) | ||
: resolveIteratorResultChunk(buffer[nextWriteIndex], value, !1); | ||
nextWriteIndex++; | ||
}, | ||
close: function (value) { | ||
closed = !0; | ||
nextWriteIndex === buffer.length | ||
? (buffer[nextWriteIndex] = createResolvedIteratorResultChunk( | ||
response, | ||
value, | ||
!0 | ||
)) | ||
: resolveIteratorResultChunk(buffer[nextWriteIndex], value, !0); | ||
for (nextWriteIndex++; nextWriteIndex < buffer.length; ) | ||
resolveIteratorResultChunk( | ||
buffer[nextWriteIndex++], | ||
'"$undefined"', | ||
!0 | ||
); | ||
}, | ||
error: function (error) { | ||
closed = !0; | ||
for ( | ||
nextWriteIndex === buffer.length && | ||
(buffer[nextWriteIndex] = createPendingChunk(response)); | ||
nextWriteIndex < buffer.length; | ||
function resolveHint(response, code, model) { | ||
const hintModel = parseModel(response, model); | ||
dispatchHint(code, hintModel); | ||
) | ||
triggerErrorOnChunk(buffer[nextWriteIndex++], error); | ||
} | ||
} | ||
); | ||
} | ||
function resolveErrorProd() { | ||
var error = Error( | ||
"An error occurred in the Server Components render. The specific message is omitted in production builds to avoid leaking sensitive details. A digest property is included on this error instance which may provide additional details about the nature of the error." | ||
); | ||
error.stack = "Error: " + error.message; | ||
return error; | ||
} | ||
function mergeBuffer(buffer, lastChunk) { | ||
const l = buffer.length; // Count the bytes we'll need | ||
let byteLength = lastChunk.length; | ||
for (let i = 0; i < l; i++) { | ||
for (var l = buffer.length, byteLength = lastChunk.length, i = 0; i < l; i++) | ||
byteLength += buffer[i].byteLength; | ||
} // Allocate enough contiguous space | ||
const result = new Uint8Array(byteLength); | ||
let offset = 0; // Copy all the buffers into it. | ||
for (let i = 0; i < l; i++) { | ||
const chunk = buffer[i]; | ||
result.set(chunk, offset); | ||
offset += chunk.byteLength; | ||
byteLength = new Uint8Array(byteLength); | ||
for (var i$53 = (i = 0); i$53 < l; i$53++) { | ||
var chunk = buffer[i$53]; | ||
byteLength.set(chunk, i); | ||
i += chunk.byteLength; | ||
} | ||
result.set(lastChunk, offset); | ||
return result; | ||
byteLength.set(lastChunk, i); | ||
return byteLength; | ||
} | ||
function resolveTypedArray(response, id, buffer, lastChunk, constructor, bytesPerElement) { | ||
// If the view fits into one original buffer, we just reuse that buffer instead of | ||
// copying it out to a separate copy. This means that it's not always possible to | ||
// transfer these values to other threads without copying first since they may | ||
// share array buffer. For this to work, it must also have bytes aligned to a | ||
// multiple of a size of the type. | ||
const chunk = buffer.length === 0 && lastChunk.byteOffset % bytesPerElement === 0 ? lastChunk : mergeBuffer(buffer, lastChunk); // TODO: The transfer protocol of RSC is little-endian. If the client isn't little-endian | ||
// we should convert it instead. In practice big endian isn't really Web compatible so it's | ||
// somewhat safe to assume that browsers aren't going to run it, but maybe there's some SSR | ||
// server that's affected. | ||
const view = new constructor(chunk.buffer, chunk.byteOffset, chunk.byteLength / bytesPerElement); | ||
resolveBuffer(response, id, view); | ||
function resolveTypedArray( | ||
response, | ||
id, | ||
buffer, | ||
lastChunk, | ||
constructor, | ||
bytesPerElement | ||
) { | ||
buffer = | ||
0 === buffer.length && 0 === lastChunk.byteOffset % bytesPerElement | ||
? lastChunk | ||
: mergeBuffer(buffer, lastChunk); | ||
constructor = new constructor( | ||
buffer.buffer, | ||
buffer.byteOffset, | ||
buffer.byteLength / bytesPerElement | ||
); | ||
resolveBuffer(response, id, constructor); | ||
} | ||
function processFullRow(response, id, tag, buffer, chunk) { | ||
{ | ||
switch (tag) { | ||
case 65 | ||
/* "A" */ | ||
: | ||
// We must always clone to extract it into a separate buffer instead of just a view. | ||
resolveBuffer(response, id, mergeBuffer(buffer, chunk).buffer); | ||
return; | ||
case 67 | ||
/* "C" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int8Array, 1); | ||
return; | ||
case 99 | ||
/* "c" */ | ||
: | ||
resolveBuffer(response, id, buffer.length === 0 ? chunk : mergeBuffer(buffer, chunk)); | ||
return; | ||
case 85 | ||
/* "U" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint8ClampedArray, 1); | ||
return; | ||
case 83 | ||
/* "S" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int16Array, 2); | ||
return; | ||
case 115 | ||
/* "s" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint16Array, 2); | ||
return; | ||
case 76 | ||
/* "L" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int32Array, 4); | ||
return; | ||
case 108 | ||
/* "l" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint32Array, 4); | ||
return; | ||
case 70 | ||
/* "F" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Float32Array, 4); | ||
return; | ||
case 100 | ||
/* "d" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Float64Array, 8); | ||
return; | ||
case 78 | ||
/* "N" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, BigInt64Array, 8); | ||
return; | ||
case 109 | ||
/* "m" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, BigUint64Array, 8); | ||
return; | ||
case 86 | ||
/* "V" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, DataView, 1); | ||
return; | ||
} | ||
function processFullBinaryRow(response, id, tag, buffer, chunk) { | ||
switch (tag) { | ||
case 65: | ||
resolveBuffer(response, id, mergeBuffer(buffer, chunk).buffer); | ||
return; | ||
case 79: | ||
resolveTypedArray(response, id, buffer, chunk, Int8Array, 1); | ||
return; | ||
case 111: | ||
resolveBuffer( | ||
response, | ||
id, | ||
0 === buffer.length ? chunk : mergeBuffer(buffer, chunk) | ||
); | ||
return; | ||
case 85: | ||
resolveTypedArray(response, id, buffer, chunk, Uint8ClampedArray, 1); | ||
return; | ||
case 83: | ||
resolveTypedArray(response, id, buffer, chunk, Int16Array, 2); | ||
return; | ||
case 115: | ||
resolveTypedArray(response, id, buffer, chunk, Uint16Array, 2); | ||
return; | ||
case 76: | ||
resolveTypedArray(response, id, buffer, chunk, Int32Array, 4); | ||
return; | ||
case 108: | ||
resolveTypedArray(response, id, buffer, chunk, Uint32Array, 4); | ||
return; | ||
case 71: | ||
resolveTypedArray(response, id, buffer, chunk, Float32Array, 4); | ||
return; | ||
case 103: | ||
resolveTypedArray(response, id, buffer, chunk, Float64Array, 8); | ||
return; | ||
case 77: | ||
resolveTypedArray(response, id, buffer, chunk, BigInt64Array, 8); | ||
return; | ||
case 109: | ||
resolveTypedArray(response, id, buffer, chunk, BigUint64Array, 8); | ||
return; | ||
case 86: | ||
resolveTypedArray(response, id, buffer, chunk, DataView, 1); | ||
return; | ||
} | ||
const stringDecoder = response._stringDecoder; | ||
let row = ''; | ||
for (let i = 0; i < buffer.length; i++) { | ||
row += readPartialStringChunk(stringDecoder, buffer[i]); | ||
} | ||
row += readFinalStringChunk(stringDecoder, chunk); | ||
for ( | ||
var stringDecoder = response._stringDecoder, row = "", i = 0; | ||
i < buffer.length; | ||
i++ | ||
) | ||
row += stringDecoder.decode(buffer[i], decoderOptions); | ||
buffer = row += stringDecoder.decode(chunk); | ||
switch (tag) { | ||
case 73 | ||
/* "I" */ | ||
: | ||
{ | ||
resolveModule(response, id, row); | ||
return; | ||
} | ||
case 72 | ||
/* "H" */ | ||
: | ||
{ | ||
const code = row[0]; | ||
resolveHint(response, code, row.slice(1)); | ||
return; | ||
} | ||
case 69 | ||
/* "E" */ | ||
: | ||
{ | ||
const errorInfo = JSON.parse(row); | ||
{ | ||
resolveErrorProd(response, id, errorInfo.digest); | ||
} | ||
return; | ||
} | ||
case 84 | ||
/* "T" */ | ||
: | ||
{ | ||
resolveText(response, id, row); | ||
return; | ||
} | ||
case 68 | ||
/* "D" */ | ||
: | ||
{ | ||
throw new Error('Failed to read a RSC payload created by a development version of React ' + 'on the server while using a production version on the client. Always use ' + 'matching versions on the server and the client.'); | ||
} | ||
case 80 | ||
/* "P" */ | ||
: | ||
{ | ||
{ | ||
{ | ||
resolvePostponeProd(response, id); | ||
} | ||
return; | ||
} | ||
} | ||
// Fallthrough | ||
default: | ||
/* """ "{" "[" "t" "f" "n" "0" - "9" */ | ||
{ | ||
// We assume anything else is JSON. | ||
resolveModel(response, id, row); | ||
return; | ||
} | ||
} | ||
} | ||
function processBinaryChunk(response, chunk) { | ||
let i = 0; | ||
let rowState = response._rowState; | ||
let rowID = response._rowID; | ||
let rowTag = response._rowTag; | ||
let rowLength = response._rowLength; | ||
const buffer = response._buffer; | ||
const chunkLength = chunk.length; | ||
while (i < chunkLength) { | ||
let lastIdx = -1; | ||
switch (rowState) { | ||
case ROW_ID: | ||
{ | ||
const byte = chunk[i++]; | ||
if (byte === 58 | ||
/* ":" */ | ||
) { | ||
// Finished the rowID, next we'll parse the tag. | ||
rowState = ROW_TAG; | ||
} else { | ||
rowID = rowID << 4 | (byte > 96 ? byte - 87 : byte - 48); | ||
} | ||
continue; | ||
} | ||
case ROW_TAG: | ||
{ | ||
const resolvedRowTag = chunk[i]; | ||
if (resolvedRowTag === 84 | ||
/* "T" */ | ||
|| (resolvedRowTag === 65 | ||
/* "A" */ | ||
|| resolvedRowTag === 67 | ||
/* "C" */ | ||
|| resolvedRowTag === 99 | ||
/* "c" */ | ||
|| resolvedRowTag === 85 | ||
/* "U" */ | ||
|| resolvedRowTag === 83 | ||
/* "S" */ | ||
|| resolvedRowTag === 115 | ||
/* "s" */ | ||
|| resolvedRowTag === 76 | ||
/* "L" */ | ||
|| resolvedRowTag === 108 | ||
/* "l" */ | ||
|| resolvedRowTag === 70 | ||
/* "F" */ | ||
|| resolvedRowTag === 100 | ||
/* "d" */ | ||
|| resolvedRowTag === 78 | ||
/* "N" */ | ||
|| resolvedRowTag === 109 | ||
/* "m" */ | ||
|| resolvedRowTag === 86) | ||
/* "V" */ | ||
) { | ||
rowTag = resolvedRowTag; | ||
rowState = ROW_LENGTH; | ||
i++; | ||
} else if (resolvedRowTag > 64 && resolvedRowTag < 91 | ||
/* "A"-"Z" */ | ||
) { | ||
rowTag = resolvedRowTag; | ||
rowState = ROW_CHUNK_BY_NEWLINE; | ||
i++; | ||
} else { | ||
rowTag = 0; | ||
rowState = ROW_CHUNK_BY_NEWLINE; // This was an unknown tag so it was probably part of the data. | ||
} | ||
continue; | ||
} | ||
case ROW_LENGTH: | ||
{ | ||
const byte = chunk[i++]; | ||
if (byte === 44 | ||
/* "," */ | ||
) { | ||
// Finished the rowLength, next we'll buffer up to that length. | ||
rowState = ROW_CHUNK_BY_LENGTH; | ||
} else { | ||
rowLength = rowLength << 4 | (byte > 96 ? byte - 87 : byte - 48); | ||
} | ||
continue; | ||
} | ||
case ROW_CHUNK_BY_NEWLINE: | ||
{ | ||
// We're looking for a newline | ||
lastIdx = chunk.indexOf(10 | ||
/* "\n" */ | ||
, i); | ||
case 73: | ||
resolveModule(response, id, buffer); | ||
break; | ||
case 72: | ||
id = buffer[0]; | ||
buffer = buffer.slice(1); | ||
response = JSON.parse(buffer, response._fromJSON); | ||
buffer = ReactDOMSharedInternals.d; | ||
switch (id) { | ||
case "D": | ||
buffer.D(response); | ||
break; | ||
} | ||
case ROW_CHUNK_BY_LENGTH: | ||
{ | ||
// We're looking for the remaining byte length | ||
lastIdx = i + rowLength; | ||
if (lastIdx > chunk.length) { | ||
lastIdx = -1; | ||
} | ||
case "C": | ||
"string" === typeof response | ||
? buffer.C(response) | ||
: buffer.C(response[0], response[1]); | ||
break; | ||
} | ||
} | ||
const offset = chunk.byteOffset + i; | ||
if (lastIdx > -1) { | ||
// We found the last chunk of the row | ||
const length = lastIdx - i; | ||
const lastChunk = new Uint8Array(chunk.buffer, offset, length); | ||
processFullRow(response, rowID, rowTag, buffer, lastChunk); // Reset state machine for a new row | ||
i = lastIdx; | ||
if (rowState === ROW_CHUNK_BY_NEWLINE) { | ||
// If we're trailing by a newline we need to skip it. | ||
i++; | ||
case "L": | ||
id = response[0]; | ||
tag = response[1]; | ||
3 === response.length | ||
? buffer.L(id, tag, response[2]) | ||
: buffer.L(id, tag); | ||
break; | ||
case "m": | ||
"string" === typeof response | ||
? buffer.m(response) | ||
: buffer.m(response[0], response[1]); | ||
break; | ||
case "X": | ||
"string" === typeof response | ||
? buffer.X(response) | ||
: buffer.X(response[0], response[1]); | ||
break; | ||
case "S": | ||
"string" === typeof response | ||
? buffer.S(response) | ||
: buffer.S( | ||
response[0], | ||
0 === response[1] ? void 0 : response[1], | ||
3 === response.length ? response[2] : void 0 | ||
); | ||
break; | ||
case "M": | ||
"string" === typeof response | ||
? buffer.M(response) | ||
: buffer.M(response[0], response[1]); | ||
} | ||
rowState = ROW_ID; | ||
rowTag = 0; | ||
rowID = 0; | ||
rowLength = 0; | ||
buffer.length = 0; | ||
} else { | ||
// The rest of this row is in a future chunk. We stash the rest of the | ||
// current chunk until we can process the full row. | ||
const length = chunk.byteLength - i; | ||
const remainingSlice = new Uint8Array(chunk.buffer, offset, length); | ||
buffer.push(remainingSlice); // Update how many bytes we're still waiting for. If we're looking for | ||
// a newline, this doesn't hurt since we'll just ignore it. | ||
rowLength -= remainingSlice.byteLength; | ||
break; | ||
} | ||
case 69: | ||
tag = JSON.parse(buffer); | ||
buffer = resolveErrorProd(); | ||
buffer.digest = tag.digest; | ||
tag = response._chunks; | ||
(chunk = tag.get(id)) | ||
? triggerErrorOnChunk(chunk, buffer) | ||
: tag.set(id, new ReactPromise("rejected", null, buffer, response)); | ||
break; | ||
case 84: | ||
tag = response._chunks; | ||
(chunk = tag.get(id)) && "pending" !== chunk.status | ||
? chunk.reason.enqueueValue(buffer) | ||
: tag.set(id, new ReactPromise("fulfilled", buffer, null, response)); | ||
break; | ||
case 78: | ||
case 68: | ||
case 87: | ||
throw Error( | ||
"Failed to read a RSC payload created by a development version of React on the server while using a production version on the client. Always use matching versions on the server and the client." | ||
); | ||
case 82: | ||
startReadableStream(response, id, void 0); | ||
break; | ||
case 114: | ||
startReadableStream(response, id, "bytes"); | ||
break; | ||
case 88: | ||
startAsyncIterable(response, id, !1); | ||
break; | ||
case 120: | ||
startAsyncIterable(response, id, !0); | ||
break; | ||
case 67: | ||
(response = response._chunks.get(id)) && | ||
"fulfilled" === response.status && | ||
response.reason.close("" === buffer ? '"$undefined"' : buffer); | ||
break; | ||
case 80: | ||
buffer = Error( | ||
"A Server Component was postponed. The reason is omitted in production builds to avoid leaking sensitive details." | ||
); | ||
buffer.$$typeof = REACT_POSTPONE_TYPE; | ||
buffer.stack = "Error: " + buffer.message; | ||
tag = response._chunks; | ||
(chunk = tag.get(id)) | ||
? triggerErrorOnChunk(chunk, buffer) | ||
: tag.set(id, new ReactPromise("rejected", null, buffer, response)); | ||
break; | ||
default: | ||
(tag = response._chunks), | ||
(chunk = tag.get(id)) | ||
? resolveModelChunk(chunk, buffer) | ||
: tag.set( | ||
id, | ||
new ReactPromise("resolved_model", buffer, null, response) | ||
); | ||
} | ||
response._rowState = rowState; | ||
response._rowID = rowID; | ||
response._rowTag = rowTag; | ||
response._rowLength = rowLength; | ||
} | ||
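// Illustrative sketch (not from either bundle) of the framing this state machine parses: | ||
// each row starts with "<id in hex>:<tag byte>"; for text and binary tags it continues | ||
// with "<byte length in hex>," followed by exactly that many payload bytes, while for | ||
// JSON model rows and other tags the payload simply runs to the next newline. | ||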
function parseModel(response, json) { | ||
return JSON.parse(json, response._fromJSON); | ||
} | ||
function createFromJSONCallback(response) { | ||
// $FlowFixMe[missing-this-annot] | ||
return function (key, value) { | ||
if (typeof value === 'string') { | ||
// We can't use .bind here because we need the "this" value. | ||
if ("string" === typeof value) | ||
return parseModelString(response, this, key, value); | ||
if ("object" === typeof value && null !== value) { | ||
if (value[0] === REACT_ELEMENT_TYPE) { | ||
if ( | ||
((key = { | ||
$$typeof: REACT_ELEMENT_TYPE, | ||
type: value[1], | ||
key: value[2], | ||
ref: null, | ||
props: value[3] | ||
}), | ||
null !== initializingHandler) | ||
) | ||
if ( | ||
((value = initializingHandler), | ||
(initializingHandler = value.parent), | ||
value.errored) | ||
) | ||
(key = new ReactPromise("rejected", null, value.value, response)), | ||
(key = createLazyChunkWrapper(key)); | ||
else if (0 < value.deps) { | ||
var blockedChunk = new ReactPromise( | ||
"blocked", | ||
null, | ||
null, | ||
response | ||
); | ||
value.value = key; | ||
value.chunk = blockedChunk; | ||
key = createLazyChunkWrapper(blockedChunk); | ||
} | ||
} else key = value; | ||
return key; | ||
} | ||
if (typeof value === 'object' && value !== null) { | ||
return parseModelTuple(response, value); | ||
} | ||
return value; | ||
}; | ||
} | ||
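// Illustrative note (not from either bundle): on the wire a React element arrives as a | ||
// four-slot array ["$", type, key, props]; the reviver above turns the leading "$" into | ||
// REACT_ELEMENT_TYPE via parseModelString and then rebuilds it as | ||
// { $$typeof: REACT_ELEMENT_TYPE, type, key, ref: null, props }. | ||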
function close(response) { | ||
// In case there are any remaining unresolved chunks, they won't | ||
// be resolved now. So we need to issue an error to those. | ||
// Ideally we should be able to early bail out if we kept a | ||
// ref count of pending chunks. | ||
reportGlobalError(response, new Error('Connection closed.')); | ||
} | ||
function createResponseFromOptions(options) { | ||
return createResponse(null, null, options && options.callServer ? options.callServer : undefined, undefined // nonce | ||
return new ResponseInstance( | ||
null, | ||
null, | ||
null, | ||
options && options.callServer ? options.callServer : void 0, | ||
void 0, | ||
void 0, | ||
options && options.temporaryReferences | ||
? options.temporaryReferences | ||
: void 0 | ||
); | ||
} | ||
function startReadingFromStream(response, stream) { | ||
const reader = stream.getReader(); | ||
function progress(_ref) { | ||
let done = _ref.done, | ||
value = _ref.value; | ||
var value = _ref.value; | ||
if (_ref.done) reportGlobalError(response, Error("Connection closed.")); | ||
else { | ||
var i = 0, | ||
rowState = response._rowState; | ||
_ref = response._rowID; | ||
for ( | ||
var rowTag = response._rowTag, | ||
rowLength = response._rowLength, | ||
buffer = response._buffer, | ||
chunkLength = value.length; | ||
i < chunkLength; | ||
if (done) { | ||
close(response); | ||
return; | ||
) { | ||
var lastIdx = -1; | ||
switch (rowState) { | ||
case 0: | ||
lastIdx = value[i++]; | ||
58 === lastIdx | ||
? (rowState = 1) | ||
: (_ref = | ||
(_ref << 4) | (96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 1: | ||
rowState = value[i]; | ||
84 === rowState || | ||
65 === rowState || | ||
79 === rowState || | ||
111 === rowState || | ||
85 === rowState || | ||
83 === rowState || | ||
115 === rowState || | ||
76 === rowState || | ||
108 === rowState || | ||
71 === rowState || | ||
103 === rowState || | ||
77 === rowState || | ||
109 === rowState || | ||
86 === rowState | ||
? ((rowTag = rowState), (rowState = 2), i++) | ||
: (64 < rowState && 91 > rowState) || | ||
35 === rowState || | ||
114 === rowState || | ||
120 === rowState | ||
? ((rowTag = rowState), (rowState = 3), i++) | ||
: ((rowTag = 0), (rowState = 3)); | ||
continue; | ||
case 2: | ||
lastIdx = value[i++]; | ||
44 === lastIdx | ||
? (rowState = 4) | ||
: (rowLength = | ||
(rowLength << 4) | | ||
(96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 3: | ||
lastIdx = value.indexOf(10, i); | ||
break; | ||
case 4: | ||
(lastIdx = i + rowLength), lastIdx > value.length && (lastIdx = -1); | ||
} | ||
var offset = value.byteOffset + i; | ||
if (-1 < lastIdx) | ||
(rowLength = new Uint8Array(value.buffer, offset, lastIdx - i)), | ||
processFullBinaryRow(response, _ref, rowTag, buffer, rowLength), | ||
(i = lastIdx), | ||
3 === rowState && i++, | ||
(rowLength = _ref = rowTag = rowState = 0), | ||
(buffer.length = 0); | ||
else { | ||
value = new Uint8Array(value.buffer, offset, value.byteLength - i); | ||
buffer.push(value); | ||
rowLength -= value.byteLength; | ||
break; | ||
} | ||
} | ||
response._rowState = rowState; | ||
response._rowID = _ref; | ||
response._rowTag = rowTag; | ||
response._rowLength = rowLength; | ||
return reader.read().then(progress).catch(error); | ||
} | ||
const buffer = value; | ||
processBinaryChunk(response, buffer); | ||
return reader.read().then(progress).catch(error); | ||
} | ||
function error(e) { | ||
reportGlobalError(response, e); | ||
} | ||
var reader = stream.getReader(); | ||
reader.read().then(progress).catch(error); | ||
} | ||
function createFromReadableStream(stream, options) { | ||
const response = createResponseFromOptions(options); | ||
startReadingFromStream(response, stream); | ||
return getRoot(response); | ||
} | ||
function createFromFetch(promiseForResponse, options) { | ||
const response = createResponseFromOptions(options); | ||
promiseForResponse.then(function (r) { | ||
startReadingFromStream(response, r.body); | ||
}, function (e) { | ||
reportGlobalError(response, e); | ||
exports.createFromFetch = function (promiseForResponse, options) { | ||
var response = createResponseFromOptions(options); | ||
promiseForResponse.then( | ||
function (r) { | ||
startReadingFromStream(response, r.body); | ||
}, | ||
function (e) { | ||
reportGlobalError(response, e); | ||
} | ||
); | ||
return getChunk(response, 0); | ||
}; | ||
exports.createFromReadableStream = function (stream, options) { | ||
options = createResponseFromOptions(options); | ||
startReadingFromStream(options, stream); | ||
return getChunk(options, 0); | ||
}; | ||
exports.createServerReference = function (id, callServer) { | ||
function action() { | ||
var args = Array.prototype.slice.call(arguments); | ||
return callServer(id, args); | ||
} | ||
registerServerReference(action, { id: id, bound: null }); | ||
return action; | ||
}; | ||
exports.createTemporaryReferenceSet = function () { | ||
return new Map(); | ||
}; | ||
exports.encodeReply = function (value, options) { | ||
return new Promise(function (resolve, reject) { | ||
var abort = processReply( | ||
value, | ||
"", | ||
options && options.temporaryReferences | ||
? options.temporaryReferences | ||
: void 0, | ||
resolve, | ||
reject | ||
); | ||
if (options && options.signal) { | ||
var signal = options.signal; | ||
if (signal.aborted) abort(signal.reason); | ||
else { | ||
var listener = function () { | ||
abort(signal.reason); | ||
signal.removeEventListener("abort", listener); | ||
}; | ||
signal.addEventListener("abort", listener); | ||
} | ||
} | ||
}); | ||
return getRoot(response); | ||
} | ||
function encodeReply(value) | ||
/* We don't use URLSearchParams yet but maybe */ | ||
{ | ||
return new Promise((resolve, reject) => { | ||
processReply(value, '', resolve, reject); | ||
}); | ||
} | ||
exports.createFromFetch = createFromFetch; | ||
exports.createFromReadableStream = createFromReadableStream; | ||
exports.createServerReference = createServerReference; | ||
exports.encodeReply = encodeReply; | ||
}; |
/** | ||
* @license React | ||
* react-server-dom-webpack-client.edge.production.min.js | ||
* react-server-dom-webpack-client.edge.production.js | ||
* | ||
@@ -11,845 +11,672 @@ * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
'use strict'; | ||
var ReactDOM = require('react-dom'); | ||
function createStringDecoder() { | ||
return new TextDecoder(); | ||
} | ||
const decoderOptions = { | ||
stream: true | ||
}; | ||
function readPartialStringChunk(decoder, buffer) { | ||
return decoder.decode(buffer, decoderOptions); | ||
} | ||
function readFinalStringChunk(decoder, buffer) { | ||
return decoder.decode(buffer); | ||
} | ||
// This is the parsed shape of the wire format which is why it is | ||
// condensed to only the essential information | ||
const ID = 0; | ||
const CHUNKS = 1; | ||
const NAME = 2; // export const ASYNC = 3; | ||
// This logic is correct because we currently only include the 4th tuple member | ||
// when the module is async. If that changes we will need to actually assert | ||
// the value is true. We don't index into the 4th slot because flow does not | ||
// like the potential out of bounds access | ||
function isAsyncImport(metadata) { | ||
return metadata.length === 4; | ||
} | ||
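// Illustrative sketch (not from either bundle): a resolved client reference metadata tuple | ||
// has the shape [id, chunks, name] or [id, chunks, name, 1] for async modules, where | ||
// chunks is double-indexed as [chunkId0, filename0, chunkId1, filename1, ...], e.g. | ||
// ["./Button.js", ["172", "static/chunks/172.js"], "default", 1] (hypothetical values). | ||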
// The reason this function needs to be defined here in this file instead of just | ||
// being exported directly from the WebpackDestination... file is because the | ||
// ClientReferenceMetadata is opaque and we can't unwrap it there. | ||
// This should get inlined and we could also just implement an unwrapping function | ||
// though that risks it getting used in places it shouldn't be. This is unfortunate | ||
// but currently it seems to be the best option we have. | ||
function prepareDestinationForModule(moduleLoading, nonce, metadata) { | ||
prepareDestinationWithChunks(moduleLoading, metadata[CHUNKS], nonce); | ||
} | ||
"use strict"; | ||
var ReactDOM = require("react-dom"), | ||
decoderOptions = { stream: !0 }; | ||
function resolveClientReference(bundlerConfig, metadata) { | ||
if (bundlerConfig) { | ||
const moduleExports = bundlerConfig[metadata[ID]]; | ||
let resolvedModuleData = moduleExports[metadata[NAME]]; | ||
let name; | ||
if (resolvedModuleData) { | ||
// The potentially aliased name. | ||
name = resolvedModuleData.name; | ||
} else { | ||
// If we don't have this specific name, we might have the full module. | ||
resolvedModuleData = moduleExports['*']; | ||
if (!resolvedModuleData) { | ||
throw new Error('Could not find the module "' + metadata[ID] + '" in the React SSR Manifest. ' + 'This is probably a bug in the React Server Components bundler.'); | ||
} | ||
name = metadata[NAME]; | ||
var moduleExports = bundlerConfig[metadata[0]]; | ||
if ((bundlerConfig = moduleExports && moduleExports[metadata[2]])) | ||
moduleExports = bundlerConfig.name; | ||
else { | ||
bundlerConfig = moduleExports && moduleExports["*"]; | ||
if (!bundlerConfig) | ||
throw Error( | ||
'Could not find the module "' + | ||
metadata[0] + | ||
'" in the React Server Consumer Manifest. This is probably a bug in the React Server Components bundler.' | ||
); | ||
moduleExports = metadata[2]; | ||
} | ||
if (isAsyncImport(metadata)) { | ||
return [resolvedModuleData.id, resolvedModuleData.chunks, name, 1 | ||
/* async */ | ||
]; | ||
} else { | ||
return [resolvedModuleData.id, resolvedModuleData.chunks, name]; | ||
} | ||
return 4 === metadata.length | ||
? [bundlerConfig.id, bundlerConfig.chunks, moduleExports, 1] | ||
: [bundlerConfig.id, bundlerConfig.chunks, moduleExports]; | ||
} | ||
return metadata; | ||
} | ||
// If they're still pending they're a thenable. This map also exists | ||
// in Webpack but unfortunately it's not exposed so we have to | ||
// replicate it in user space. null means that it has already loaded. | ||
const chunkCache = new Map(); | ||
function resolveServerReference(bundlerConfig, id) { | ||
var name = "", | ||
resolvedModuleData = bundlerConfig[id]; | ||
if (resolvedModuleData) name = resolvedModuleData.name; | ||
else { | ||
var idx = id.lastIndexOf("#"); | ||
-1 !== idx && | ||
((name = id.slice(idx + 1)), | ||
(resolvedModuleData = bundlerConfig[id.slice(0, idx)])); | ||
if (!resolvedModuleData) | ||
throw Error( | ||
'Could not find the module "' + | ||
id + | ||
'" in the React Server Manifest. This is probably a bug in the React Server Components bundler.' | ||
); | ||
} | ||
return resolvedModuleData.async | ||
? [resolvedModuleData.id, resolvedModuleData.chunks, name, 1] | ||
: [resolvedModuleData.id, resolvedModuleData.chunks, name]; | ||
} | ||
var chunkCache = new Map(); | ||
function requireAsyncModule(id) { | ||
// We've already loaded all the chunks. We can require the module. | ||
const promise = __webpack_require__(id); | ||
if (typeof promise.then !== 'function') { | ||
// This wasn't a promise after all. | ||
var promise = __webpack_require__(id); | ||
if ("function" !== typeof promise.then || "fulfilled" === promise.status) | ||
return null; | ||
} else if (promise.status === 'fulfilled') { | ||
// This module was already resolved earlier. | ||
return null; | ||
} else { | ||
// Instrument the Promise to stash the result. | ||
promise.then(value => { | ||
const fulfilledThenable = promise; | ||
fulfilledThenable.status = 'fulfilled'; | ||
fulfilledThenable.value = value; | ||
}, reason => { | ||
const rejectedThenable = promise; | ||
rejectedThenable.status = 'rejected'; | ||
rejectedThenable.reason = reason; | ||
}); | ||
return promise; | ||
} | ||
promise.then( | ||
function (value) { | ||
promise.status = "fulfilled"; | ||
promise.value = value; | ||
}, | ||
function (reason) { | ||
promise.status = "rejected"; | ||
promise.reason = reason; | ||
} | ||
); | ||
return promise; | ||
} | ||
function ignoreReject() {// We rely on rejected promises to be handled by another listener. | ||
} // Start preloading the modules since we might need them soon. | ||
// This function doesn't suspend. | ||
function ignoreReject() {} | ||
function preloadModule(metadata) { | ||
const chunks = metadata[CHUNKS]; | ||
const promises = []; | ||
let i = 0; | ||
while (i < chunks.length) { | ||
const chunkId = chunks[i++]; | ||
for (var chunks = metadata[1], promises = [], i = 0; i < chunks.length; ) { | ||
var chunkId = chunks[i++]; | ||
chunks[i++]; | ||
const entry = chunkCache.get(chunkId); | ||
if (entry === undefined) { | ||
const thenable = loadChunk(chunkId); | ||
promises.push(thenable); // $FlowFixMe[method-unbinding] | ||
const resolve = chunkCache.set.bind(chunkCache, chunkId, null); | ||
thenable.then(resolve, ignoreReject); | ||
chunkCache.set(chunkId, thenable); | ||
} else if (entry !== null) { | ||
var entry = chunkCache.get(chunkId); | ||
if (void 0 === entry) { | ||
entry = __webpack_chunk_load__(chunkId); | ||
promises.push(entry); | ||
} | ||
var resolve = chunkCache.set.bind(chunkCache, chunkId, null); | ||
entry.then(resolve, ignoreReject); | ||
chunkCache.set(chunkId, entry); | ||
} else null !== entry && promises.push(entry); | ||
} | ||
if (isAsyncImport(metadata)) { | ||
if (promises.length === 0) { | ||
return requireAsyncModule(metadata[ID]); | ||
} else { | ||
return Promise.all(promises).then(() => { | ||
return requireAsyncModule(metadata[ID]); | ||
}); | ||
} | ||
} else if (promises.length > 0) { | ||
return Promise.all(promises); | ||
} else { | ||
return null; | ||
} | ||
} // Actually require the module or suspend if it's not yet ready. | ||
// Increase priority if necessary. | ||
return 4 === metadata.length | ||
? 0 === promises.length | ||
? requireAsyncModule(metadata[0]) | ||
: Promise.all(promises).then(function () { | ||
return requireAsyncModule(metadata[0]); | ||
}) | ||
: 0 < promises.length | ||
? Promise.all(promises) | ||
: null; | ||
} | ||
function requireModule(metadata) { | ||
let moduleExports = __webpack_require__(metadata[ID]); | ||
if (isAsyncImport(metadata)) { | ||
if (typeof moduleExports.then !== 'function') ; else if (moduleExports.status === 'fulfilled') { | ||
// This Promise should've been instrumented by preloadModule. | ||
var moduleExports = __webpack_require__(metadata[0]); | ||
if (4 === metadata.length && "function" === typeof moduleExports.then) | ||
if ("fulfilled" === moduleExports.status) | ||
moduleExports = moduleExports.value; | ||
} else { | ||
throw moduleExports.reason; | ||
else throw moduleExports.reason; | ||
return "*" === metadata[2] | ||
? moduleExports | ||
: "" === metadata[2] | ||
? moduleExports.__esModule | ||
? moduleExports.default | ||
: moduleExports | ||
: moduleExports[metadata[2]]; | ||
} | ||
function prepareDestinationWithChunks(moduleLoading, chunks, nonce$jscomp$0) { | ||
if (null !== moduleLoading) | ||
for (var i = 1; i < chunks.length; i += 2) { | ||
var nonce = nonce$jscomp$0, | ||
JSCompiler_temp_const = ReactDOMSharedInternals.d, | ||
JSCompiler_temp_const$jscomp$0 = JSCompiler_temp_const.X, | ||
JSCompiler_temp_const$jscomp$1 = moduleLoading.prefix + chunks[i]; | ||
var JSCompiler_inline_result = moduleLoading.crossOrigin; | ||
JSCompiler_inline_result = | ||
"string" === typeof JSCompiler_inline_result | ||
? "use-credentials" === JSCompiler_inline_result | ||
? JSCompiler_inline_result | ||
: "" | ||
: void 0; | ||
JSCompiler_temp_const$jscomp$0.call( | ||
JSCompiler_temp_const, | ||
JSCompiler_temp_const$jscomp$1, | ||
{ crossOrigin: JSCompiler_inline_result, nonce: nonce } | ||
); | ||
} | ||
} | ||
if (metadata[NAME] === '*') { | ||
// This is a placeholder value that represents that the caller imported this | ||
// as a CommonJS module as is. | ||
return moduleExports; | ||
} | ||
if (metadata[NAME] === '') { | ||
// This is a placeholder value that represents that the caller accessed the | ||
// default property of this if it was an ESM interop module. | ||
return moduleExports.__esModule ? moduleExports.default : moduleExports; | ||
} | ||
return moduleExports[metadata[NAME]]; | ||
} | ||
function loadChunk(chunkId, filename) { | ||
return __webpack_chunk_load__(chunkId); | ||
var ReactDOMSharedInternals = | ||
ReactDOM.__DOM_INTERNALS_DO_NOT_USE_OR_WARN_USERS_THEY_CANNOT_UPGRADE, | ||
REACT_ELEMENT_TYPE = Symbol.for("react.transitional.element"), | ||
REACT_LAZY_TYPE = Symbol.for("react.lazy"), | ||
REACT_POSTPONE_TYPE = Symbol.for("react.postpone"), | ||
MAYBE_ITERATOR_SYMBOL = Symbol.iterator; | ||
function getIteratorFn(maybeIterable) { | ||
if (null === maybeIterable || "object" !== typeof maybeIterable) return null; | ||
maybeIterable = | ||
(MAYBE_ITERATOR_SYMBOL && maybeIterable[MAYBE_ITERATOR_SYMBOL]) || | ||
maybeIterable["@@iterator"]; | ||
return "function" === typeof maybeIterable ? maybeIterable : null; | ||
} | ||
function prepareDestinationWithChunks(moduleLoading, // Chunks are double-indexed [..., idx, filenamex, idy, filenamey, ...] | ||
chunks, nonce) { | ||
if (moduleLoading !== null) { | ||
for (let i = 1; i < chunks.length; i += 2) { | ||
preinitScriptForSSR(moduleLoading.prefix + chunks[i], nonce, moduleLoading.crossOrigin); | ||
var ASYNC_ITERATOR = Symbol.asyncIterator, | ||
isArrayImpl = Array.isArray, | ||
getPrototypeOf = Object.getPrototypeOf, | ||
ObjectPrototype = Object.prototype, | ||
knownServerReferences = new WeakMap(); | ||
function serializeNumber(number) { | ||
return Number.isFinite(number) | ||
? 0 === number && -Infinity === 1 / number | ||
? "$-0" | ||
: number | ||
: Infinity === number | ||
? "$Infinity" | ||
: -Infinity === number | ||
? "$-Infinity" | ||
: "$NaN"; | ||
} | ||
function processReply( | ||
root, | ||
formFieldPrefix, | ||
temporaryReferences, | ||
resolve, | ||
reject | ||
) { | ||
function serializeTypedArray(tag, typedArray) { | ||
typedArray = new Blob([ | ||
new Uint8Array( | ||
typedArray.buffer, | ||
typedArray.byteOffset, | ||
typedArray.byteLength | ||
) | ||
]); | ||
var blobId = nextPartId++; | ||
null === formData && (formData = new FormData()); | ||
formData.append(formFieldPrefix + blobId, typedArray); | ||
return "$" + tag + blobId.toString(16); | ||
} | ||
function serializeBinaryReader(reader) { | ||
function progress(entry) { | ||
entry.done | ||
? ((entry = nextPartId++), | ||
data.append(formFieldPrefix + entry, new Blob(buffer)), | ||
data.append( | ||
formFieldPrefix + streamId, | ||
'"$o' + entry.toString(16) + '"' | ||
), | ||
data.append(formFieldPrefix + streamId, "C"), | ||
pendingParts--, | ||
0 === pendingParts && resolve(data)) | ||
: (buffer.push(entry.value), | ||
reader.read(new Uint8Array(1024)).then(progress, reject)); | ||
} | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++, | ||
buffer = []; | ||
reader.read(new Uint8Array(1024)).then(progress, reject); | ||
return "$r" + streamId.toString(16); | ||
} | ||
} | ||
const ReactDOMSharedInternals = ReactDOM.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED; | ||
function getCrossOriginString(input) { | ||
if (typeof input === 'string') { | ||
return input === 'use-credentials' ? input : ''; | ||
} | ||
return undefined; | ||
} | ||
// This client file is in the shared folder because it applies to both SSR and browser contexts. | ||
const ReactDOMCurrentDispatcher = ReactDOMSharedInternals.Dispatcher; | ||
function dispatchHint(code, model) { | ||
const dispatcher = ReactDOMCurrentDispatcher.current; | ||
if (dispatcher) { | ||
switch (code) { | ||
case 'D': | ||
{ | ||
const refined = refineModel(code, model); | ||
const href = refined; | ||
dispatcher.prefetchDNS(href); | ||
return; | ||
function serializeReader(reader) { | ||
function progress(entry) { | ||
if (entry.done) | ||
data.append(formFieldPrefix + streamId, "C"), | ||
pendingParts--, | ||
0 === pendingParts && resolve(data); | ||
else | ||
try { | ||
var partJSON = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, partJSON); | ||
reader.read().then(progress, reject); | ||
} catch (x) { | ||
reject(x); | ||
} | ||
case 'C': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preconnect(href); | ||
} else { | ||
const href = refined[0]; | ||
const crossOrigin = refined[1]; | ||
dispatcher.preconnect(href, crossOrigin); | ||
} | ||
return; | ||
} | ||
case 'L': | ||
{ | ||
const refined = refineModel(code, model); | ||
const href = refined[0]; | ||
const as = refined[1]; | ||
if (refined.length === 3) { | ||
const options = refined[2]; | ||
dispatcher.preload(href, as, options); | ||
} else { | ||
dispatcher.preload(href, as); | ||
} | ||
return; | ||
} | ||
case 'm': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preloadModule(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preloadModule(href, options); | ||
} | ||
return; | ||
} | ||
case 'S': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitStyle(href); | ||
} else { | ||
const href = refined[0]; | ||
const precedence = refined[1] === 0 ? undefined : refined[1]; | ||
const options = refined.length === 3 ? refined[2] : undefined; | ||
dispatcher.preinitStyle(href, precedence, options); | ||
} | ||
return; | ||
} | ||
case 'X': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitScript(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preinitScript(href, options); | ||
} | ||
return; | ||
} | ||
case 'M': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitModuleScript(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preinitModuleScript(href, options); | ||
} | ||
return; | ||
} | ||
} | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++; | ||
reader.read().then(progress, reject); | ||
return "$R" + streamId.toString(16); | ||
} | ||
} // Flow is having trouble refining the HintModels so we help it a bit. | ||
// This should be compiled out in the production build. | ||
function refineModel(code, model) { | ||
return model; | ||
} | ||
function preinitScriptForSSR(href, nonce, crossOrigin) { | ||
const dispatcher = ReactDOMCurrentDispatcher.current; | ||
if (dispatcher) { | ||
dispatcher.preinitScript(href, { | ||
crossOrigin: getCrossOriginString(crossOrigin), | ||
nonce | ||
}); | ||
function serializeReadableStream(stream) { | ||
try { | ||
var binaryReader = stream.getReader({ mode: "byob" }); | ||
} catch (x) { | ||
return serializeReader(stream.getReader()); | ||
} | ||
return serializeBinaryReader(binaryReader); | ||
} | ||
} | ||
// ATTENTION | ||
// When adding new symbols to this file, | ||
// Please consider also adding to 'react-devtools-shared/src/backend/ReactSymbols' | ||
// The Symbol used to tag the ReactElement-like types. | ||
const REACT_ELEMENT_TYPE = Symbol.for('react.element'); | ||
const REACT_LAZY_TYPE = Symbol.for('react.lazy'); | ||
const REACT_POSTPONE_TYPE = Symbol.for('react.postpone'); | ||
const MAYBE_ITERATOR_SYMBOL = Symbol.iterator; | ||
const FAUX_ITERATOR_SYMBOL = '@@iterator'; | ||
function getIteratorFn(maybeIterable) { | ||
if (maybeIterable === null || typeof maybeIterable !== 'object') { | ||
return null; | ||
} | ||
const maybeIterator = MAYBE_ITERATOR_SYMBOL && maybeIterable[MAYBE_ITERATOR_SYMBOL] || maybeIterable[FAUX_ITERATOR_SYMBOL]; | ||
if (typeof maybeIterator === 'function') { | ||
return maybeIterator; | ||
} | ||
return null; | ||
} | ||
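// Minimal usage sketch (illustrative): getIteratorFn returns the iterator | ||
// factory for iterables and null for everything else. | ||
//   getIteratorFn([1, 2, 3]);   // -> Array.prototype[Symbol.iterator] | ||
//   getIteratorFn(new Map());   // -> Map.prototype[Symbol.iterator] | ||
//   getIteratorFn({ a: 1 });    // -> null (plain objects are not iterable) | ||
//   getIteratorFn(null);        // -> null | ||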
const isArrayImpl = Array.isArray; // eslint-disable-next-line no-redeclare | ||
function isArray(a) { | ||
return isArrayImpl(a); | ||
} | ||
const getPrototypeOf = Object.getPrototypeOf; | ||
const ObjectPrototype = Object.prototype; | ||
const knownServerReferences = new WeakMap(); // Serializable values | ||
// Thenable<ReactServerValue> | ||
// function serializeByValueID(id: number): string { | ||
// return '$' + id.toString(16); | ||
// } | ||
function serializePromiseID(id) { | ||
return '$@' + id.toString(16); | ||
} | ||
function serializeServerReferenceID(id) { | ||
return '$F' + id.toString(16); | ||
} | ||
function serializeSymbolReference(name) { | ||
return '$S' + name; | ||
} | ||
function serializeFormDataReference(id) { | ||
// Why K? F is "Function". D is "Date". What else? | ||
return '$K' + id.toString(16); | ||
} | ||
function serializeNumber(number) { | ||
if (Number.isFinite(number)) { | ||
if (number === 0 && 1 / number === -Infinity) { | ||
return '$-0'; | ||
} else { | ||
return number; | ||
function serializeAsyncIterable(iterable, iterator) { | ||
function progress(entry) { | ||
if (entry.done) { | ||
if (void 0 === entry.value) | ||
data.append(formFieldPrefix + streamId, "C"); | ||
else | ||
try { | ||
var partJSON = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, "C" + partJSON); | ||
} catch (x) { | ||
reject(x); | ||
return; | ||
} | ||
pendingParts--; | ||
0 === pendingParts && resolve(data); | ||
} else | ||
try { | ||
var partJSON$22 = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, partJSON$22); | ||
iterator.next().then(progress, reject); | ||
} catch (x$23) { | ||
reject(x$23); | ||
} | ||
} | ||
} else { | ||
if (number === Infinity) { | ||
return '$Infinity'; | ||
} else if (number === -Infinity) { | ||
return '$-Infinity'; | ||
} else { | ||
return '$NaN'; | ||
} | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++; | ||
iterable = iterable === iterator; | ||
iterator.next().then(progress, reject); | ||
return "$" + (iterable ? "x" : "X") + streamId.toString(16); | ||
} | ||
} | ||
function serializeUndefined() { | ||
return '$undefined'; | ||
} | ||
function serializeDateFromDateJSON(dateJSON) { | ||
// JSON.stringify automatically calls Date.prototype.toJSON which calls toISOString. | ||
// We need only tack on a $D prefix. | ||
return '$D' + dateJSON; | ||
} | ||
function serializeBigInt(n) { | ||
return '$n' + n.toString(10); | ||
} | ||
function serializeMapID(id) { | ||
return '$Q' + id.toString(16); | ||
} | ||
function serializeSetID(id) { | ||
return '$W' + id.toString(16); | ||
} | ||
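// Informal quick reference for the '$'-prefixed reply tags produced by the | ||
// helpers above (example ids are arbitrary): | ||
//   serializePromiseID(10)                  // -> '$@a'  (hex part id) | ||
//   serializeServerReferenceID(2)           // -> '$F2' | ||
//   serializeSymbolReference('react.memo')  // -> '$Sreact.memo' | ||
//   serializeFormDataReference(3)           // -> '$K3' | ||
//   serializeMapID(4)                       // -> '$Q4' | ||
//   serializeSetID(5)                       // -> '$W5' | ||
//   serializeDateFromDateJSON('2024-01-01T00:00:00.000Z') // -> '$D2024-01-01T00:00:00.000Z' | ||
//   serializeBigInt(42n)                    // -> '$n42' | ||
//   serializeNumber(-0)                     // -> '$-0'; NaN -> '$NaN'; Infinity -> '$Infinity' | ||
//   serializeUndefined()                    // -> '$undefined' | ||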
function escapeStringValue(value) { | ||
if (value[0] === '$') { | ||
// We need to escape $ prefixed strings since we use those to encode | ||
// references to IDs and as special symbol values. | ||
return '$' + value; | ||
} else { | ||
return value; | ||
} | ||
} | ||
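// Illustrative: ordinary strings pass through unchanged, while strings that | ||
// happen to start with '$' are escaped so they cannot collide with the | ||
// reference encodings above. | ||
//   escapeStringValue('hello');  // -> 'hello' | ||
//   escapeStringValue('$@a');    // -> '$$@a' (decoded back to '$@a' on the server) | ||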
function processReply(root, formFieldPrefix, resolve, reject) { | ||
let nextPartId = 1; | ||
let pendingParts = 0; | ||
let formData = null; | ||
function resolveToJSON(key, value) { | ||
const parent = this; // Make sure that `parent[key]` wasn't JSONified before `value` was passed to us | ||
if (value === null) { | ||
return null; | ||
} | ||
if (typeof value === 'object') { | ||
// $FlowFixMe[method-unbinding] | ||
if (typeof value.then === 'function') { | ||
// We assume that any object with a .then property is a "Thenable" type, | ||
// or a Promise type, either of which can be represented by a Promise. | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to stream this value. | ||
formData = new FormData(); | ||
} | ||
if (null === value) return null; | ||
if ("object" === typeof value) { | ||
switch (value.$$typeof) { | ||
case REACT_ELEMENT_TYPE: | ||
if (void 0 !== temporaryReferences && -1 === key.indexOf(":")) { | ||
var parentReference = writtenObjects.get(this); | ||
if (void 0 !== parentReference) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), | ||
"$T" | ||
); | ||
} | ||
throw Error( | ||
"React Element cannot be passed to Server Functions from the Client without a temporary reference set. Pass a TemporaryReferenceSet to the options." | ||
); | ||
case REACT_LAZY_TYPE: | ||
parentReference = value._payload; | ||
var init = value._init; | ||
null === formData && (formData = new FormData()); | ||
pendingParts++; | ||
try { | ||
var resolvedModel = init(parentReference), | ||
lazyId = nextPartId++, | ||
partJSON = serializeModel(resolvedModel, lazyId); | ||
formData.append(formFieldPrefix + lazyId, partJSON); | ||
return "$" + lazyId.toString(16); | ||
} catch (x) { | ||
if ( | ||
"object" === typeof x && | ||
null !== x && | ||
"function" === typeof x.then | ||
) { | ||
pendingParts++; | ||
var lazyId$24 = nextPartId++; | ||
parentReference = function () { | ||
try { | ||
var partJSON$25 = serializeModel(value, lazyId$24), | ||
data$26 = formData; | ||
data$26.append(formFieldPrefix + lazyId$24, partJSON$25); | ||
pendingParts--; | ||
0 === pendingParts && resolve(data$26); | ||
} catch (reason) { | ||
reject(reason); | ||
} | ||
}; | ||
x.then(parentReference, parentReference); | ||
return "$" + lazyId$24.toString(16); | ||
} | ||
reject(x); | ||
return null; | ||
} finally { | ||
pendingParts--; | ||
} | ||
} | ||
if ("function" === typeof value.then) { | ||
null === formData && (formData = new FormData()); | ||
pendingParts++; | ||
const promiseId = nextPartId++; | ||
const thenable = value; | ||
thenable.then(partValue => { | ||
const partJSON = JSON.stringify(partValue, resolveToJSON); // $FlowFixMe[incompatible-type] We know it's not null because we assigned it above. | ||
const data = formData; // eslint-disable-next-line react-internal/safe-string-coercion | ||
data.append(formFieldPrefix + promiseId, partJSON); | ||
pendingParts--; | ||
if (pendingParts === 0) { | ||
resolve(data); | ||
var promiseId = nextPartId++; | ||
value.then(function (partValue) { | ||
try { | ||
var partJSON$28 = serializeModel(partValue, promiseId); | ||
partValue = formData; | ||
partValue.append(formFieldPrefix + promiseId, partJSON$28); | ||
pendingParts--; | ||
0 === pendingParts && resolve(partValue); | ||
} catch (reason) { | ||
reject(reason); | ||
} | ||
}, reason => { | ||
// In the future we could consider serializing this as an error | ||
// that throws on the server instead. | ||
reject(reason); | ||
}); | ||
return serializePromiseID(promiseId); | ||
}, reject); | ||
return "$@" + promiseId.toString(16); | ||
} | ||
if (isArray(value)) { | ||
// $FlowFixMe[incompatible-return] | ||
return value; | ||
} // TODO: Should we use Object.prototype.toString.call() to test for cross-realm objects? | ||
parentReference = writtenObjects.get(value); | ||
if (void 0 !== parentReference) | ||
if (modelRoot === value) modelRoot = null; | ||
else return parentReference; | ||
else | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference && | ||
((key = parentReference + ":" + key), | ||
writtenObjects.set(value, key), | ||
void 0 !== temporaryReferences && | ||
temporaryReferences.set(key, value))); | ||
if (isArrayImpl(value)) return value; | ||
if (value instanceof FormData) { | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to use rich objects as its values. | ||
formData = new FormData(); | ||
} | ||
const data = formData; | ||
const refId = nextPartId++; // Copy all the form fields with a prefix for this reference. | ||
// These must come first in the form order because we assume that all the | ||
// fields are available before this is referenced. | ||
const prefix = formFieldPrefix + refId + '_'; // $FlowFixMe[prop-missing]: FormData has forEach. | ||
value.forEach((originalValue, originalKey) => { | ||
data.append(prefix + originalKey, originalValue); | ||
null === formData && (formData = new FormData()); | ||
var data$32 = formData; | ||
key = nextPartId++; | ||
var prefix = formFieldPrefix + key + "_"; | ||
value.forEach(function (originalValue, originalKey) { | ||
data$32.append(prefix + originalKey, originalValue); | ||
}); | ||
return serializeFormDataReference(refId); | ||
return "$K" + key.toString(16); | ||
} | ||
if (value instanceof Map) { | ||
const partJSON = JSON.stringify(Array.from(value), resolveToJSON); | ||
if (formData === null) { | ||
formData = new FormData(); | ||
} | ||
const mapId = nextPartId++; | ||
formData.append(formFieldPrefix + mapId, partJSON); | ||
return serializeMapID(mapId); | ||
if (value instanceof Map) | ||
return ( | ||
(key = nextPartId++), | ||
(parentReference = serializeModel(Array.from(value), key)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$Q" + key.toString(16) | ||
); | ||
if (value instanceof Set) | ||
return ( | ||
(key = nextPartId++), | ||
(parentReference = serializeModel(Array.from(value), key)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$W" + key.toString(16) | ||
); | ||
if (value instanceof ArrayBuffer) | ||
return ( | ||
(key = new Blob([value])), | ||
(parentReference = nextPartId++), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + parentReference, key), | ||
"$A" + parentReference.toString(16) | ||
); | ||
if (value instanceof Int8Array) return serializeTypedArray("O", value); | ||
if (value instanceof Uint8Array) return serializeTypedArray("o", value); | ||
if (value instanceof Uint8ClampedArray) | ||
return serializeTypedArray("U", value); | ||
if (value instanceof Int16Array) return serializeTypedArray("S", value); | ||
if (value instanceof Uint16Array) return serializeTypedArray("s", value); | ||
if (value instanceof Int32Array) return serializeTypedArray("L", value); | ||
if (value instanceof Uint32Array) return serializeTypedArray("l", value); | ||
if (value instanceof Float32Array) return serializeTypedArray("G", value); | ||
if (value instanceof Float64Array) return serializeTypedArray("g", value); | ||
if (value instanceof BigInt64Array) | ||
return serializeTypedArray("M", value); | ||
if (value instanceof BigUint64Array) | ||
return serializeTypedArray("m", value); | ||
if (value instanceof DataView) return serializeTypedArray("V", value); | ||
if ("function" === typeof Blob && value instanceof Blob) | ||
return ( | ||
null === formData && (formData = new FormData()), | ||
(key = nextPartId++), | ||
formData.append(formFieldPrefix + key, value), | ||
"$B" + key.toString(16) | ||
); | ||
if ((key = getIteratorFn(value))) | ||
return ( | ||
(parentReference = key.call(value)), | ||
parentReference === value | ||
? ((key = nextPartId++), | ||
(parentReference = serializeModel( | ||
Array.from(parentReference), | ||
key | ||
)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$i" + key.toString(16)) | ||
: Array.from(parentReference) | ||
); | ||
if ( | ||
"function" === typeof ReadableStream && | ||
value instanceof ReadableStream | ||
) | ||
return serializeReadableStream(value); | ||
key = value[ASYNC_ITERATOR]; | ||
if ("function" === typeof key) | ||
return serializeAsyncIterable(value, key.call(value)); | ||
key = getPrototypeOf(value); | ||
if ( | ||
key !== ObjectPrototype && | ||
(null === key || null !== getPrototypeOf(key)) | ||
) { | ||
if (void 0 === temporaryReferences) | ||
throw Error( | ||
"Only plain objects, and a few built-ins, can be passed to Server Functions. Classes or null prototypes are not supported." | ||
); | ||
return "$T"; | ||
} | ||
if (value instanceof Set) { | ||
const partJSON = JSON.stringify(Array.from(value), resolveToJSON); | ||
if (formData === null) { | ||
formData = new FormData(); | ||
} | ||
const setId = nextPartId++; | ||
formData.append(formFieldPrefix + setId, partJSON); | ||
return serializeSetID(setId); | ||
} | ||
const iteratorFn = getIteratorFn(value); | ||
if (iteratorFn) { | ||
return Array.from(value); | ||
} // Verify that this is a simple plain object. | ||
const proto = getPrototypeOf(value); | ||
if (proto !== ObjectPrototype && (proto === null || getPrototypeOf(proto) !== null)) { | ||
throw new Error('Only plain objects, and a few built-ins, can be passed to Server Actions. ' + 'Classes or null prototypes are not supported.'); | ||
} | ||
return value; | ||
} | ||
if (typeof value === 'string') { | ||
// TODO: Maybe too clever. If we support URL there's no similar trick. | ||
if (value[value.length - 1] === 'Z') { | ||
// Possibly a Date, whose toJSON automatically calls toISOString | ||
// $FlowFixMe[incompatible-use] | ||
const originalValue = parent[key]; | ||
if (originalValue instanceof Date) { | ||
return serializeDateFromDateJSON(value); | ||
} | ||
} | ||
return escapeStringValue(value); | ||
if ("string" === typeof value) { | ||
if ("Z" === value[value.length - 1] && this[key] instanceof Date) | ||
return "$D" + value; | ||
key = "$" === value[0] ? "$" + value : value; | ||
return key; | ||
} | ||
if (typeof value === 'boolean') { | ||
return value; | ||
if ("boolean" === typeof value) return value; | ||
if ("number" === typeof value) return serializeNumber(value); | ||
if ("undefined" === typeof value) return "$undefined"; | ||
if ("function" === typeof value) { | ||
parentReference = knownServerReferences.get(value); | ||
if (void 0 !== parentReference) | ||
return ( | ||
(key = JSON.stringify(parentReference, resolveToJSON)), | ||
null === formData && (formData = new FormData()), | ||
(parentReference = nextPartId++), | ||
formData.set(formFieldPrefix + parentReference, key), | ||
"$F" + parentReference.toString(16) | ||
); | ||
if ( | ||
void 0 !== temporaryReferences && | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference) | ||
) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), "$T" | ||
); | ||
throw Error( | ||
"Client Functions cannot be passed directly to Server Functions. Only Functions passed from the Server can be passed back again." | ||
); | ||
} | ||
if (typeof value === 'number') { | ||
return serializeNumber(value); | ||
if ("symbol" === typeof value) { | ||
if ( | ||
void 0 !== temporaryReferences && | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference) | ||
) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), "$T" | ||
); | ||
throw Error( | ||
"Symbols cannot be passed to a Server Function without a temporary reference set. Pass a TemporaryReferenceSet to the options." | ||
); | ||
} | ||
if (typeof value === 'undefined') { | ||
return serializeUndefined(); | ||
} | ||
if (typeof value === 'function') { | ||
const metaData = knownServerReferences.get(value); | ||
if (metaData !== undefined) { | ||
const metaDataJSON = JSON.stringify(metaData, resolveToJSON); | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to stream this value. | ||
formData = new FormData(); | ||
} // The reference to this function came from the same client so we can pass it back. | ||
const refId = nextPartId++; // eslint-disable-next-line react-internal/safe-string-coercion | ||
formData.set(formFieldPrefix + refId, metaDataJSON); | ||
return serializeServerReferenceID(refId); | ||
} | ||
throw new Error('Client Functions cannot be passed directly to Server Functions. ' + 'Only Functions passed from the Server can be passed back again.'); | ||
} | ||
if (typeof value === 'symbol') { | ||
// $FlowFixMe[incompatible-type] `description` might be undefined | ||
const name = value.description; | ||
if (Symbol.for(name) !== value) { | ||
throw new Error('Only global symbols received from Symbol.for(...) can be passed to Server Functions. ' + ("The symbol Symbol.for(" + // $FlowFixMe[incompatible-type] `description` might be undefined | ||
value.description + ") cannot be found among global symbols.")); | ||
} | ||
return serializeSymbolReference(name); | ||
} | ||
if (typeof value === 'bigint') { | ||
return serializeBigInt(value); | ||
} | ||
throw new Error("Type " + typeof value + " is not supported as an argument to a Server Function."); | ||
} // $FlowFixMe[incompatible-type] it's not going to be undefined because we'll encode it. | ||
const json = JSON.stringify(root, resolveToJSON); | ||
if (formData === null) { | ||
// If it's a simple data structure, we just use plain JSON. | ||
resolve(json); | ||
} else { | ||
// Otherwise, we use FormData to let us stream in the result. | ||
formData.set(formFieldPrefix + '0', json); | ||
if (pendingParts === 0) { | ||
// $FlowFixMe[incompatible-call] this has already been refined. | ||
resolve(formData); | ||
} | ||
if ("bigint" === typeof value) return "$n" + value.toString(10); | ||
throw Error( | ||
"Type " + | ||
typeof value + | ||
" is not supported as an argument to a Server Function." | ||
); | ||
} | ||
function serializeModel(model, id) { | ||
"object" === typeof model && | ||
null !== model && | ||
((id = "$" + id.toString(16)), | ||
writtenObjects.set(model, id), | ||
void 0 !== temporaryReferences && temporaryReferences.set(id, model)); | ||
modelRoot = model; | ||
return JSON.stringify(model, resolveToJSON); | ||
} | ||
var nextPartId = 1, | ||
pendingParts = 0, | ||
formData = null, | ||
writtenObjects = new WeakMap(), | ||
modelRoot = root, | ||
json = serializeModel(root, 0); | ||
null === formData | ||
? resolve(json) | ||
: (formData.set(formFieldPrefix + "0", json), | ||
0 === pendingParts && resolve(formData)); | ||
return function () { | ||
0 < pendingParts && | ||
((pendingParts = 0), | ||
null === formData ? resolve(json) : resolve(formData)); | ||
}; | ||
} | ||
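// Usage sketch (illustrative, following the five-argument call encodeFormData | ||
// makes below: root, formFieldPrefix, temporaryReferences, resolve, reject). | ||
// Simple data resolves as a plain JSON string; once a Promise, Blob, stream or | ||
// similar part has to be appended as its own field, the reply is upgraded to | ||
// FormData with the root model stored under field "0". | ||
//   processReply({ a: 1 }, '', undefined, | ||
//     body => console.log(typeof body),               // 'string' | ||
//     err => console.error(err)); | ||
//   processReply({ later: Promise.resolve(42) }, '', undefined, | ||
//     body => console.log(body instanceof FormData),  // true | ||
//     err => console.error(err)); | ||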
const boundCache = new WeakMap(); | ||
var boundCache = new WeakMap(); | ||
function encodeFormData(reference) { | ||
let resolve, reject; // We need to have a handle on the thenable so that we can synchronously set | ||
// its status from processReply, when it can complete synchronously. | ||
const thenable = new Promise((res, rej) => { | ||
resolve = res; | ||
reject = rej; | ||
}); | ||
processReply(reference, '', body => { | ||
if (typeof body === 'string') { | ||
const data = new FormData(); | ||
data.append('0', body); | ||
body = data; | ||
var resolve, | ||
reject, | ||
thenable = new Promise(function (res, rej) { | ||
resolve = res; | ||
reject = rej; | ||
}); | ||
processReply( | ||
reference, | ||
"", | ||
void 0, | ||
function (body) { | ||
if ("string" === typeof body) { | ||
var data = new FormData(); | ||
data.append("0", body); | ||
body = data; | ||
} | ||
thenable.status = "fulfilled"; | ||
thenable.value = body; | ||
resolve(body); | ||
}, | ||
function (e) { | ||
thenable.status = "rejected"; | ||
thenable.reason = e; | ||
reject(e); | ||
} | ||
const fulfilled = thenable; | ||
fulfilled.status = 'fulfilled'; | ||
fulfilled.value = body; | ||
resolve(body); | ||
}, e => { | ||
const rejected = thenable; | ||
rejected.status = 'rejected'; | ||
rejected.reason = e; | ||
reject(e); | ||
}); | ||
); | ||
return thenable; | ||
} | ||
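// Illustrative usage: encodeFormData returns a thenable that is also tagged | ||
// with status/value once settled, so callers can unwrap it synchronously (the | ||
// pattern used by defaultEncodeFormAction below). The reference shape here is | ||
// hypothetical. | ||
//   const thenable = encodeFormData({ id: 'abc123', bound: Promise.resolve([1, 2]) }); | ||
//   thenable.then(formData => { | ||
//     // formData is a FormData whose field "0" holds the root JSON model. | ||
//   }); | ||
//   // Once settled: thenable.status === 'fulfilled' && thenable.value instanceof FormData | ||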
function encodeFormAction(identifierPrefix) { | ||
const reference = knownServerReferences.get(this); | ||
if (!reference) { | ||
throw new Error('Tried to encode a Server Action from a different instance than the encoder is from. ' + 'This is a bug in React.'); | ||
} | ||
let data = null; | ||
let name; | ||
const boundPromise = reference.bound; | ||
if (boundPromise !== null) { | ||
let thenable = boundCache.get(reference); | ||
if (!thenable) { | ||
thenable = encodeFormData(reference); | ||
boundCache.set(reference, thenable); | ||
} | ||
if (thenable.status === 'rejected') { | ||
throw thenable.reason; | ||
} else if (thenable.status !== 'fulfilled') { | ||
throw thenable; | ||
} | ||
const encodedFormData = thenable.value; // This is hacky, but we need the identifier prefix added to | ||
// all of the fields. Prefixing them up front would break the suspense cache since we might get | ||
// a new identifier each time, so we just append it at the end instead. | ||
const prefixedData = new FormData(); // $FlowFixMe[prop-missing] | ||
encodedFormData.forEach((value, key) => { | ||
prefixedData.append('$ACTION_' + identifierPrefix + ':' + key, value); | ||
function defaultEncodeFormAction(identifierPrefix) { | ||
var reference = knownServerReferences.get(this); | ||
if (!reference) | ||
throw Error( | ||
"Tried to encode a Server Action from a different instance than the encoder is from. This is a bug in React." | ||
); | ||
var data = null; | ||
if (null !== reference.bound) { | ||
data = boundCache.get(reference); | ||
data || | ||
((data = encodeFormData(reference)), boundCache.set(reference, data)); | ||
if ("rejected" === data.status) throw data.reason; | ||
if ("fulfilled" !== data.status) throw data; | ||
reference = data.value; | ||
var prefixedData = new FormData(); | ||
reference.forEach(function (value, key) { | ||
prefixedData.append("$ACTION_" + identifierPrefix + ":" + key, value); | ||
}); | ||
data = prefixedData; // We encode the name of the prefix containing the data. | ||
name = '$ACTION_REF_' + identifierPrefix; | ||
} else { | ||
// This is the simple case so we can just encode the ID. | ||
name = '$ACTION_ID_' + reference.id; | ||
} | ||
data = prefixedData; | ||
reference = "$ACTION_REF_" + identifierPrefix; | ||
} else reference = "$ACTION_ID_" + reference.id; | ||
return { | ||
name: name, | ||
method: 'POST', | ||
encType: 'multipart/form-data', | ||
name: reference, | ||
method: "POST", | ||
encType: "multipart/form-data", | ||
data: data | ||
}; | ||
} | ||
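// Shape sketch (illustrative, identifierPrefix '123' is arbitrary): an unbound | ||
// reference only needs its action id, while bound arguments are emitted as | ||
// prefixed FormData fields and referenced by name. Note the bound case may | ||
// throw the pending thenable (suspend) until encodeFormData settles. | ||
//   defaultEncodeFormAction.call(unboundAction, '123') | ||
//   // -> { name: '$ACTION_ID_<server id>', method: 'POST', | ||
//   //      encType: 'multipart/form-data', data: null } | ||
//   defaultEncodeFormAction.call(boundAction, '123') | ||
//   // -> { name: '$ACTION_REF_123', method: 'POST', | ||
//   //      encType: 'multipart/form-data', data: FormData /* '$ACTION_123:*' fields */ } | ||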
function isSignatureEqual(referenceId, numberOfBoundArgs) { | ||
const reference = knownServerReferences.get(this); | ||
if (!reference) { | ||
throw new Error('Tried to encode a Server Action from a different instance than the encoder is from. ' + 'This is a bug in React.'); | ||
} | ||
if (reference.id !== referenceId) { | ||
// These are different functions. | ||
return false; | ||
} // Now check if the number of bound arguments is the same. | ||
const boundPromise = reference.bound; | ||
if (boundPromise === null) { | ||
// No bound arguments. | ||
return numberOfBoundArgs === 0; | ||
} // Unwrap the bound arguments array by suspending, if necessary. As with | ||
// encodeFormData, this means isSignatureEqual can only be called while React | ||
// is rendering. | ||
var reference = knownServerReferences.get(this); | ||
if (!reference) | ||
throw Error( | ||
"Tried to encode a Server Action from a different instance than the encoder is from. This is a bug in React." | ||
); | ||
if (reference.id !== referenceId) return !1; | ||
var boundPromise = reference.bound; | ||
if (null === boundPromise) return 0 === numberOfBoundArgs; | ||
switch (boundPromise.status) { | ||
case 'fulfilled': | ||
{ | ||
const boundArgs = boundPromise.value; | ||
return boundArgs.length === numberOfBoundArgs; | ||
} | ||
case 'pending': | ||
{ | ||
throw boundPromise; | ||
} | ||
case 'rejected': | ||
{ | ||
throw boundPromise.reason; | ||
} | ||
case "fulfilled": | ||
return boundPromise.value.length === numberOfBoundArgs; | ||
case "pending": | ||
throw boundPromise; | ||
case "rejected": | ||
throw boundPromise.reason; | ||
default: | ||
{ | ||
if (typeof boundPromise.status === 'string') ; else { | ||
const pendingThenable = boundPromise; | ||
pendingThenable.status = 'pending'; | ||
pendingThenable.then(boundArgs => { | ||
const fulfilledThenable = boundPromise; | ||
fulfilledThenable.status = 'fulfilled'; | ||
fulfilledThenable.value = boundArgs; | ||
}, error => { | ||
const rejectedThenable = boundPromise; | ||
rejectedThenable.status = 'rejected'; | ||
rejectedThenable.reason = error; | ||
}); | ||
} | ||
throw boundPromise; | ||
} | ||
throw ( | ||
("string" !== typeof boundPromise.status && | ||
((boundPromise.status = "pending"), | ||
boundPromise.then( | ||
function (boundArgs) { | ||
boundPromise.status = "fulfilled"; | ||
boundPromise.value = boundArgs; | ||
}, | ||
function (error) { | ||
boundPromise.status = "rejected"; | ||
boundPromise.reason = error; | ||
} | ||
)), | ||
boundPromise) | ||
); | ||
} | ||
} | ||
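// Illustrative: isSignatureEqual follows the same suspense convention, so a | ||
// caller rendering a form is expected to catch a thrown thenable and retry | ||
// once the bound arguments have resolved. | ||
//   try { | ||
//     const same = isSignatureEqual.call(action, actionId, 2); // boolean once bound args are known | ||
//   } catch (thrown) { | ||
//     if (thrown !== null && typeof thrown.then === 'function') { | ||
//       // still pending: re-run after `thrown` resolves | ||
//     } else { | ||
//       throw thrown; // a real error (e.g. the bound promise rejected) | ||
//     } | ||
//   } | ||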
function registerServerReference(proxy, reference) { | ||
// Expose encoder for use by SSR, as well as a special bind that can be used to | ||
// keep server capabilities. | ||
{ | ||
// Only expose this in builds that would actually use it. Not needed on the client. | ||
Object.defineProperties(proxy, { | ||
$$FORM_ACTION: { | ||
value: encodeFormAction | ||
}, | ||
$$IS_SIGNATURE_EQUAL: { | ||
value: isSignatureEqual | ||
}, | ||
bind: { | ||
value: bind | ||
} | ||
}); | ||
} | ||
knownServerReferences.set(proxy, reference); | ||
} // $FlowFixMe[method-unbinding] | ||
const FunctionBind = Function.prototype.bind; // $FlowFixMe[method-unbinding] | ||
const ArraySlice = Array.prototype.slice; | ||
function registerServerReference(proxy, reference$jscomp$0, encodeFormAction) { | ||
Object.defineProperties(proxy, { | ||
$$FORM_ACTION: { | ||
value: | ||
void 0 === encodeFormAction | ||
? defaultEncodeFormAction | ||
: function () { | ||
var reference = knownServerReferences.get(this); | ||
if (!reference) | ||
throw Error( | ||
"Tried to encode a Server Action from a different instance than the encoder is from. This is a bug in React." | ||
); | ||
var boundPromise = reference.bound; | ||
null === boundPromise && (boundPromise = Promise.resolve([])); | ||
return encodeFormAction(reference.id, boundPromise); | ||
} | ||
}, | ||
$$IS_SIGNATURE_EQUAL: { value: isSignatureEqual }, | ||
bind: { value: bind } | ||
}); | ||
knownServerReferences.set(proxy, reference$jscomp$0); | ||
} | ||
var FunctionBind = Function.prototype.bind, | ||
ArraySlice = Array.prototype.slice; | ||
function bind() { | ||
// $FlowFixMe[unsupported-syntax] | ||
const newFn = FunctionBind.apply(this, arguments); | ||
const reference = knownServerReferences.get(this); | ||
var newFn = FunctionBind.apply(this, arguments), | ||
reference = knownServerReferences.get(this); | ||
if (reference) { | ||
const args = ArraySlice.call(arguments, 1); | ||
let boundPromise = null; | ||
if (reference.bound !== null) { | ||
boundPromise = Promise.resolve(reference.bound).then(boundArgs => boundArgs.concat(args)); | ||
} else { | ||
boundPromise = Promise.resolve(args); | ||
} | ||
registerServerReference(newFn, { | ||
id: reference.id, | ||
bound: boundPromise | ||
var args = ArraySlice.call(arguments, 1), | ||
boundPromise = null; | ||
boundPromise = | ||
null !== reference.bound | ||
? Promise.resolve(reference.bound).then(function (boundArgs) { | ||
return boundArgs.concat(args); | ||
}) | ||
: Promise.resolve(args); | ||
Object.defineProperties(newFn, { | ||
$$FORM_ACTION: { value: this.$$FORM_ACTION }, | ||
$$IS_SIGNATURE_EQUAL: { value: isSignatureEqual }, | ||
bind: { value: bind } | ||
}); | ||
knownServerReferences.set(newFn, { id: reference.id, bound: boundPromise }); | ||
} | ||
return newFn; | ||
} | ||
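// Illustrative (the id and callServer are hypothetical): binding a registered | ||
// server reference (see createServerReference$1 below) keeps its server | ||
// capabilities; the thisArg is dropped and the remaining args are concatenated | ||
// onto any previously bound ones. | ||
//   const sendMessage = createServerReference$1('abc', callServer); | ||
//   const sendHello = sendMessage.bind(null, 'hello'); | ||
//   sendHello('world'); // ends up as callServer('abc', ['hello', 'world']) | ||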
function createServerReference$1(id, callServer) { | ||
const proxy = function () { | ||
// $FlowFixMe[method-unbinding] | ||
const args = Array.prototype.slice.call(arguments); | ||
function createBoundServerReference(metaData, callServer, encodeFormAction) { | ||
function action() { | ||
var args = Array.prototype.slice.call(arguments); | ||
return bound | ||
? "fulfilled" === bound.status | ||
? callServer(id, bound.value.concat(args)) | ||
: Promise.resolve(bound).then(function (boundArgs) { | ||
return callServer(id, boundArgs.concat(args)); | ||
}) | ||
: callServer(id, args); | ||
} | ||
var id = metaData.id, | ||
bound = metaData.bound; | ||
registerServerReference(action, { id: id, bound: bound }, encodeFormAction); | ||
return action; | ||
} | ||
function createServerReference$1(id, callServer, encodeFormAction) { | ||
function action() { | ||
var args = Array.prototype.slice.call(arguments); | ||
return callServer(id, args); | ||
}; | ||
registerServerReference(proxy, { | ||
id, | ||
bound: null | ||
}); | ||
return proxy; | ||
} | ||
registerServerReference(action, { id: id, bound: null }, encodeFormAction); | ||
return action; | ||
} | ||
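// Illustrative: createBoundServerReference builds a callable proxy that | ||
// forwards to callServer, concatenating bound args synchronously when the | ||
// bound thenable has already fulfilled and awaiting it otherwise. The id and | ||
// callServer implementation below are hypothetical. | ||
//   const action = createBoundServerReference( | ||
//     { id: 'abc', bound: Promise.resolve(['user-1']) }, | ||
//     (id, args) => fetch('/action', { method: 'POST', body: JSON.stringify({ id, args }) }) | ||
//   ); | ||
//   action('hello'); // -> callServer('abc', ['user-1', 'hello']) | ||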
const ROW_ID = 0; | ||
const ROW_TAG = 1; | ||
const ROW_LENGTH = 2; | ||
const ROW_CHUNK_BY_NEWLINE = 3; | ||
const ROW_CHUNK_BY_LENGTH = 4; | ||
const PENDING = 'pending'; | ||
const BLOCKED = 'blocked'; | ||
const CYCLIC = 'cyclic'; | ||
const RESOLVED_MODEL = 'resolved_model'; | ||
const RESOLVED_MODULE = 'resolved_module'; | ||
const INITIALIZED = 'fulfilled'; | ||
const ERRORED = 'rejected'; // Dev-only | ||
// $FlowFixMe[missing-this-annot] | ||
function Chunk(status, value, reason, response) { | ||
function ReactPromise(status, value, reason, response) { | ||
this.status = status; | ||
@@ -859,78 +686,41 @@ this.value = value; | ||
this._response = response; | ||
} // We subclass Promise.prototype so that we get other methods like .catch | ||
Chunk.prototype = Object.create(Promise.prototype); // TODO: This doesn't return a new Promise chain unlike the real .then | ||
Chunk.prototype.then = function (resolve, reject) { | ||
const chunk = this; // If we have resolved content, we try to initialize it first which | ||
// might put us back into one of the other states. | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
} | ||
ReactPromise.prototype = Object.create(Promise.prototype); | ||
ReactPromise.prototype.then = function (resolve, reject) { | ||
switch (this.status) { | ||
case "resolved_model": | ||
initializeModelChunk(this); | ||
break; | ||
case RESOLVED_MODULE: | ||
initializeModuleChunk(chunk); | ||
case "resolved_module": | ||
initializeModuleChunk(this); | ||
} | ||
switch (this.status) { | ||
case "fulfilled": | ||
resolve(this.value); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
resolve(chunk.value); | ||
case "pending": | ||
case "blocked": | ||
resolve && | ||
(null === this.value && (this.value = []), this.value.push(resolve)); | ||
reject && | ||
(null === this.reason && (this.reason = []), this.reason.push(reject)); | ||
break; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
if (resolve) { | ||
if (chunk.value === null) { | ||
chunk.value = []; | ||
} | ||
chunk.value.push(resolve); | ||
} | ||
if (reject) { | ||
if (chunk.reason === null) { | ||
chunk.reason = []; | ||
} | ||
chunk.reason.push(reject); | ||
} | ||
break; | ||
default: | ||
reject(chunk.reason); | ||
break; | ||
reject && reject(this.reason); | ||
} | ||
}; | ||
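// Illustrative: chunks expose the same contract React's use() expects, so a | ||
// settled chunk can be read synchronously while a pending one is suspended on. | ||
// Unlike a real Promise, this .then does not return a new chained promise. | ||
//   chunk.then(value => handleValue(value), error => handleError(error)); // hypothetical callbacks | ||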
function readChunk(chunk) { | ||
// If we have resolved content, we try to initialize it first which | ||
// might put us back into one of the other states. | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
case "resolved_model": | ||
initializeModelChunk(chunk); | ||
break; | ||
case RESOLVED_MODULE: | ||
case "resolved_module": | ||
initializeModuleChunk(chunk); | ||
break; | ||
} // The status might have changed after initialization. | ||
} | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
case "fulfilled": | ||
return chunk.value; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
// eslint-disable-next-line no-throw-literal | ||
case "pending": | ||
case "blocked": | ||
throw chunk; | ||
default: | ||
@@ -940,1059 +730,1087 @@ throw chunk.reason; | ||
} | ||
function getRoot(response) { | ||
const chunk = getChunk(response, 0); | ||
return chunk; | ||
} | ||
function createPendingChunk(response) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(PENDING, null, null, response); | ||
return new ReactPromise("pending", null, null, response); | ||
} | ||
function createBlockedChunk(response) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(BLOCKED, null, null, response); | ||
} | ||
function createErrorChunk(response, error) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(ERRORED, null, error, response); | ||
} | ||
function wakeChunk(listeners, value) { | ||
for (let i = 0; i < listeners.length; i++) { | ||
const listener = listeners[i]; | ||
listener(value); | ||
} | ||
for (var i = 0; i < listeners.length; i++) (0, listeners[i])(value); | ||
} | ||
function wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners) { | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
case "fulfilled": | ||
wakeChunk(resolveListeners, chunk.value); | ||
break; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
chunk.value = resolveListeners; | ||
chunk.reason = rejectListeners; | ||
case "pending": | ||
case "blocked": | ||
if (chunk.value) | ||
for (var i = 0; i < resolveListeners.length; i++) | ||
chunk.value.push(resolveListeners[i]); | ||
else chunk.value = resolveListeners; | ||
if (chunk.reason) { | ||
if (rejectListeners) | ||
for ( | ||
resolveListeners = 0; | ||
resolveListeners < rejectListeners.length; | ||
resolveListeners++ | ||
) | ||
chunk.reason.push(rejectListeners[resolveListeners]); | ||
} else chunk.reason = rejectListeners; | ||
break; | ||
case ERRORED: | ||
if (rejectListeners) { | ||
wakeChunk(rejectListeners, chunk.reason); | ||
} | ||
break; | ||
case "rejected": | ||
rejectListeners && wakeChunk(rejectListeners, chunk.reason); | ||
} | ||
} | ||
function triggerErrorOnChunk(chunk, error) { | ||
if (chunk.status !== PENDING && chunk.status !== BLOCKED) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" !== chunk.status && "blocked" !== chunk.status) | ||
chunk.reason.error(error); | ||
else { | ||
var listeners = chunk.reason; | ||
chunk.status = "rejected"; | ||
chunk.reason = error; | ||
null !== listeners && wakeChunk(listeners, error); | ||
} | ||
const listeners = chunk.reason; | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
if (listeners !== null) { | ||
wakeChunk(listeners, error); | ||
} | ||
} | ||
function createResolvedModelChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(RESOLVED_MODEL, value, null, response); | ||
function createResolvedIteratorResultChunk(response, value, done) { | ||
return new ReactPromise( | ||
"resolved_model", | ||
(done ? '{"done":true,"value":' : '{"done":false,"value":') + value + "}", | ||
null, | ||
response | ||
); | ||
} | ||
function createResolvedModuleChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(RESOLVED_MODULE, value, null, response); | ||
function resolveIteratorResultChunk(chunk, value, done) { | ||
resolveModelChunk( | ||
chunk, | ||
(done ? '{"done":true,"value":' : '{"done":false,"value":') + value + "}" | ||
); | ||
} | ||
function createInitializedTextChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(INITIALIZED, value, null, response); | ||
} | ||
function createInitializedBufferChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(INITIALIZED, value, null, response); | ||
} | ||
function resolveModelChunk(chunk, value) { | ||
if (chunk.status !== PENDING) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" !== chunk.status) chunk.reason.enqueueModel(value); | ||
else { | ||
var resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "resolved_model"; | ||
chunk.value = value; | ||
null !== resolveListeners && | ||
(initializeModelChunk(chunk), | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners)); | ||
} | ||
const resolveListeners = chunk.value; | ||
const rejectListeners = chunk.reason; | ||
const resolvedChunk = chunk; | ||
resolvedChunk.status = RESOLVED_MODEL; | ||
resolvedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
// It's unfortunate that we're reading this eagerly if | ||
// we already have listeners attached since they might no | ||
// longer be rendered or might not be the highest pri. | ||
initializeModelChunk(resolvedChunk); // The status might have changed after initialization. | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
} | ||
function resolveModuleChunk(chunk, value) { | ||
if (chunk.status !== PENDING && chunk.status !== BLOCKED) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" === chunk.status || "blocked" === chunk.status) { | ||
var resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "resolved_module"; | ||
chunk.value = value; | ||
null !== resolveListeners && | ||
(initializeModuleChunk(chunk), | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners)); | ||
} | ||
const resolveListeners = chunk.value; | ||
const rejectListeners = chunk.reason; | ||
const resolvedChunk = chunk; | ||
resolvedChunk.status = RESOLVED_MODULE; | ||
resolvedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
initializeModuleChunk(resolvedChunk); | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
} | ||
let initializingChunk = null; | ||
let initializingChunkBlockedModel = null; | ||
var initializingHandler = null; | ||
function initializeModelChunk(chunk) { | ||
const prevChunk = initializingChunk; | ||
const prevBlocked = initializingChunkBlockedModel; | ||
initializingChunk = chunk; | ||
initializingChunkBlockedModel = null; | ||
const resolvedModel = chunk.value; // We go to the CYCLIC state until we've fully resolved this. | ||
// We do this before parsing in case we try to initialize the same chunk | ||
// while parsing the model. Such as in a cyclic reference. | ||
const cyclicChunk = chunk; | ||
cyclicChunk.status = CYCLIC; | ||
cyclicChunk.value = null; | ||
cyclicChunk.reason = null; | ||
var prevHandler = initializingHandler; | ||
initializingHandler = null; | ||
var resolvedModel = chunk.value; | ||
chunk.status = "blocked"; | ||
chunk.value = null; | ||
chunk.reason = null; | ||
try { | ||
const value = parseModel(chunk._response, resolvedModel); | ||
if (initializingChunkBlockedModel !== null && initializingChunkBlockedModel.deps > 0) { | ||
initializingChunkBlockedModel.value = value; // We discovered new dependencies on modules that are not yet resolved. | ||
// We have to go the BLOCKED state until they're resolved. | ||
const blockedChunk = chunk; | ||
blockedChunk.status = BLOCKED; | ||
blockedChunk.value = null; | ||
blockedChunk.reason = null; | ||
} else { | ||
const resolveListeners = cyclicChunk.value; | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
wakeChunk(resolveListeners, value); | ||
var value = JSON.parse(resolvedModel, chunk._response._fromJSON), | ||
resolveListeners = chunk.value; | ||
null !== resolveListeners && | ||
((chunk.value = null), | ||
(chunk.reason = null), | ||
wakeChunk(resolveListeners, value)); | ||
if (null !== initializingHandler) { | ||
if (initializingHandler.errored) throw initializingHandler.value; | ||
if (0 < initializingHandler.deps) { | ||
initializingHandler.value = value; | ||
initializingHandler.chunk = chunk; | ||
return; | ||
} | ||
} | ||
chunk.status = "fulfilled"; | ||
chunk.value = value; | ||
} catch (error) { | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
(chunk.status = "rejected"), (chunk.reason = error); | ||
} finally { | ||
initializingChunk = prevChunk; | ||
initializingChunkBlockedModel = prevBlocked; | ||
initializingHandler = prevHandler; | ||
} | ||
} | ||
function initializeModuleChunk(chunk) { | ||
try { | ||
const value = requireModule(chunk.value); | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = value; | ||
var value = requireModule(chunk.value); | ||
chunk.status = "fulfilled"; | ||
chunk.value = value; | ||
} catch (error) { | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
(chunk.status = "rejected"), (chunk.reason = error); | ||
} | ||
} // Report that any missing chunks in the model are now going to throw this | ||
// error upon read. Also notify any pending promises. | ||
} | ||
function reportGlobalError(response, error) { | ||
response._chunks.forEach(chunk => { | ||
// If this chunk was already resolved or errored, it won't | ||
// trigger an error but if it wasn't then we need to | ||
// because we won't be getting any new data to resolve it. | ||
if (chunk.status === PENDING) { | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
response._chunks.forEach(function (chunk) { | ||
"pending" === chunk.status && triggerErrorOnChunk(chunk, error); | ||
}); | ||
} | ||
function createElement(type, key, props) { | ||
const element = { | ||
// This tag allows us to uniquely identify this as a React Element | ||
$$typeof: REACT_ELEMENT_TYPE, | ||
// Built-in properties that belong on the element | ||
type: type, | ||
key: key, | ||
ref: null, | ||
props: props, | ||
// Record the component responsible for creating this element. | ||
_owner: null | ||
}; | ||
return element; | ||
} | ||
function createLazyChunkWrapper(chunk) { | ||
const lazyType = { | ||
$$typeof: REACT_LAZY_TYPE, | ||
_payload: chunk, | ||
_init: readChunk | ||
}; | ||
return lazyType; | ||
return { $$typeof: REACT_LAZY_TYPE, _payload: chunk, _init: readChunk }; | ||
} | ||
function getChunk(response, id) { | ||
const chunks = response._chunks; | ||
let chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunk = createPendingChunk(response); | ||
chunks.set(id, chunk); | ||
} | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk || ((chunk = createPendingChunk(response)), chunks.set(id, chunk)); | ||
return chunk; | ||
} | ||
function createModelResolver(chunk, parentObject, key, cyclic) { | ||
let blocked; | ||
if (initializingChunkBlockedModel) { | ||
blocked = initializingChunkBlockedModel; | ||
if (!cyclic) { | ||
blocked.deps++; | ||
function waitForReference( | ||
referencedChunk, | ||
parentObject, | ||
key, | ||
response, | ||
map, | ||
path | ||
) { | ||
function fulfill(value) { | ||
for (var i = 1; i < path.length; i++) { | ||
for (; value.$$typeof === REACT_LAZY_TYPE; ) | ||
if (((value = value._payload), value === handler.chunk)) | ||
value = handler.value; | ||
else if ("fulfilled" === value.status) value = value.value; | ||
else { | ||
path.splice(0, i - 1); | ||
value.then(fulfill, reject); | ||
return; | ||
} | ||
value = value[path[i]]; | ||
} | ||
} else { | ||
blocked = initializingChunkBlockedModel = { | ||
deps: cyclic ? 0 : 1, | ||
value: null | ||
i = map(response, value, parentObject, key); | ||
parentObject[key] = i; | ||
"" === key && null === handler.value && (handler.value = i); | ||
if ( | ||
parentObject[0] === REACT_ELEMENT_TYPE && | ||
"object" === typeof handler.value && | ||
null !== handler.value && | ||
handler.value.$$typeof === REACT_ELEMENT_TYPE | ||
) | ||
switch (((value = handler.value), key)) { | ||
case "3": | ||
value.props = i; | ||
} | ||
handler.deps--; | ||
0 === handler.deps && | ||
((i = handler.chunk), | ||
null !== i && | ||
"blocked" === i.status && | ||
((value = i.value), | ||
(i.status = "fulfilled"), | ||
(i.value = handler.value), | ||
null !== value && wakeChunk(value, handler.value))); | ||
} | ||
function reject(error) { | ||
if (!handler.errored) { | ||
handler.errored = !0; | ||
handler.value = error; | ||
var chunk = handler.chunk; | ||
null !== chunk && | ||
"blocked" === chunk.status && | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
} | ||
if (initializingHandler) { | ||
var handler = initializingHandler; | ||
handler.deps++; | ||
} else | ||
handler = initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: null, | ||
deps: 1, | ||
errored: !1 | ||
}; | ||
} | ||
return value => { | ||
parentObject[key] = value; | ||
blocked.deps--; | ||
if (blocked.deps === 0) { | ||
if (chunk.status !== BLOCKED) { | ||
return; | ||
referencedChunk.then(fulfill, reject); | ||
return null; | ||
} | ||
function loadServerReference(response, metaData, parentObject, key) { | ||
if (!response._serverReferenceConfig) | ||
return createBoundServerReference( | ||
metaData, | ||
response._callServer, | ||
response._encodeFormAction | ||
); | ||
var serverReference = resolveServerReference( | ||
response._serverReferenceConfig, | ||
metaData.id | ||
); | ||
if ((response = preloadModule(serverReference))) | ||
metaData.bound && (response = Promise.all([response, metaData.bound])); | ||
else if (metaData.bound) response = Promise.resolve(metaData.bound); | ||
else return requireModule(serverReference); | ||
if (initializingHandler) { | ||
var handler = initializingHandler; | ||
handler.deps++; | ||
} else | ||
handler = initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: null, | ||
deps: 1, | ||
errored: !1 | ||
}; | ||
response.then( | ||
function () { | ||
var resolvedValue = requireModule(serverReference); | ||
if (metaData.bound) { | ||
var boundArgs = metaData.bound.value.slice(0); | ||
boundArgs.unshift(null); | ||
resolvedValue = resolvedValue.bind.apply(resolvedValue, boundArgs); | ||
} | ||
const resolveListeners = chunk.value; | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = blocked.value; | ||
if (resolveListeners !== null) { | ||
wakeChunk(resolveListeners, blocked.value); | ||
parentObject[key] = resolvedValue; | ||
"" === key && null === handler.value && (handler.value = resolvedValue); | ||
if ( | ||
parentObject[0] === REACT_ELEMENT_TYPE && | ||
"object" === typeof handler.value && | ||
null !== handler.value && | ||
handler.value.$$typeof === REACT_ELEMENT_TYPE | ||
) | ||
switch (((boundArgs = handler.value), key)) { | ||
case "3": | ||
boundArgs.props = resolvedValue; | ||
} | ||
handler.deps--; | ||
0 === handler.deps && | ||
((resolvedValue = handler.chunk), | ||
null !== resolvedValue && | ||
"blocked" === resolvedValue.status && | ||
((boundArgs = resolvedValue.value), | ||
(resolvedValue.status = "fulfilled"), | ||
(resolvedValue.value = handler.value), | ||
null !== boundArgs && wakeChunk(boundArgs, handler.value))); | ||
}, | ||
function (error) { | ||
if (!handler.errored) { | ||
handler.errored = !0; | ||
handler.value = error; | ||
var chunk = handler.chunk; | ||
null !== chunk && | ||
"blocked" === chunk.status && | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
} | ||
}; | ||
); | ||
return null; | ||
} | ||
function createModelReject(chunk) { | ||
return error => triggerErrorOnChunk(chunk, error); | ||
} | ||
function createServerReferenceProxy(response, metaData) { | ||
const callServer = response._callServer; | ||
const proxy = function () { | ||
// $FlowFixMe[method-unbinding] | ||
const args = Array.prototype.slice.call(arguments); | ||
const p = metaData.bound; | ||
if (!p) { | ||
return callServer(metaData.id, args); | ||
} | ||
if (p.status === INITIALIZED) { | ||
const bound = p.value; | ||
return callServer(metaData.id, bound.concat(args)); | ||
} // Since this is a fake Promise whose .then doesn't chain, we have to wrap it. | ||
// TODO: Remove the wrapper once that's fixed. | ||
return Promise.resolve(p).then(function (bound) { | ||
return callServer(metaData.id, bound.concat(args)); | ||
}); | ||
}; | ||
registerServerReference(proxy, metaData); | ||
return proxy; | ||
} | ||
function getOutlinedModel(response, id) { | ||
const chunk = getChunk(response, id); | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
function getOutlinedModel(response, reference, parentObject, key, map) { | ||
reference = reference.split(":"); | ||
var id = parseInt(reference[0], 16); | ||
id = getChunk(response, id); | ||
switch (id.status) { | ||
case "resolved_model": | ||
initializeModelChunk(id); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
{ | ||
return chunk.value; | ||
case "resolved_module": | ||
initializeModuleChunk(id); | ||
} | ||
switch (id.status) { | ||
case "fulfilled": | ||
var value = id.value; | ||
for (id = 1; id < reference.length; id++) { | ||
for (; value.$$typeof === REACT_LAZY_TYPE; ) | ||
if (((value = value._payload), "fulfilled" === value.status)) | ||
value = value.value; | ||
else | ||
return waitForReference( | ||
value, | ||
parentObject, | ||
key, | ||
response, | ||
map, | ||
reference.slice(id - 1) | ||
); | ||
value = value[reference[id]]; | ||
} | ||
// We always encode it first in the stream so it won't be pending. | ||
return map(response, value, parentObject, key); | ||
case "pending": | ||
case "blocked": | ||
return waitForReference(id, parentObject, key, response, map, reference); | ||
default: | ||
throw chunk.reason; | ||
return ( | ||
initializingHandler | ||
? ((initializingHandler.errored = !0), | ||
(initializingHandler.value = id.reason)) | ||
: (initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: id.reason, | ||
deps: 0, | ||
errored: !0 | ||
}), | ||
null | ||
); | ||
} | ||
} | ||
function createMap(response, model) { | ||
return new Map(model); | ||
} | ||
function createSet(response, model) { | ||
return new Set(model); | ||
} | ||
function createBlob(response, model) { | ||
return new Blob(model.slice(1), { type: model[0] }); | ||
} | ||
function createFormData(response, model) { | ||
response = new FormData(); | ||
for (var i = 0; i < model.length; i++) | ||
response.append(model[i][0], model[i][1]); | ||
return response; | ||
} | ||
function extractIterator(response, model) { | ||
return model[Symbol.iterator](); | ||
} | ||
function createModel(response, model) { | ||
return model; | ||
} | ||
function parseModelString(response, parentObject, key, value) { | ||
if (value[0] === '$') { | ||
if (value === '$') { | ||
// A very common symbol. | ||
return REACT_ELEMENT_TYPE; | ||
} | ||
if ("$" === value[0]) { | ||
if ("$" === value) | ||
return ( | ||
null !== initializingHandler && | ||
"0" === key && | ||
(initializingHandler = { | ||
parent: initializingHandler, | ||
chunk: null, | ||
value: null, | ||
deps: 0, | ||
errored: !1 | ||
}), | ||
REACT_ELEMENT_TYPE | ||
); | ||
switch (value[1]) { | ||
case '$': | ||
{ | ||
// This was an escaped string value. | ||
return value.slice(1); | ||
} | ||
case 'L': | ||
{ | ||
// Lazy node | ||
const id = parseInt(value.slice(2), 16); | ||
const chunk = getChunk(response, id); // We create a React.lazy wrapper around any lazy values. | ||
// When passed into React, we'll know how to suspend on this. | ||
return createLazyChunkWrapper(chunk); | ||
} | ||
case '@': | ||
{ | ||
// Promise | ||
const id = parseInt(value.slice(2), 16); | ||
const chunk = getChunk(response, id); | ||
return chunk; | ||
} | ||
case 'S': | ||
{ | ||
// Symbol | ||
return Symbol.for(value.slice(2)); | ||
} | ||
case 'F': | ||
{ | ||
// Server Reference | ||
const id = parseInt(value.slice(2), 16); | ||
const metadata = getOutlinedModel(response, id); | ||
return createServerReferenceProxy(response, metadata); | ||
} | ||
case 'Q': | ||
{ | ||
// Map | ||
const id = parseInt(value.slice(2), 16); | ||
const data = getOutlinedModel(response, id); | ||
return new Map(data); | ||
} | ||
case 'W': | ||
{ | ||
// Set | ||
const id = parseInt(value.slice(2), 16); | ||
const data = getOutlinedModel(response, id); | ||
return new Set(data); | ||
} | ||
case 'I': | ||
{ | ||
// $Infinity | ||
return Infinity; | ||
} | ||
case '-': | ||
{ | ||
// $-0 or $-Infinity | ||
if (value === '$-0') { | ||
return -0; | ||
} else { | ||
return -Infinity; | ||
} | ||
} | ||
case 'N': | ||
{ | ||
// $NaN | ||
return NaN; | ||
} | ||
case 'u': | ||
{ | ||
// matches "$undefined" | ||
// Special encoding for `undefined` which can't be serialized as JSON otherwise. | ||
return undefined; | ||
} | ||
case 'D': | ||
{ | ||
// Date | ||
return new Date(Date.parse(value.slice(2))); | ||
} | ||
case 'n': | ||
{ | ||
// BigInt | ||
return BigInt(value.slice(2)); | ||
} | ||
case "$": | ||
return value.slice(1); | ||
case "L": | ||
return ( | ||
(parentObject = parseInt(value.slice(2), 16)), | ||
(response = getChunk(response, parentObject)), | ||
createLazyChunkWrapper(response) | ||
); | ||
case "@": | ||
if (2 === value.length) return new Promise(function () {}); | ||
parentObject = parseInt(value.slice(2), 16); | ||
return getChunk(response, parentObject); | ||
case "S": | ||
return Symbol.for(value.slice(2)); | ||
case "F": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel( | ||
response, | ||
value, | ||
parentObject, | ||
key, | ||
loadServerReference | ||
) | ||
); | ||
case "T": | ||
parentObject = "$" + value.slice(2); | ||
response = response._tempRefs; | ||
if (null == response) | ||
throw Error( | ||
"Missing a temporary reference set but the RSC response returned a temporary reference. Pass a temporaryReference option with the set that was used with the reply." | ||
); | ||
return response.get(parentObject); | ||
case "Q": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createMap) | ||
); | ||
case "W": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createSet) | ||
); | ||
case "B": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createBlob) | ||
); | ||
case "K": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createFormData) | ||
); | ||
case "Z": | ||
return resolveErrorProd(); | ||
case "i": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, extractIterator) | ||
); | ||
case "I": | ||
return Infinity; | ||
case "-": | ||
return "$-0" === value ? -0 : -Infinity; | ||
case "N": | ||
return NaN; | ||
case "u": | ||
return; | ||
case "D": | ||
return new Date(Date.parse(value.slice(2))); | ||
case "n": | ||
return BigInt(value.slice(2)); | ||
default: | ||
{ | ||
// We assume that anything else is a reference ID. | ||
const id = parseInt(value.slice(1), 16); | ||
const chunk = getChunk(response, id); | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
break; | ||
case RESOLVED_MODULE: | ||
initializeModuleChunk(chunk); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
const chunkValue = chunk.value; | ||
return chunkValue; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
const parentChunk = initializingChunk; | ||
chunk.then(createModelResolver(parentChunk, parentObject, key, chunk.status === CYCLIC), createModelReject(parentChunk)); | ||
return null; | ||
default: | ||
throw chunk.reason; | ||
} | ||
} | ||
return ( | ||
(value = value.slice(1)), | ||
getOutlinedModel(response, value, parentObject, key, createModel) | ||
); | ||
} | ||
} | ||
return value; | ||
} | ||
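// Decoding sketch (informal, mirroring the serializers earlier in the file): | ||
//   parseModelString(response, parent, key, '$$money')                    // -> '$money' (escaped string) | ||
//   parseModelString(response, parent, key, '$undefined')                 // -> undefined | ||
//   parseModelString(response, parent, key, '$D2024-01-01T00:00:00.000Z') // -> Date | ||
//   parseModelString(response, parent, key, '$n9007199254740993')         // -> BigInt | ||
//   parseModelString(response, parent, key, '$Sreact.suspense')           // -> Symbol.for('react.suspense') | ||
//   parseModelString(response, parent, key, '$@a')                        // -> the chunk with id 0xa | ||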
function parseModelTuple(response, value) { | ||
const tuple = value; | ||
if (tuple[0] === REACT_ELEMENT_TYPE) { | ||
// TODO: Consider having React just directly accept these arrays as elements. | ||
// Or even change the ReactElement type to be an array. | ||
return createElement(tuple[1], tuple[2], tuple[3]); | ||
} | ||
return value; | ||
} | ||
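// Illustrative: element rows arrive as tuples and are rehydrated into React | ||
// elements, roughly: | ||
//   parseModelTuple(response, [REACT_ELEMENT_TYPE, 'div', null, { children: 'hi' }]) | ||
//   // -> { $$typeof: REACT_ELEMENT_TYPE, type: 'div', key: null, props: { children: 'hi' }, ... } | ||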
function missingCall() { | ||
throw new Error('Trying to call a function from "use server" but the callServer option ' + 'was not implemented in your router runtime.'); | ||
throw Error( | ||
'Trying to call a function from "use server" but the callServer option was not implemented in your router runtime.' | ||
); | ||
} | ||
function createResponse(bundlerConfig, moduleLoading, callServer, nonce) { | ||
const chunks = new Map(); | ||
const response = { | ||
_bundlerConfig: bundlerConfig, | ||
_moduleLoading: moduleLoading, | ||
_callServer: callServer !== undefined ? callServer : missingCall, | ||
_nonce: nonce, | ||
_chunks: chunks, | ||
_stringDecoder: createStringDecoder(), | ||
_fromJSON: null, | ||
_rowState: 0, | ||
_rowID: 0, | ||
_rowTag: 0, | ||
_rowLength: 0, | ||
_buffer: [] | ||
}; // Don't inline this call because it causes closure to outline the call above. | ||
response._fromJSON = createFromJSONCallback(response); | ||
return response; | ||
function ResponseInstance( | ||
bundlerConfig, | ||
serverReferenceConfig, | ||
moduleLoading, | ||
callServer, | ||
encodeFormAction, | ||
nonce, | ||
temporaryReferences | ||
) { | ||
var chunks = new Map(); | ||
this._bundlerConfig = bundlerConfig; | ||
this._serverReferenceConfig = serverReferenceConfig; | ||
this._moduleLoading = moduleLoading; | ||
this._callServer = void 0 !== callServer ? callServer : missingCall; | ||
this._encodeFormAction = encodeFormAction; | ||
this._nonce = nonce; | ||
this._chunks = chunks; | ||
this._stringDecoder = new TextDecoder(); | ||
this._fromJSON = null; | ||
this._rowLength = this._rowTag = this._rowID = this._rowState = 0; | ||
this._buffer = []; | ||
this._tempRefs = temporaryReferences; | ||
this._fromJSON = createFromJSONCallback(this); | ||
} | ||
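// The _rowState/_rowID/_rowTag/_rowLength fields and the _buffer array carry the state of a
// partially received protocol row across network reads, so a row split over several chunks
// can be reassembled before it is processed.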
function resolveModel(response, id, model) { | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createResolvedModelChunk(response, model)); | ||
} else { | ||
resolveModelChunk(chunk, model); | ||
} | ||
function resolveBuffer(response, id, buffer) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk && "pending" !== chunk.status | ||
? chunk.reason.enqueueValue(buffer) | ||
: chunks.set(id, new ReactPromise("fulfilled", buffer, null, response)); | ||
} | ||
function resolveText(response, id, text) { | ||
const chunks = response._chunks; // We assume that we always reference large strings after they've been | ||
// emitted. | ||
chunks.set(id, createInitializedTextChunk(response, text)); | ||
function resolveModule(response, id, model) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
model = JSON.parse(model, response._fromJSON); | ||
var clientReference = resolveClientReference(response._bundlerConfig, model); | ||
prepareDestinationWithChunks( | ||
response._moduleLoading, | ||
model[1], | ||
response._nonce | ||
); | ||
if ((model = preloadModule(clientReference))) { | ||
if (chunk) { | ||
var blockedChunk = chunk; | ||
blockedChunk.status = "blocked"; | ||
} else | ||
(blockedChunk = new ReactPromise("blocked", null, null, response)), | ||
chunks.set(id, blockedChunk); | ||
model.then( | ||
function () { | ||
return resolveModuleChunk(blockedChunk, clientReference); | ||
}, | ||
function (error) { | ||
return triggerErrorOnChunk(blockedChunk, error); | ||
} | ||
); | ||
} else | ||
chunk | ||
? resolveModuleChunk(chunk, clientReference) | ||
: chunks.set( | ||
id, | ||
new ReactPromise("resolved_module", clientReference, null, response) | ||
); | ||
} | ||
function resolveBuffer(response, id, buffer) { | ||
const chunks = response._chunks; // We assume that we always reference buffers after they've been emitted. | ||
chunks.set(id, createInitializedBufferChunk(response, buffer)); | ||
function resolveStream(response, id, stream, controller) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk | ||
? "pending" === chunk.status && | ||
((response = chunk.value), | ||
(chunk.status = "fulfilled"), | ||
(chunk.value = stream), | ||
(chunk.reason = controller), | ||
null !== response && wakeChunk(response, chunk.value)) | ||
: chunks.set( | ||
id, | ||
new ReactPromise("fulfilled", stream, controller, response) | ||
); | ||
} | ||
function resolveModule(response, id, model) { | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
const clientReferenceMetadata = parseModel(response, model); | ||
const clientReference = resolveClientReference(response._bundlerConfig, clientReferenceMetadata); | ||
prepareDestinationForModule(response._moduleLoading, response._nonce, clientReferenceMetadata); // TODO: Add an option to encode modules that are lazy loaded. | ||
// For now we preload all modules as early as possible since it's likely | ||
// that we'll need them. | ||
const promise = preloadModule(clientReference); | ||
if (promise) { | ||
let blockedChunk; | ||
if (!chunk) { | ||
// Technically, we should just treat promise as the chunk in this | ||
// case. Because it'll just behave as any other promise. | ||
blockedChunk = createBlockedChunk(response); | ||
chunks.set(id, blockedChunk); | ||
} else { | ||
// This can't actually happen because we don't have any forward | ||
// references to modules. | ||
blockedChunk = chunk; | ||
blockedChunk.status = BLOCKED; | ||
function startReadableStream(response, id, type) { | ||
var controller = null; | ||
type = new ReadableStream({ | ||
type: type, | ||
start: function (c) { | ||
controller = c; | ||
} | ||
promise.then(() => resolveModuleChunk(blockedChunk, clientReference), error => triggerErrorOnChunk(blockedChunk, error)); | ||
} else { | ||
if (!chunk) { | ||
chunks.set(id, createResolvedModuleChunk(response, clientReference)); | ||
} else { | ||
// This can't actually happen because we don't have any forward | ||
// references to modules. | ||
resolveModuleChunk(chunk, clientReference); | ||
}); | ||
var previousBlockedChunk = null; | ||
resolveStream(response, id, type, { | ||
enqueueValue: function (value) { | ||
null === previousBlockedChunk | ||
? controller.enqueue(value) | ||
: previousBlockedChunk.then(function () { | ||
controller.enqueue(value); | ||
}); | ||
}, | ||
enqueueModel: function (json) { | ||
if (null === previousBlockedChunk) { | ||
var chunk = new ReactPromise("resolved_model", json, null, response); | ||
initializeModelChunk(chunk); | ||
"fulfilled" === chunk.status | ||
? controller.enqueue(chunk.value) | ||
: (chunk.then( | ||
function (v) { | ||
return controller.enqueue(v); | ||
}, | ||
function (e) { | ||
return controller.error(e); | ||
} | ||
), | ||
(previousBlockedChunk = chunk)); | ||
} else { | ||
chunk = previousBlockedChunk; | ||
var chunk$52 = createPendingChunk(response); | ||
chunk$52.then( | ||
function (v) { | ||
return controller.enqueue(v); | ||
}, | ||
function (e) { | ||
return controller.error(e); | ||
} | ||
); | ||
previousBlockedChunk = chunk$52; | ||
chunk.then(function () { | ||
previousBlockedChunk === chunk$52 && (previousBlockedChunk = null); | ||
resolveModelChunk(chunk$52, json); | ||
}); | ||
} | ||
}, | ||
close: function () { | ||
if (null === previousBlockedChunk) controller.close(); | ||
else { | ||
var blockedChunk = previousBlockedChunk; | ||
previousBlockedChunk = null; | ||
blockedChunk.then(function () { | ||
return controller.close(); | ||
}); | ||
} | ||
}, | ||
error: function (error) { | ||
if (null === previousBlockedChunk) controller.error(error); | ||
else { | ||
var blockedChunk = previousBlockedChunk; | ||
previousBlockedChunk = null; | ||
blockedChunk.then(function () { | ||
return controller.error(error); | ||
}); | ||
} | ||
} | ||
} | ||
}); | ||
} | ||
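// The enqueueModel path above chains each JSON row onto previousBlockedChunk so values are
// enqueued into the ReadableStream in row order even when an earlier row is still waiting on
// a forward reference; close/error likewise wait for the last blocked chunk before closing
// or erroring the controller.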
function resolveErrorProd(response, id, digest) { | ||
const error = new Error('An error occurred in the Server Components render. The specific message is omitted in production' + ' builds to avoid leaking sensitive details. A digest property is included on this error instance which' + ' may provide additional details about the nature of the error.'); | ||
error.stack = 'Error: ' + error.message; | ||
error.digest = digest; | ||
const errorWithDigest = error; | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createErrorChunk(response, errorWithDigest)); | ||
} else { | ||
triggerErrorOnChunk(chunk, errorWithDigest); | ||
} | ||
function asyncIterator() { | ||
return this; | ||
} | ||
function resolvePostponeProd(response, id) { | ||
const error = new Error('A Server Component was postponed. The reason is omitted in production' + ' builds to avoid leaking sensitive details.'); | ||
const postponeInstance = error; | ||
postponeInstance.$$typeof = REACT_POSTPONE_TYPE; | ||
postponeInstance.stack = 'Error: ' + error.message; | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createErrorChunk(response, postponeInstance)); | ||
} else { | ||
triggerErrorOnChunk(chunk, postponeInstance); | ||
} | ||
function createIterator(next) { | ||
next = { next: next }; | ||
next[ASYNC_ITERATOR] = asyncIterator; | ||
return next; | ||
} | ||
function startAsyncIterable(response, id, iterator) { | ||
var buffer = [], | ||
closed = !1, | ||
nextWriteIndex = 0, | ||
$jscomp$compprop0 = {}; | ||
$jscomp$compprop0 = | ||
(($jscomp$compprop0[ASYNC_ITERATOR] = function () { | ||
var nextReadIndex = 0; | ||
return createIterator(function (arg) { | ||
if (void 0 !== arg) | ||
throw Error( | ||
"Values cannot be passed to next() of AsyncIterables passed to Client Components." | ||
); | ||
if (nextReadIndex === buffer.length) { | ||
if (closed) | ||
return new ReactPromise( | ||
"fulfilled", | ||
{ done: !0, value: void 0 }, | ||
null, | ||
response | ||
); | ||
buffer[nextReadIndex] = createPendingChunk(response); | ||
} | ||
return buffer[nextReadIndex++]; | ||
}); | ||
}), | ||
$jscomp$compprop0); | ||
resolveStream( | ||
response, | ||
id, | ||
iterator ? $jscomp$compprop0[ASYNC_ITERATOR]() : $jscomp$compprop0, | ||
{ | ||
enqueueValue: function (value) { | ||
if (nextWriteIndex === buffer.length) | ||
buffer[nextWriteIndex] = new ReactPromise( | ||
"fulfilled", | ||
{ done: !1, value: value }, | ||
null, | ||
response | ||
); | ||
else { | ||
var chunk = buffer[nextWriteIndex], | ||
resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "fulfilled"; | ||
chunk.value = { done: !1, value: value }; | ||
null !== resolveListeners && | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
nextWriteIndex++; | ||
}, | ||
enqueueModel: function (value) { | ||
nextWriteIndex === buffer.length | ||
? (buffer[nextWriteIndex] = createResolvedIteratorResultChunk( | ||
response, | ||
value, | ||
!1 | ||
)) | ||
: resolveIteratorResultChunk(buffer[nextWriteIndex], value, !1); | ||
nextWriteIndex++; | ||
}, | ||
close: function (value) { | ||
closed = !0; | ||
nextWriteIndex === buffer.length | ||
? (buffer[nextWriteIndex] = createResolvedIteratorResultChunk( | ||
response, | ||
value, | ||
!0 | ||
)) | ||
: resolveIteratorResultChunk(buffer[nextWriteIndex], value, !0); | ||
for (nextWriteIndex++; nextWriteIndex < buffer.length; ) | ||
resolveIteratorResultChunk( | ||
buffer[nextWriteIndex++], | ||
'"$undefined"', | ||
!0 | ||
); | ||
}, | ||
error: function (error) { | ||
closed = !0; | ||
for ( | ||
nextWriteIndex === buffer.length && | ||
(buffer[nextWriteIndex] = createPendingChunk(response)); | ||
nextWriteIndex < buffer.length; | ||
function resolveHint(response, code, model) { | ||
const hintModel = parseModel(response, model); | ||
dispatchHint(code, hintModel); | ||
) | ||
triggerErrorOnChunk(buffer[nextWriteIndex++], error); | ||
} | ||
} | ||
); | ||
} | ||
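// startAsyncIterable buffers each emitted value as a ReactPromise; readers index into that
// buffer and get a pending chunk when they read ahead of the writer. close() marks any
// remaining read-ahead chunks as done, and error() rejects them.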
function resolveErrorProd() { | ||
var error = Error( | ||
"An error occurred in the Server Components render. The specific message is omitted in production builds to avoid leaking sensitive details. A digest property is included on this error instance which may provide additional details about the nature of the error." | ||
); | ||
error.stack = "Error: " + error.message; | ||
return error; | ||
} | ||
function mergeBuffer(buffer, lastChunk) { | ||
const l = buffer.length; // Count the bytes we'll need | ||
let byteLength = lastChunk.length; | ||
for (let i = 0; i < l; i++) { | ||
for (var l = buffer.length, byteLength = lastChunk.length, i = 0; i < l; i++) | ||
byteLength += buffer[i].byteLength; | ||
} // Allocate enough contiguous space | ||
const result = new Uint8Array(byteLength); | ||
let offset = 0; // Copy all the buffers into it. | ||
for (let i = 0; i < l; i++) { | ||
const chunk = buffer[i]; | ||
result.set(chunk, offset); | ||
offset += chunk.byteLength; | ||
byteLength = new Uint8Array(byteLength); | ||
for (var i$53 = (i = 0); i$53 < l; i$53++) { | ||
var chunk = buffer[i$53]; | ||
byteLength.set(chunk, i); | ||
i += chunk.byteLength; | ||
} | ||
result.set(lastChunk, offset); | ||
return result; | ||
byteLength.set(lastChunk, i); | ||
return byteLength; | ||
} | ||
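// mergeBuffer concatenates the buffered Uint8Array pieces plus the final piece of a row into
// one contiguous Uint8Array. (In the minified build the byteLength variable is reused to
// hold the result array.)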
function resolveTypedArray(response, id, buffer, lastChunk, constructor, bytesPerElement) { | ||
// If the view fits into one original buffer, we just reuse that buffer instead of | ||
// copying it out to a separate copy. This means that it's not always possible to | ||
// transfer these values to other threads without copying first since they may | ||
// share an array buffer. For this to work, it must also have bytes aligned to a | ||
// multiple of the size of the type. | ||
const chunk = buffer.length === 0 && lastChunk.byteOffset % bytesPerElement === 0 ? lastChunk : mergeBuffer(buffer, lastChunk); // TODO: The transfer protocol of RSC is little-endian. If the client isn't little-endian | ||
// we should convert it instead. In practice big endian isn't really Web compatible so it's | ||
// somewhat safe to assume that browsers aren't going to run it, but maybe there's some SSR | ||
// server that's affected. | ||
const view = new constructor(chunk.buffer, chunk.byteOffset, chunk.byteLength / bytesPerElement); | ||
resolveBuffer(response, id, view); | ||
function resolveTypedArray( | ||
response, | ||
id, | ||
buffer, | ||
lastChunk, | ||
constructor, | ||
bytesPerElement | ||
) { | ||
buffer = | ||
0 === buffer.length && 0 === lastChunk.byteOffset % bytesPerElement | ||
? lastChunk | ||
: mergeBuffer(buffer, lastChunk); | ||
constructor = new constructor( | ||
buffer.buffer, | ||
buffer.byteOffset, | ||
buffer.byteLength / bytesPerElement | ||
); | ||
resolveBuffer(response, id, constructor); | ||
} | ||
function processFullRow(response, id, tag, buffer, chunk) { | ||
{ | ||
switch (tag) { | ||
case 65 | ||
/* "A" */ | ||
: | ||
// We must always clone to extract it into a separate buffer instead of just a view. | ||
resolveBuffer(response, id, mergeBuffer(buffer, chunk).buffer); | ||
return; | ||
case 67 | ||
/* "C" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int8Array, 1); | ||
return; | ||
case 99 | ||
/* "c" */ | ||
: | ||
resolveBuffer(response, id, buffer.length === 0 ? chunk : mergeBuffer(buffer, chunk)); | ||
return; | ||
case 85 | ||
/* "U" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint8ClampedArray, 1); | ||
return; | ||
case 83 | ||
/* "S" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int16Array, 2); | ||
return; | ||
case 115 | ||
/* "s" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint16Array, 2); | ||
return; | ||
case 76 | ||
/* "L" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int32Array, 4); | ||
return; | ||
case 108 | ||
/* "l" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint32Array, 4); | ||
return; | ||
case 70 | ||
/* "F" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Float32Array, 4); | ||
return; | ||
case 100 | ||
/* "d" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Float64Array, 8); | ||
return; | ||
case 78 | ||
/* "N" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, BigInt64Array, 8); | ||
return; | ||
case 109 | ||
/* "m" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, BigUint64Array, 8); | ||
return; | ||
case 86 | ||
/* "V" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, DataView, 1); | ||
return; | ||
} | ||
function processFullBinaryRow(response, id, tag, buffer, chunk) { | ||
switch (tag) { | ||
case 65: | ||
resolveBuffer(response, id, mergeBuffer(buffer, chunk).buffer); | ||
return; | ||
case 79: | ||
resolveTypedArray(response, id, buffer, chunk, Int8Array, 1); | ||
return; | ||
case 111: | ||
resolveBuffer( | ||
response, | ||
id, | ||
0 === buffer.length ? chunk : mergeBuffer(buffer, chunk) | ||
); | ||
return; | ||
case 85: | ||
resolveTypedArray(response, id, buffer, chunk, Uint8ClampedArray, 1); | ||
return; | ||
case 83: | ||
resolveTypedArray(response, id, buffer, chunk, Int16Array, 2); | ||
return; | ||
case 115: | ||
resolveTypedArray(response, id, buffer, chunk, Uint16Array, 2); | ||
return; | ||
case 76: | ||
resolveTypedArray(response, id, buffer, chunk, Int32Array, 4); | ||
return; | ||
case 108: | ||
resolveTypedArray(response, id, buffer, chunk, Uint32Array, 4); | ||
return; | ||
case 71: | ||
resolveTypedArray(response, id, buffer, chunk, Float32Array, 4); | ||
return; | ||
case 103: | ||
resolveTypedArray(response, id, buffer, chunk, Float64Array, 8); | ||
return; | ||
case 77: | ||
resolveTypedArray(response, id, buffer, chunk, BigInt64Array, 8); | ||
return; | ||
case 109: | ||
resolveTypedArray(response, id, buffer, chunk, BigUint64Array, 8); | ||
return; | ||
case 86: | ||
resolveTypedArray(response, id, buffer, chunk, DataView, 1); | ||
return; | ||
} | ||
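// Tag bytes in the binary switch above, decoded: 65 "A" ArrayBuffer, 79 "O" Int8Array,
// 111 "o" Uint8Array, 85 "U" Uint8ClampedArray, 83 "S" Int16Array, 115 "s" Uint16Array,
// 76 "L" Int32Array, 108 "l" Uint32Array, 71 "G" Float32Array, 103 "g" Float64Array,
// 77 "M" BigInt64Array, 109 "m" BigUint64Array, 86 "V" DataView.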
const stringDecoder = response._stringDecoder; | ||
let row = ''; | ||
for (let i = 0; i < buffer.length; i++) { | ||
row += readPartialStringChunk(stringDecoder, buffer[i]); | ||
} | ||
row += readFinalStringChunk(stringDecoder, chunk); | ||
for ( | ||
var stringDecoder = response._stringDecoder, row = "", i = 0; | ||
i < buffer.length; | ||
i++ | ||
) | ||
row += stringDecoder.decode(buffer[i], decoderOptions); | ||
buffer = row += stringDecoder.decode(chunk); | ||
switch (tag) { | ||
case 73 | ||
/* "I" */ | ||
: | ||
{ | ||
resolveModule(response, id, row); | ||
return; | ||
} | ||
case 72 | ||
/* "H" */ | ||
: | ||
{ | ||
const code = row[0]; | ||
resolveHint(response, code, row.slice(1)); | ||
return; | ||
} | ||
case 69 | ||
/* "E" */ | ||
: | ||
{ | ||
const errorInfo = JSON.parse(row); | ||
{ | ||
resolveErrorProd(response, id, errorInfo.digest); | ||
} | ||
return; | ||
} | ||
case 84 | ||
/* "T" */ | ||
: | ||
{ | ||
resolveText(response, id, row); | ||
return; | ||
} | ||
case 68 | ||
/* "D" */ | ||
: | ||
{ | ||
throw new Error('Failed to read a RSC payload created by a development version of React ' + 'on the server while using a production version on the client. Always use ' + 'matching versions on the server and the client.'); | ||
} | ||
case 80 | ||
/* "P" */ | ||
: | ||
{ | ||
{ | ||
{ | ||
resolvePostponeProd(response, id); | ||
} | ||
return; | ||
} | ||
} | ||
// Fallthrough | ||
default: | ||
/* """ "{" "[" "t" "f" "n" "0" - "9" */ | ||
{ | ||
// We assume anything else is JSON. | ||
resolveModel(response, id, row); | ||
return; | ||
} | ||
} | ||
} | ||
function processBinaryChunk(response, chunk) { | ||
let i = 0; | ||
let rowState = response._rowState; | ||
let rowID = response._rowID; | ||
let rowTag = response._rowTag; | ||
let rowLength = response._rowLength; | ||
const buffer = response._buffer; | ||
const chunkLength = chunk.length; | ||
while (i < chunkLength) { | ||
let lastIdx = -1; | ||
switch (rowState) { | ||
case ROW_ID: | ||
{ | ||
const byte = chunk[i++]; | ||
if (byte === 58 | ||
/* ":" */ | ||
) { | ||
// Finished the rowID, next we'll parse the tag. | ||
rowState = ROW_TAG; | ||
} else { | ||
rowID = rowID << 4 | (byte > 96 ? byte - 87 : byte - 48); | ||
} | ||
continue; | ||
} | ||
case ROW_TAG: | ||
{ | ||
const resolvedRowTag = chunk[i]; | ||
if (resolvedRowTag === 84 | ||
/* "T" */ | ||
|| (resolvedRowTag === 65 | ||
/* "A" */ | ||
|| resolvedRowTag === 67 | ||
/* "C" */ | ||
|| resolvedRowTag === 99 | ||
/* "c" */ | ||
|| resolvedRowTag === 85 | ||
/* "U" */ | ||
|| resolvedRowTag === 83 | ||
/* "S" */ | ||
|| resolvedRowTag === 115 | ||
/* "s" */ | ||
|| resolvedRowTag === 76 | ||
/* "L" */ | ||
|| resolvedRowTag === 108 | ||
/* "l" */ | ||
|| resolvedRowTag === 70 | ||
/* "F" */ | ||
|| resolvedRowTag === 100 | ||
/* "d" */ | ||
|| resolvedRowTag === 78 | ||
/* "N" */ | ||
|| resolvedRowTag === 109 | ||
/* "m" */ | ||
|| resolvedRowTag === 86) | ||
/* "V" */ | ||
) { | ||
rowTag = resolvedRowTag; | ||
rowState = ROW_LENGTH; | ||
i++; | ||
} else if (resolvedRowTag > 64 && resolvedRowTag < 91 | ||
/* "A"-"Z" */ | ||
) { | ||
rowTag = resolvedRowTag; | ||
rowState = ROW_CHUNK_BY_NEWLINE; | ||
i++; | ||
} else { | ||
rowTag = 0; | ||
rowState = ROW_CHUNK_BY_NEWLINE; // This was an unknown tag so it was probably part of the data. | ||
} | ||
continue; | ||
} | ||
case ROW_LENGTH: | ||
{ | ||
const byte = chunk[i++]; | ||
if (byte === 44 | ||
/* "," */ | ||
) { | ||
// Finished the rowLength, next we'll buffer up to that length. | ||
rowState = ROW_CHUNK_BY_LENGTH; | ||
} else { | ||
rowLength = rowLength << 4 | (byte > 96 ? byte - 87 : byte - 48); | ||
} | ||
continue; | ||
} | ||
case ROW_CHUNK_BY_NEWLINE: | ||
{ | ||
// We're looking for a newline | ||
lastIdx = chunk.indexOf(10 | ||
/* "\n" */ | ||
, i); | ||
case 73: | ||
resolveModule(response, id, buffer); | ||
break; | ||
case 72: | ||
id = buffer[0]; | ||
buffer = buffer.slice(1); | ||
response = JSON.parse(buffer, response._fromJSON); | ||
buffer = ReactDOMSharedInternals.d; | ||
switch (id) { | ||
case "D": | ||
buffer.D(response); | ||
break; | ||
} | ||
case ROW_CHUNK_BY_LENGTH: | ||
{ | ||
// We're looking for the remaining byte length | ||
lastIdx = i + rowLength; | ||
if (lastIdx > chunk.length) { | ||
lastIdx = -1; | ||
} | ||
case "C": | ||
"string" === typeof response | ||
? buffer.C(response) | ||
: buffer.C(response[0], response[1]); | ||
break; | ||
} | ||
} | ||
const offset = chunk.byteOffset + i; | ||
if (lastIdx > -1) { | ||
// We found the last chunk of the row | ||
const length = lastIdx - i; | ||
const lastChunk = new Uint8Array(chunk.buffer, offset, length); | ||
processFullRow(response, rowID, rowTag, buffer, lastChunk); // Reset state machine for a new row | ||
i = lastIdx; | ||
if (rowState === ROW_CHUNK_BY_NEWLINE) { | ||
// If we're trailing by a newline we need to skip it. | ||
i++; | ||
case "L": | ||
id = response[0]; | ||
tag = response[1]; | ||
3 === response.length | ||
? buffer.L(id, tag, response[2]) | ||
: buffer.L(id, tag); | ||
break; | ||
case "m": | ||
"string" === typeof response | ||
? buffer.m(response) | ||
: buffer.m(response[0], response[1]); | ||
break; | ||
case "X": | ||
"string" === typeof response | ||
? buffer.X(response) | ||
: buffer.X(response[0], response[1]); | ||
break; | ||
case "S": | ||
"string" === typeof response | ||
? buffer.S(response) | ||
: buffer.S( | ||
response[0], | ||
0 === response[1] ? void 0 : response[1], | ||
3 === response.length ? response[2] : void 0 | ||
); | ||
break; | ||
case "M": | ||
"string" === typeof response | ||
? buffer.M(response) | ||
: buffer.M(response[0], response[1]); | ||
} | ||
rowState = ROW_ID; | ||
rowTag = 0; | ||
rowID = 0; | ||
rowLength = 0; | ||
buffer.length = 0; | ||
} else { | ||
// The rest of this row is in a future chunk. We stash the rest of the | ||
// current chunk until we can process the full row. | ||
const length = chunk.byteLength - i; | ||
const remainingSlice = new Uint8Array(chunk.buffer, offset, length); | ||
buffer.push(remainingSlice); // Update how many bytes we're still waiting for. If we're looking for | ||
// a newline, this doesn't hurt since we'll just ignore it. | ||
rowLength -= remainingSlice.byteLength; | ||
break; | ||
} | ||
case 69: | ||
tag = JSON.parse(buffer); | ||
buffer = resolveErrorProd(); | ||
buffer.digest = tag.digest; | ||
tag = response._chunks; | ||
(chunk = tag.get(id)) | ||
? triggerErrorOnChunk(chunk, buffer) | ||
: tag.set(id, new ReactPromise("rejected", null, buffer, response)); | ||
break; | ||
case 84: | ||
tag = response._chunks; | ||
(chunk = tag.get(id)) && "pending" !== chunk.status | ||
? chunk.reason.enqueueValue(buffer) | ||
: tag.set(id, new ReactPromise("fulfilled", buffer, null, response)); | ||
break; | ||
case 78: | ||
case 68: | ||
case 87: | ||
throw Error( | ||
"Failed to read a RSC payload created by a development version of React on the server while using a production version on the client. Always use matching versions on the server and the client." | ||
); | ||
case 82: | ||
startReadableStream(response, id, void 0); | ||
break; | ||
case 114: | ||
startReadableStream(response, id, "bytes"); | ||
break; | ||
case 88: | ||
startAsyncIterable(response, id, !1); | ||
break; | ||
case 120: | ||
startAsyncIterable(response, id, !0); | ||
break; | ||
case 67: | ||
(response = response._chunks.get(id)) && | ||
"fulfilled" === response.status && | ||
response.reason.close("" === buffer ? '"$undefined"' : buffer); | ||
break; | ||
case 80: | ||
buffer = Error( | ||
"A Server Component was postponed. The reason is omitted in production builds to avoid leaking sensitive details." | ||
); | ||
buffer.$$typeof = REACT_POSTPONE_TYPE; | ||
buffer.stack = "Error: " + buffer.message; | ||
tag = response._chunks; | ||
(chunk = tag.get(id)) | ||
? triggerErrorOnChunk(chunk, buffer) | ||
: tag.set(id, new ReactPromise("rejected", null, buffer, response)); | ||
break; | ||
default: | ||
(tag = response._chunks), | ||
(chunk = tag.get(id)) | ||
? resolveModelChunk(chunk, buffer) | ||
: tag.set( | ||
id, | ||
new ReactPromise("resolved_model", buffer, null, response) | ||
); | ||
} | ||
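// Text row tags above, decoded: 73 "I" client module reference, 72 "H" hint dispatched
// through ReactDOM (D prefetchDNS, C preconnect, L preload, m preloadModule, S preinitStyle,
// X preinitScript, M preinitModuleScript), 69 "E" digest-only error, 84 "T" large text chunk,
// 82 "R"/114 "r" ReadableStream (plain/bytes), 88 "X"/120 "x" async iterable/iterator,
// 67 "C" stream close, 80 "P" postponed render, 78/68/87 dev-only rows that throw in
// production, and anything else is treated as a JSON model row.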
response._rowState = rowState; | ||
response._rowID = rowID; | ||
response._rowTag = rowTag; | ||
response._rowLength = rowLength; | ||
} | ||
function parseModel(response, json) { | ||
return JSON.parse(json, response._fromJSON); | ||
} | ||
function createFromJSONCallback(response) { | ||
// $FlowFixMe[missing-this-annot] | ||
return function (key, value) { | ||
if (typeof value === 'string') { | ||
// We can't use .bind here because we need the "this" value. | ||
if ("string" === typeof value) | ||
return parseModelString(response, this, key, value); | ||
if ("object" === typeof value && null !== value) { | ||
if (value[0] === REACT_ELEMENT_TYPE) { | ||
if ( | ||
((key = { | ||
$$typeof: REACT_ELEMENT_TYPE, | ||
type: value[1], | ||
key: value[2], | ||
ref: null, | ||
props: value[3] | ||
}), | ||
null !== initializingHandler) | ||
) | ||
if ( | ||
((value = initializingHandler), | ||
(initializingHandler = value.parent), | ||
value.errored) | ||
) | ||
(key = new ReactPromise("rejected", null, value.value, response)), | ||
(key = createLazyChunkWrapper(key)); | ||
else if (0 < value.deps) { | ||
var blockedChunk = new ReactPromise( | ||
"blocked", | ||
null, | ||
null, | ||
response | ||
); | ||
value.value = key; | ||
value.chunk = blockedChunk; | ||
key = createLazyChunkWrapper(blockedChunk); | ||
} | ||
} else key = value; | ||
return key; | ||
} | ||
if (typeof value === 'object' && value !== null) { | ||
return parseModelTuple(response, value); | ||
} | ||
return value; | ||
}; | ||
} | ||
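// The JSON reviver above turns "$"-prefixed strings back into references via parseModelString
// and rewrites [REACT_ELEMENT_TYPE, type, key, props] tuples into real React elements; when
// an element still has unresolved dependencies it is wrapped in a lazy chunk so rendering can
// suspend until they arrive.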
function close(response) { | ||
// In case there are any remaining unresolved chunks, they won't | ||
// be resolved now. So we need to issue an error to those. | ||
// Ideally we should be able to early bail out if we kept a | ||
// ref count of pending chunks. | ||
reportGlobalError(response, new Error('Connection closed.')); | ||
} | ||
function noServerCall() { | ||
throw new Error('Server Functions cannot be called during initial render. ' + 'This would create a fetch waterfall. Try to use a Server Component ' + 'to pass data to Client Components instead.'); | ||
throw Error( | ||
"Server Functions cannot be called during initial render. This would create a fetch waterfall. Try to use a Server Component to pass data to Client Components instead." | ||
); | ||
} | ||
function createServerReference(id, callServer) { | ||
return createServerReference$1(id, noServerCall); | ||
} | ||
function createResponseFromOptions(options) { | ||
return createResponse(options.ssrManifest.moduleMap, options.ssrManifest.moduleLoading, noServerCall, typeof options.nonce === 'string' ? options.nonce : undefined); | ||
return new ResponseInstance( | ||
options.serverConsumerManifest.moduleMap, | ||
options.serverConsumerManifest.serverModuleMap, | ||
options.serverConsumerManifest.moduleLoading, | ||
noServerCall, | ||
options.encodeFormAction, | ||
"string" === typeof options.nonce ? options.nonce : void 0, | ||
options && options.temporaryReferences | ||
? options.temporaryReferences | ||
: void 0 | ||
); | ||
} | ||
function startReadingFromStream(response, stream) { | ||
const reader = stream.getReader(); | ||
function progress(_ref) { | ||
let done = _ref.done, | ||
value = _ref.value; | ||
var value = _ref.value; | ||
if (_ref.done) reportGlobalError(response, Error("Connection closed.")); | ||
else { | ||
var i = 0, | ||
rowState = response._rowState; | ||
_ref = response._rowID; | ||
for ( | ||
var rowTag = response._rowTag, | ||
rowLength = response._rowLength, | ||
buffer = response._buffer, | ||
chunkLength = value.length; | ||
i < chunkLength; | ||
if (done) { | ||
close(response); | ||
return; | ||
) { | ||
var lastIdx = -1; | ||
switch (rowState) { | ||
case 0: | ||
lastIdx = value[i++]; | ||
58 === lastIdx | ||
? (rowState = 1) | ||
: (_ref = | ||
(_ref << 4) | (96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 1: | ||
rowState = value[i]; | ||
84 === rowState || | ||
65 === rowState || | ||
79 === rowState || | ||
111 === rowState || | ||
85 === rowState || | ||
83 === rowState || | ||
115 === rowState || | ||
76 === rowState || | ||
108 === rowState || | ||
71 === rowState || | ||
103 === rowState || | ||
77 === rowState || | ||
109 === rowState || | ||
86 === rowState | ||
? ((rowTag = rowState), (rowState = 2), i++) | ||
: (64 < rowState && 91 > rowState) || | ||
35 === rowState || | ||
114 === rowState || | ||
120 === rowState | ||
? ((rowTag = rowState), (rowState = 3), i++) | ||
: ((rowTag = 0), (rowState = 3)); | ||
continue; | ||
case 2: | ||
lastIdx = value[i++]; | ||
44 === lastIdx | ||
? (rowState = 4) | ||
: (rowLength = | ||
(rowLength << 4) | | ||
(96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 3: | ||
lastIdx = value.indexOf(10, i); | ||
break; | ||
case 4: | ||
(lastIdx = i + rowLength), lastIdx > value.length && (lastIdx = -1); | ||
} | ||
var offset = value.byteOffset + i; | ||
if (-1 < lastIdx) | ||
(rowLength = new Uint8Array(value.buffer, offset, lastIdx - i)), | ||
processFullBinaryRow(response, _ref, rowTag, buffer, rowLength), | ||
(i = lastIdx), | ||
3 === rowState && i++, | ||
(rowLength = _ref = rowTag = rowState = 0), | ||
(buffer.length = 0); | ||
else { | ||
value = new Uint8Array(value.buffer, offset, value.byteLength - i); | ||
buffer.push(value); | ||
rowLength -= value.byteLength; | ||
break; | ||
} | ||
} | ||
response._rowState = rowState; | ||
response._rowID = _ref; | ||
response._rowTag = rowTag; | ||
response._rowLength = rowLength; | ||
return reader.read().then(progress).catch(error); | ||
} | ||
const buffer = value; | ||
processBinaryChunk(response, buffer); | ||
return reader.read().then(progress).catch(error); | ||
} | ||
function error(e) { | ||
reportGlobalError(response, e); | ||
} | ||
var reader = stream.getReader(); | ||
reader.read().then(progress).catch(error); | ||
} | ||
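// The inlined parser above reads rows of the form <id in hex>:<tag>[<length in hex>,]<data>.
// rowState 0 accumulates the hex row id up to ":", 1 reads the tag byte, 2 accumulates a hex
// byte length up to ",", 3 scans for a terminating newline, and 4 waits for the declared
// number of bytes; incomplete rows are stashed in response._buffer until the next network
// chunk arrives.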
function createFromReadableStream(stream, options) { | ||
const response = createResponseFromOptions(options); | ||
startReadingFromStream(response, stream); | ||
return getRoot(response); | ||
} | ||
function createFromFetch(promiseForResponse, options) { | ||
const response = createResponseFromOptions(options); | ||
promiseForResponse.then(function (r) { | ||
startReadingFromStream(response, r.body); | ||
}, function (e) { | ||
reportGlobalError(response, e); | ||
exports.createFromFetch = function (promiseForResponse, options) { | ||
var response = createResponseFromOptions(options); | ||
promiseForResponse.then( | ||
function (r) { | ||
startReadingFromStream(response, r.body); | ||
}, | ||
function (e) { | ||
reportGlobalError(response, e); | ||
} | ||
); | ||
return getChunk(response, 0); | ||
}; | ||
exports.createFromReadableStream = function (stream, options) { | ||
options = createResponseFromOptions(options); | ||
startReadingFromStream(options, stream); | ||
return getChunk(options, 0); | ||
}; | ||
exports.createServerReference = function (id) { | ||
return createServerReference$1(id, noServerCall); | ||
}; | ||
exports.createTemporaryReferenceSet = function () { | ||
return new Map(); | ||
}; | ||
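// Rough usage sketch for this build's exports (the fetch URL and the manifest variable are
// assumptions for illustration, not part of this file):
//
//   const root = createFromFetch(fetch("/rsc"), {
//     serverConsumerManifest,                         // produced by the RSC bundler plugin
//     temporaryReferences: createTemporaryReferenceSet()
//   });
//   // `root` resolves to the React tree streamed from the server.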
exports.encodeReply = function (value, options) { | ||
return new Promise(function (resolve, reject) { | ||
var abort = processReply( | ||
value, | ||
"", | ||
options && options.temporaryReferences | ||
? options.temporaryReferences | ||
: void 0, | ||
resolve, | ||
reject | ||
); | ||
if (options && options.signal) { | ||
var signal = options.signal; | ||
if (signal.aborted) abort(signal.reason); | ||
else { | ||
var listener = function () { | ||
abort(signal.reason); | ||
signal.removeEventListener("abort", listener); | ||
}; | ||
signal.addEventListener("abort", listener); | ||
} | ||
} | ||
}); | ||
return getRoot(response); | ||
} | ||
function encodeReply(value) | ||
/* We don't use URLSearchParams yet but maybe */ | ||
{ | ||
return new Promise((resolve, reject) => { | ||
processReply(value, '', resolve, reject); | ||
}); | ||
} | ||
exports.createFromFetch = createFromFetch; | ||
exports.createFromReadableStream = createFromReadableStream; | ||
exports.createServerReference = createServerReference; | ||
exports.encodeReply = encodeReply; | ||
}; |
/** | ||
* @license React | ||
* react-server-dom-webpack-client.node.production.min.js | ||
* react-server-dom-webpack-client.node.production.js | ||
* | ||
@@ -11,846 +11,673 @@ * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
'use strict'; | ||
var util = require('util'); | ||
var ReactDOM = require('react-dom'); | ||
function createStringDecoder() { | ||
return new util.TextDecoder(); | ||
} | ||
const decoderOptions = { | ||
stream: true | ||
}; | ||
function readPartialStringChunk(decoder, buffer) { | ||
return decoder.decode(buffer, decoderOptions); | ||
} | ||
function readFinalStringChunk(decoder, buffer) { | ||
return decoder.decode(buffer); | ||
} | ||
// This is the parsed shape of the wire format which is why it is | ||
// condensed to only the essential information | ||
const ID = 0; | ||
const CHUNKS = 1; | ||
const NAME = 2; // export const ASYNC = 3; | ||
// This logic is correct because we currently only include the 4th tuple member | ||
// when the module is async. If that changes we will need to actually assert | ||
// the value is true. We don't index into the 4th slot because flow does not | ||
// like the potential out of bounds access | ||
function isAsyncImport(metadata) { | ||
return metadata.length === 4; | ||
} | ||
// The reason this function needs to be defined here in this file instead of just | ||
// being exported directly from the WebpackDestination... file is because the | ||
// ClientReferenceMetadata is opaque and we can't unwrap it there. | ||
// This should get inlined and we could also just implement an unwrapping function | ||
// though that risks it getting used in places it shouldn't be. This is unfortunate | ||
// but currently it seems to be the best option we have. | ||
function prepareDestinationForModule(moduleLoading, nonce, metadata) { | ||
prepareDestinationWithChunks(moduleLoading, metadata[CHUNKS], nonce); | ||
} | ||
"use strict"; | ||
var util = require("util"), | ||
ReactDOM = require("react-dom"), | ||
decoderOptions = { stream: !0 }; | ||
function resolveClientReference(bundlerConfig, metadata) { | ||
if (bundlerConfig) { | ||
const moduleExports = bundlerConfig[metadata[ID]]; | ||
let resolvedModuleData = moduleExports[metadata[NAME]]; | ||
let name; | ||
if (resolvedModuleData) { | ||
// The potentially aliased name. | ||
name = resolvedModuleData.name; | ||
} else { | ||
// If we don't have this specific name, we might have the full module. | ||
resolvedModuleData = moduleExports['*']; | ||
if (!resolvedModuleData) { | ||
throw new Error('Could not find the module "' + metadata[ID] + '" in the React SSR Manifest. ' + 'This is probably a bug in the React Server Components bundler.'); | ||
} | ||
name = metadata[NAME]; | ||
var moduleExports = bundlerConfig[metadata[0]]; | ||
if ((bundlerConfig = moduleExports && moduleExports[metadata[2]])) | ||
moduleExports = bundlerConfig.name; | ||
else { | ||
bundlerConfig = moduleExports && moduleExports["*"]; | ||
if (!bundlerConfig) | ||
throw Error( | ||
'Could not find the module "' + | ||
metadata[0] + | ||
'" in the React Server Consumer Manifest. This is probably a bug in the React Server Components bundler.' | ||
); | ||
moduleExports = metadata[2]; | ||
} | ||
if (isAsyncImport(metadata)) { | ||
return [resolvedModuleData.id, resolvedModuleData.chunks, name, 1 | ||
/* async */ | ||
]; | ||
} else { | ||
return [resolvedModuleData.id, resolvedModuleData.chunks, name]; | ||
} | ||
return 4 === metadata.length | ||
? [bundlerConfig.id, bundlerConfig.chunks, moduleExports, 1] | ||
: [bundlerConfig.id, bundlerConfig.chunks, moduleExports]; | ||
} | ||
return metadata; | ||
} | ||
// If they're still pending they're a thenable. This map also exists | ||
// in Webpack but unfortunately it's not exposed so we have to | ||
// replicate it in user space. null means that it has already loaded. | ||
const chunkCache = new Map(); | ||
function resolveServerReference(bundlerConfig, id) { | ||
var name = "", | ||
resolvedModuleData = bundlerConfig[id]; | ||
if (resolvedModuleData) name = resolvedModuleData.name; | ||
else { | ||
var idx = id.lastIndexOf("#"); | ||
-1 !== idx && | ||
((name = id.slice(idx + 1)), | ||
(resolvedModuleData = bundlerConfig[id.slice(0, idx)])); | ||
if (!resolvedModuleData) | ||
throw Error( | ||
'Could not find the module "' + | ||
id + | ||
'" in the React Server Manifest. This is probably a bug in the React Server Components bundler.' | ||
); | ||
} | ||
return resolvedModuleData.async | ||
? [resolvedModuleData.id, resolvedModuleData.chunks, name, 1] | ||
: [resolvedModuleData.id, resolvedModuleData.chunks, name]; | ||
} | ||
var chunkCache = new Map(); | ||
function requireAsyncModule(id) { | ||
// We've already loaded all the chunks. We can require the module. | ||
const promise = __webpack_require__(id); | ||
if (typeof promise.then !== 'function') { | ||
// This wasn't a promise after all. | ||
var promise = __webpack_require__(id); | ||
if ("function" !== typeof promise.then || "fulfilled" === promise.status) | ||
return null; | ||
} else if (promise.status === 'fulfilled') { | ||
// This module was already resolved earlier. | ||
return null; | ||
} else { | ||
// Instrument the Promise to stash the result. | ||
promise.then(value => { | ||
const fulfilledThenable = promise; | ||
fulfilledThenable.status = 'fulfilled'; | ||
fulfilledThenable.value = value; | ||
}, reason => { | ||
const rejectedThenable = promise; | ||
rejectedThenable.status = 'rejected'; | ||
rejectedThenable.reason = reason; | ||
}); | ||
return promise; | ||
} | ||
promise.then( | ||
function (value) { | ||
promise.status = "fulfilled"; | ||
promise.value = value; | ||
}, | ||
function (reason) { | ||
promise.status = "rejected"; | ||
promise.reason = reason; | ||
} | ||
); | ||
return promise; | ||
} | ||
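// requireAsyncModule instruments the promise returned by __webpack_require__ with
// status/value/reason fields so that requireModule below can later read the resolved exports
// synchronously (or rethrow the rejection reason) without awaiting again.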
function ignoreReject() {// We rely on rejected promises to be handled by another listener. | ||
} // Start preloading the modules since we might need them soon. | ||
// This function doesn't suspend. | ||
function ignoreReject() {} | ||
function preloadModule(metadata) { | ||
const chunks = metadata[CHUNKS]; | ||
const promises = []; | ||
let i = 0; | ||
while (i < chunks.length) { | ||
const chunkId = chunks[i++]; | ||
for (var chunks = metadata[1], promises = [], i = 0; i < chunks.length; ) { | ||
var chunkId = chunks[i++]; | ||
chunks[i++]; | ||
const entry = chunkCache.get(chunkId); | ||
if (entry === undefined) { | ||
const thenable = loadChunk(chunkId); | ||
promises.push(thenable); // $FlowFixMe[method-unbinding] | ||
const resolve = chunkCache.set.bind(chunkCache, chunkId, null); | ||
thenable.then(resolve, ignoreReject); | ||
chunkCache.set(chunkId, thenable); | ||
} else if (entry !== null) { | ||
var entry = chunkCache.get(chunkId); | ||
if (void 0 === entry) { | ||
entry = __webpack_chunk_load__(chunkId); | ||
promises.push(entry); | ||
} | ||
var resolve = chunkCache.set.bind(chunkCache, chunkId, null); | ||
entry.then(resolve, ignoreReject); | ||
chunkCache.set(chunkId, entry); | ||
} else null !== entry && promises.push(entry); | ||
} | ||
if (isAsyncImport(metadata)) { | ||
if (promises.length === 0) { | ||
return requireAsyncModule(metadata[ID]); | ||
} else { | ||
return Promise.all(promises).then(() => { | ||
return requireAsyncModule(metadata[ID]); | ||
}); | ||
} | ||
} else if (promises.length > 0) { | ||
return Promise.all(promises); | ||
} else { | ||
return null; | ||
} | ||
} // Actually require the module or suspend if it's not yet ready. | ||
// Increase priority if necessary. | ||
return 4 === metadata.length | ||
? 0 === promises.length | ||
? requireAsyncModule(metadata[0]) | ||
: Promise.all(promises).then(function () { | ||
return requireAsyncModule(metadata[0]); | ||
}) | ||
: 0 < promises.length | ||
? Promise.all(promises) | ||
: null; | ||
} | ||
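// preloadModule walks the metadata's chunk list (pairs of chunk id and filename), kicks off
// __webpack_chunk_load__ for chunks not yet in chunkCache, and caches the in-flight thenable
// (replaced with null once loaded). For async modules (4-element metadata) it chains
// requireAsyncModule so the module promise is instrumented as soon as its chunks are ready.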
function requireModule(metadata) { | ||
let moduleExports = __webpack_require__(metadata[ID]); | ||
if (isAsyncImport(metadata)) { | ||
if (typeof moduleExports.then !== 'function') ; else if (moduleExports.status === 'fulfilled') { | ||
// This Promise should've been instrumented by preloadModule. | ||
var moduleExports = __webpack_require__(metadata[0]); | ||
if (4 === metadata.length && "function" === typeof moduleExports.then) | ||
if ("fulfilled" === moduleExports.status) | ||
moduleExports = moduleExports.value; | ||
} else { | ||
throw moduleExports.reason; | ||
else throw moduleExports.reason; | ||
return "*" === metadata[2] | ||
? moduleExports | ||
: "" === metadata[2] | ||
? moduleExports.__esModule | ||
? moduleExports.default | ||
: moduleExports | ||
: moduleExports[metadata[2]]; | ||
} | ||
function prepareDestinationWithChunks(moduleLoading, chunks, nonce$jscomp$0) { | ||
if (null !== moduleLoading) | ||
for (var i = 1; i < chunks.length; i += 2) { | ||
var nonce = nonce$jscomp$0, | ||
JSCompiler_temp_const = ReactDOMSharedInternals.d, | ||
JSCompiler_temp_const$jscomp$0 = JSCompiler_temp_const.X, | ||
JSCompiler_temp_const$jscomp$1 = moduleLoading.prefix + chunks[i]; | ||
var JSCompiler_inline_result = moduleLoading.crossOrigin; | ||
JSCompiler_inline_result = | ||
"string" === typeof JSCompiler_inline_result | ||
? "use-credentials" === JSCompiler_inline_result | ||
? JSCompiler_inline_result | ||
: "" | ||
: void 0; | ||
JSCompiler_temp_const$jscomp$0.call( | ||
JSCompiler_temp_const, | ||
JSCompiler_temp_const$jscomp$1, | ||
{ crossOrigin: JSCompiler_inline_result, nonce: nonce } | ||
); | ||
} | ||
} | ||
if (metadata[NAME] === '*') { | ||
// This is a placeholder value that represents that the caller imported this | ||
// as a CommonJS module as is. | ||
return moduleExports; | ||
} | ||
if (metadata[NAME] === '') { | ||
// This is a placeholder value that represents that the caller accessed the | ||
// default property of this if it was an ESM interop module. | ||
return moduleExports.__esModule ? moduleExports.default : moduleExports; | ||
} | ||
return moduleExports[metadata[NAME]]; | ||
} | ||
function loadChunk(chunkId, filename) { | ||
return __webpack_chunk_load__(chunkId); | ||
var ReactDOMSharedInternals = | ||
ReactDOM.__DOM_INTERNALS_DO_NOT_USE_OR_WARN_USERS_THEY_CANNOT_UPGRADE, | ||
REACT_ELEMENT_TYPE = Symbol.for("react.transitional.element"), | ||
REACT_LAZY_TYPE = Symbol.for("react.lazy"), | ||
REACT_POSTPONE_TYPE = Symbol.for("react.postpone"), | ||
MAYBE_ITERATOR_SYMBOL = Symbol.iterator; | ||
function getIteratorFn(maybeIterable) { | ||
if (null === maybeIterable || "object" !== typeof maybeIterable) return null; | ||
maybeIterable = | ||
(MAYBE_ITERATOR_SYMBOL && maybeIterable[MAYBE_ITERATOR_SYMBOL]) || | ||
maybeIterable["@@iterator"]; | ||
return "function" === typeof maybeIterable ? maybeIterable : null; | ||
} | ||
function prepareDestinationWithChunks(moduleLoading, // Chunks are double-indexed [..., idx, filenamex, idy, filenamey, ...] | ||
chunks, nonce) { | ||
if (moduleLoading !== null) { | ||
for (let i = 1; i < chunks.length; i += 2) { | ||
preinitScriptForSSR(moduleLoading.prefix + chunks[i], nonce, moduleLoading.crossOrigin); | ||
var ASYNC_ITERATOR = Symbol.asyncIterator, | ||
isArrayImpl = Array.isArray, | ||
getPrototypeOf = Object.getPrototypeOf, | ||
ObjectPrototype = Object.prototype, | ||
knownServerReferences = new WeakMap(); | ||
function serializeNumber(number) { | ||
return Number.isFinite(number) | ||
? 0 === number && -Infinity === 1 / number | ||
? "$-0" | ||
: number | ||
: Infinity === number | ||
? "$Infinity" | ||
: -Infinity === number | ||
? "$-Infinity" | ||
: "$NaN"; | ||
} | ||
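// Numbers survive JSON except for a few special cases, which are encoded as strings the
// parser on the other side recognizes: -0 becomes "$-0", Infinity "$Infinity", -Infinity
// "$-Infinity", and NaN "$NaN"; every other finite number is passed through unchanged.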
function processReply( | ||
root, | ||
formFieldPrefix, | ||
temporaryReferences, | ||
resolve, | ||
reject | ||
) { | ||
function serializeTypedArray(tag, typedArray) { | ||
typedArray = new Blob([ | ||
new Uint8Array( | ||
typedArray.buffer, | ||
typedArray.byteOffset, | ||
typedArray.byteLength | ||
) | ||
]); | ||
var blobId = nextPartId++; | ||
null === formData && (formData = new FormData()); | ||
formData.append(formFieldPrefix + blobId, typedArray); | ||
return "$" + tag + blobId.toString(16); | ||
} | ||
function serializeBinaryReader(reader) { | ||
function progress(entry) { | ||
entry.done | ||
? ((entry = nextPartId++), | ||
data.append(formFieldPrefix + entry, new Blob(buffer)), | ||
data.append( | ||
formFieldPrefix + streamId, | ||
'"$o' + entry.toString(16) + '"' | ||
), | ||
data.append(formFieldPrefix + streamId, "C"), | ||
pendingParts--, | ||
0 === pendingParts && resolve(data)) | ||
: (buffer.push(entry.value), | ||
reader.read(new Uint8Array(1024)).then(progress, reject)); | ||
} | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++, | ||
buffer = []; | ||
reader.read(new Uint8Array(1024)).then(progress, reject); | ||
return "$r" + streamId.toString(16); | ||
} | ||
} | ||
const ReactDOMSharedInternals = ReactDOM.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED; | ||
function getCrossOriginString(input) { | ||
if (typeof input === 'string') { | ||
return input === 'use-credentials' ? input : ''; | ||
} | ||
return undefined; | ||
} | ||
// This client file is in the shared folder because it applies to both SSR and browser contexts. | ||
const ReactDOMCurrentDispatcher = ReactDOMSharedInternals.Dispatcher; | ||
function dispatchHint(code, model) { | ||
const dispatcher = ReactDOMCurrentDispatcher.current; | ||
if (dispatcher) { | ||
switch (code) { | ||
case 'D': | ||
{ | ||
const refined = refineModel(code, model); | ||
const href = refined; | ||
dispatcher.prefetchDNS(href); | ||
return; | ||
function serializeReader(reader) { | ||
function progress(entry) { | ||
if (entry.done) | ||
data.append(formFieldPrefix + streamId, "C"), | ||
pendingParts--, | ||
0 === pendingParts && resolve(data); | ||
else | ||
try { | ||
var partJSON = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, partJSON); | ||
reader.read().then(progress, reject); | ||
} catch (x) { | ||
reject(x); | ||
} | ||
case 'C': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preconnect(href); | ||
} else { | ||
const href = refined[0]; | ||
const crossOrigin = refined[1]; | ||
dispatcher.preconnect(href, crossOrigin); | ||
} | ||
return; | ||
} | ||
case 'L': | ||
{ | ||
const refined = refineModel(code, model); | ||
const href = refined[0]; | ||
const as = refined[1]; | ||
if (refined.length === 3) { | ||
const options = refined[2]; | ||
dispatcher.preload(href, as, options); | ||
} else { | ||
dispatcher.preload(href, as); | ||
} | ||
return; | ||
} | ||
case 'm': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preloadModule(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preloadModule(href, options); | ||
} | ||
return; | ||
} | ||
case 'S': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitStyle(href); | ||
} else { | ||
const href = refined[0]; | ||
const precedence = refined[1] === 0 ? undefined : refined[1]; | ||
const options = refined.length === 3 ? refined[2] : undefined; | ||
dispatcher.preinitStyle(href, precedence, options); | ||
} | ||
return; | ||
} | ||
case 'X': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitScript(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preinitScript(href, options); | ||
} | ||
return; | ||
} | ||
case 'M': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitModuleScript(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preinitModuleScript(href, options); | ||
} | ||
return; | ||
} | ||
} | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++; | ||
reader.read().then(progress, reject); | ||
return "$R" + streamId.toString(16); | ||
} | ||
} // Flow is having trouble refining the HintModels so we help it a bit. | ||
// This should be compiled out in the production build. | ||
function refineModel(code, model) { | ||
return model; | ||
} | ||
function preinitScriptForSSR(href, nonce, crossOrigin) { | ||
const dispatcher = ReactDOMCurrentDispatcher.current; | ||
if (dispatcher) { | ||
dispatcher.preinitScript(href, { | ||
crossOrigin: getCrossOriginString(crossOrigin), | ||
nonce | ||
}); | ||
function serializeReadableStream(stream) { | ||
try { | ||
var binaryReader = stream.getReader({ mode: "byob" }); | ||
} catch (x) { | ||
return serializeReader(stream.getReader()); | ||
} | ||
return serializeBinaryReader(binaryReader); | ||
} | ||
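// serializeReadableStream prefers a BYOB ("bring your own buffer") reader so the bytes can be
// collected and sent as a single Blob part, with the value encoded as "$r<id>"; if the stream
// does not support BYOB reads it falls back to the default reader, which JSON-serializes each
// chunk into the "$R<id>" stream parts.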
} | ||
// ATTENTION | ||
// When adding new symbols to this file, | ||
// Please consider also adding to 'react-devtools-shared/src/backend/ReactSymbols' | ||
// The Symbol used to tag the ReactElement-like types. | ||
const REACT_ELEMENT_TYPE = Symbol.for('react.element'); | ||
const REACT_LAZY_TYPE = Symbol.for('react.lazy'); | ||
const REACT_POSTPONE_TYPE = Symbol.for('react.postpone'); | ||
const MAYBE_ITERATOR_SYMBOL = Symbol.iterator; | ||
const FAUX_ITERATOR_SYMBOL = '@@iterator'; | ||
function getIteratorFn(maybeIterable) { | ||
if (maybeIterable === null || typeof maybeIterable !== 'object') { | ||
return null; | ||
} | ||
const maybeIterator = MAYBE_ITERATOR_SYMBOL && maybeIterable[MAYBE_ITERATOR_SYMBOL] || maybeIterable[FAUX_ITERATOR_SYMBOL]; | ||
if (typeof maybeIterator === 'function') { | ||
return maybeIterator; | ||
} | ||
return null; | ||
} | ||
const isArrayImpl = Array.isArray; // eslint-disable-next-line no-redeclare | ||
function isArray(a) { | ||
return isArrayImpl(a); | ||
} | ||
const getPrototypeOf = Object.getPrototypeOf; | ||
const ObjectPrototype = Object.prototype; | ||
const knownServerReferences = new WeakMap(); // Serializable values | ||
// Thenable<ReactServerValue> | ||
// function serializeByValueID(id: number): string { | ||
// return '$' + id.toString(16); | ||
// } | ||
function serializePromiseID(id) { | ||
return '$@' + id.toString(16); | ||
} | ||
function serializeServerReferenceID(id) { | ||
return '$F' + id.toString(16); | ||
} | ||
function serializeSymbolReference(name) { | ||
return '$S' + name; | ||
} | ||
function serializeFormDataReference(id) { | ||
// Why K? F is "Function". D is "Date". What else? | ||
return '$K' + id.toString(16); | ||
} | ||
function serializeNumber(number) { | ||
if (Number.isFinite(number)) { | ||
if (number === 0 && 1 / number === -Infinity) { | ||
return '$-0'; | ||
} else { | ||
return number; | ||
function serializeAsyncIterable(iterable, iterator) { | ||
function progress(entry) { | ||
if (entry.done) { | ||
if (void 0 === entry.value) | ||
data.append(formFieldPrefix + streamId, "C"); | ||
else | ||
try { | ||
var partJSON = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, "C" + partJSON); | ||
} catch (x) { | ||
reject(x); | ||
return; | ||
} | ||
pendingParts--; | ||
0 === pendingParts && resolve(data); | ||
} else | ||
try { | ||
var partJSON$22 = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, partJSON$22); | ||
iterator.next().then(progress, reject); | ||
} catch (x$23) { | ||
reject(x$23); | ||
} | ||
} | ||
} else { | ||
if (number === Infinity) { | ||
return '$Infinity'; | ||
} else if (number === -Infinity) { | ||
return '$-Infinity'; | ||
} else { | ||
return '$NaN'; | ||
} | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++; | ||
iterable = iterable === iterator; | ||
iterator.next().then(progress, reject); | ||
return "$" + (iterable ? "x" : "X") + streamId.toString(16); | ||
} | ||
} | ||
function serializeUndefined() { | ||
return '$undefined'; | ||
} | ||
function serializeDateFromDateJSON(dateJSON) { | ||
// JSON.stringify automatically calls Date.prototype.toJSON which calls toISOString. | ||
// We need only tack on a $D prefix. | ||
return '$D' + dateJSON; | ||
} | ||
function serializeBigInt(n) { | ||
return '$n' + n.toString(10); | ||
} | ||
function serializeMapID(id) { | ||
return '$Q' + id.toString(16); | ||
} | ||
function serializeSetID(id) { | ||
return '$W' + id.toString(16); | ||
} | ||
function escapeStringValue(value) { | ||
if (value[0] === '$') { | ||
// We need to escape $ prefixed strings since we use those to encode | ||
// references to IDs and as special symbol values. | ||
return '$' + value; | ||
} else { | ||
return value; | ||
} | ||
} | ||
function processReply(root, formFieldPrefix, resolve, reject) { | ||
let nextPartId = 1; | ||
let pendingParts = 0; | ||
let formData = null; | ||
function resolveToJSON(key, value) { | ||
const parent = this; // Make sure that `parent[key]` wasn't JSONified before `value` was passed to us | ||
if (value === null) { | ||
return null; | ||
} | ||
if (typeof value === 'object') { | ||
// $FlowFixMe[method-unbinding] | ||
if (typeof value.then === 'function') { | ||
// We assume that any object with a .then property is a "Thenable" type, | ||
// or a Promise type. Either of which can be represented by a Promise. | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to stream this value. | ||
formData = new FormData(); | ||
} | ||
if (null === value) return null; | ||
if ("object" === typeof value) { | ||
switch (value.$$typeof) { | ||
case REACT_ELEMENT_TYPE: | ||
if (void 0 !== temporaryReferences && -1 === key.indexOf(":")) { | ||
var parentReference = writtenObjects.get(this); | ||
if (void 0 !== parentReference) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), | ||
"$T" | ||
); | ||
} | ||
throw Error( | ||
"React Element cannot be passed to Server Functions from the Client without a temporary reference set. Pass a TemporaryReferenceSet to the options." | ||
); | ||
case REACT_LAZY_TYPE: | ||
parentReference = value._payload; | ||
var init = value._init; | ||
null === formData && (formData = new FormData()); | ||
pendingParts++; | ||
try { | ||
var resolvedModel = init(parentReference), | ||
lazyId = nextPartId++, | ||
partJSON = serializeModel(resolvedModel, lazyId); | ||
formData.append(formFieldPrefix + lazyId, partJSON); | ||
return "$" + lazyId.toString(16); | ||
} catch (x) { | ||
if ( | ||
"object" === typeof x && | ||
null !== x && | ||
"function" === typeof x.then | ||
) { | ||
pendingParts++; | ||
var lazyId$24 = nextPartId++; | ||
parentReference = function () { | ||
try { | ||
var partJSON$25 = serializeModel(value, lazyId$24), | ||
data$26 = formData; | ||
data$26.append(formFieldPrefix + lazyId$24, partJSON$25); | ||
pendingParts--; | ||
0 === pendingParts && resolve(data$26); | ||
} catch (reason) { | ||
reject(reason); | ||
} | ||
}; | ||
x.then(parentReference, parentReference); | ||
return "$" + lazyId$24.toString(16); | ||
} | ||
reject(x); | ||
return null; | ||
} finally { | ||
pendingParts--; | ||
} | ||
} | ||
if ("function" === typeof value.then) { | ||
null === formData && (formData = new FormData()); | ||
pendingParts++; | ||
const promiseId = nextPartId++; | ||
const thenable = value; | ||
thenable.then(partValue => { | ||
const partJSON = JSON.stringify(partValue, resolveToJSON); // $FlowFixMe[incompatible-type] We know it's not null because we assigned it above. | ||
const data = formData; // eslint-disable-next-line react-internal/safe-string-coercion | ||
data.append(formFieldPrefix + promiseId, partJSON); | ||
pendingParts--; | ||
if (pendingParts === 0) { | ||
resolve(data); | ||
var promiseId = nextPartId++; | ||
value.then(function (partValue) { | ||
try { | ||
var partJSON$28 = serializeModel(partValue, promiseId); | ||
partValue = formData; | ||
partValue.append(formFieldPrefix + promiseId, partJSON$28); | ||
pendingParts--; | ||
0 === pendingParts && resolve(partValue); | ||
} catch (reason) { | ||
reject(reason); | ||
} | ||
}, reason => { | ||
// In the future we could consider serializing this as an error | ||
// that throws on the server instead. | ||
reject(reason); | ||
}); | ||
return serializePromiseID(promiseId); | ||
}, reject); | ||
return "$@" + promiseId.toString(16); | ||
} | ||
if (isArray(value)) { | ||
// $FlowFixMe[incompatible-return] | ||
return value; | ||
      } // TODO: Should we use Object.prototype.toString.call() to test for cross-realm objects? | ||
parentReference = writtenObjects.get(value); | ||
if (void 0 !== parentReference) | ||
if (modelRoot === value) modelRoot = null; | ||
else return parentReference; | ||
else | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference && | ||
((key = parentReference + ":" + key), | ||
writtenObjects.set(value, key), | ||
void 0 !== temporaryReferences && | ||
temporaryReferences.set(key, value))); | ||
if (isArrayImpl(value)) return value; | ||
if (value instanceof FormData) { | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to use rich objects as its values. | ||
formData = new FormData(); | ||
} | ||
const data = formData; | ||
const refId = nextPartId++; // Copy all the form fields with a prefix for this reference. | ||
// These must come first in the form order because we assume that all the | ||
// fields are available before this is referenced. | ||
const prefix = formFieldPrefix + refId + '_'; // $FlowFixMe[prop-missing]: FormData has forEach. | ||
value.forEach((originalValue, originalKey) => { | ||
data.append(prefix + originalKey, originalValue); | ||
null === formData && (formData = new FormData()); | ||
var data$32 = formData; | ||
key = nextPartId++; | ||
var prefix = formFieldPrefix + key + "_"; | ||
value.forEach(function (originalValue, originalKey) { | ||
data$32.append(prefix + originalKey, originalValue); | ||
}); | ||
return serializeFormDataReference(refId); | ||
return "$K" + key.toString(16); | ||
} | ||
if (value instanceof Map) { | ||
const partJSON = JSON.stringify(Array.from(value), resolveToJSON); | ||
if (formData === null) { | ||
formData = new FormData(); | ||
} | ||
const mapId = nextPartId++; | ||
formData.append(formFieldPrefix + mapId, partJSON); | ||
return serializeMapID(mapId); | ||
if (value instanceof Map) | ||
return ( | ||
(key = nextPartId++), | ||
(parentReference = serializeModel(Array.from(value), key)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$Q" + key.toString(16) | ||
); | ||
if (value instanceof Set) | ||
return ( | ||
(key = nextPartId++), | ||
(parentReference = serializeModel(Array.from(value), key)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$W" + key.toString(16) | ||
); | ||
if (value instanceof ArrayBuffer) | ||
return ( | ||
(key = new Blob([value])), | ||
(parentReference = nextPartId++), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + parentReference, key), | ||
"$A" + parentReference.toString(16) | ||
); | ||
if (value instanceof Int8Array) return serializeTypedArray("O", value); | ||
if (value instanceof Uint8Array) return serializeTypedArray("o", value); | ||
if (value instanceof Uint8ClampedArray) | ||
return serializeTypedArray("U", value); | ||
if (value instanceof Int16Array) return serializeTypedArray("S", value); | ||
if (value instanceof Uint16Array) return serializeTypedArray("s", value); | ||
if (value instanceof Int32Array) return serializeTypedArray("L", value); | ||
if (value instanceof Uint32Array) return serializeTypedArray("l", value); | ||
if (value instanceof Float32Array) return serializeTypedArray("G", value); | ||
if (value instanceof Float64Array) return serializeTypedArray("g", value); | ||
if (value instanceof BigInt64Array) | ||
return serializeTypedArray("M", value); | ||
if (value instanceof BigUint64Array) | ||
return serializeTypedArray("m", value); | ||
if (value instanceof DataView) return serializeTypedArray("V", value); | ||
if ("function" === typeof Blob && value instanceof Blob) | ||
return ( | ||
null === formData && (formData = new FormData()), | ||
(key = nextPartId++), | ||
formData.append(formFieldPrefix + key, value), | ||
"$B" + key.toString(16) | ||
); | ||
if ((key = getIteratorFn(value))) | ||
return ( | ||
(parentReference = key.call(value)), | ||
parentReference === value | ||
? ((key = nextPartId++), | ||
(parentReference = serializeModel( | ||
Array.from(parentReference), | ||
key | ||
)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$i" + key.toString(16)) | ||
: Array.from(parentReference) | ||
); | ||
if ( | ||
"function" === typeof ReadableStream && | ||
value instanceof ReadableStream | ||
) | ||
return serializeReadableStream(value); | ||
key = value[ASYNC_ITERATOR]; | ||
if ("function" === typeof key) | ||
return serializeAsyncIterable(value, key.call(value)); | ||
key = getPrototypeOf(value); | ||
if ( | ||
key !== ObjectPrototype && | ||
(null === key || null !== getPrototypeOf(key)) | ||
) { | ||
if (void 0 === temporaryReferences) | ||
throw Error( | ||
"Only plain objects, and a few built-ins, can be passed to Server Functions. Classes or null prototypes are not supported." | ||
); | ||
return "$T"; | ||
} | ||
if (value instanceof Set) { | ||
const partJSON = JSON.stringify(Array.from(value), resolveToJSON); | ||
if (formData === null) { | ||
formData = new FormData(); | ||
} | ||
const setId = nextPartId++; | ||
formData.append(formFieldPrefix + setId, partJSON); | ||
return serializeSetID(setId); | ||
} | ||
const iteratorFn = getIteratorFn(value); | ||
if (iteratorFn) { | ||
return Array.from(value); | ||
} // Verify that this is a simple plain object. | ||
const proto = getPrototypeOf(value); | ||
if (proto !== ObjectPrototype && (proto === null || getPrototypeOf(proto) !== null)) { | ||
throw new Error('Only plain objects, and a few built-ins, can be passed to Server Actions. ' + 'Classes or null prototypes are not supported.'); | ||
} | ||
return value; | ||
} | ||
if (typeof value === 'string') { | ||
// TODO: Maybe too clever. If we support URL there's no similar trick. | ||
if (value[value.length - 1] === 'Z') { | ||
// Possibly a Date, whose toJSON automatically calls toISOString | ||
// $FlowFixMe[incompatible-use] | ||
const originalValue = parent[key]; | ||
if (originalValue instanceof Date) { | ||
return serializeDateFromDateJSON(value); | ||
} | ||
} | ||
return escapeStringValue(value); | ||
if ("string" === typeof value) { | ||
if ("Z" === value[value.length - 1] && this[key] instanceof Date) | ||
return "$D" + value; | ||
key = "$" === value[0] ? "$" + value : value; | ||
return key; | ||
} | ||
if (typeof value === 'boolean') { | ||
return value; | ||
if ("boolean" === typeof value) return value; | ||
if ("number" === typeof value) return serializeNumber(value); | ||
if ("undefined" === typeof value) return "$undefined"; | ||
if ("function" === typeof value) { | ||
parentReference = knownServerReferences.get(value); | ||
if (void 0 !== parentReference) | ||
return ( | ||
(key = JSON.stringify(parentReference, resolveToJSON)), | ||
null === formData && (formData = new FormData()), | ||
(parentReference = nextPartId++), | ||
formData.set(formFieldPrefix + parentReference, key), | ||
"$F" + parentReference.toString(16) | ||
); | ||
if ( | ||
void 0 !== temporaryReferences && | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference) | ||
) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), "$T" | ||
); | ||
throw Error( | ||
"Client Functions cannot be passed directly to Server Functions. Only Functions passed from the Server can be passed back again." | ||
); | ||
} | ||
if (typeof value === 'number') { | ||
return serializeNumber(value); | ||
if ("symbol" === typeof value) { | ||
if ( | ||
void 0 !== temporaryReferences && | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference) | ||
) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), "$T" | ||
); | ||
throw Error( | ||
"Symbols cannot be passed to a Server Function without a temporary reference set. Pass a TemporaryReferenceSet to the options." | ||
); | ||
} | ||
if (typeof value === 'undefined') { | ||
return serializeUndefined(); | ||
} | ||
if (typeof value === 'function') { | ||
const metaData = knownServerReferences.get(value); | ||
if (metaData !== undefined) { | ||
const metaDataJSON = JSON.stringify(metaData, resolveToJSON); | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to stream this value. | ||
formData = new FormData(); | ||
} // The reference to this function came from the same client so we can pass it back. | ||
const refId = nextPartId++; // eslint-disable-next-line react-internal/safe-string-coercion | ||
formData.set(formFieldPrefix + refId, metaDataJSON); | ||
return serializeServerReferenceID(refId); | ||
} | ||
throw new Error('Client Functions cannot be passed directly to Server Functions. ' + 'Only Functions passed from the Server can be passed back again.'); | ||
} | ||
if (typeof value === 'symbol') { | ||
// $FlowFixMe[incompatible-type] `description` might be undefined | ||
const name = value.description; | ||
if (Symbol.for(name) !== value) { | ||
throw new Error('Only global symbols received from Symbol.for(...) can be passed to Server Functions. ' + ("The symbol Symbol.for(" + // $FlowFixMe[incompatible-type] `description` might be undefined | ||
value.description + ") cannot be found among global symbols.")); | ||
} | ||
return serializeSymbolReference(name); | ||
} | ||
if (typeof value === 'bigint') { | ||
return serializeBigInt(value); | ||
} | ||
throw new Error("Type " + typeof value + " is not supported as an argument to a Server Function."); | ||
} // $FlowFixMe[incompatible-type] it's not going to be undefined because we'll encode it. | ||
const json = JSON.stringify(root, resolveToJSON); | ||
if (formData === null) { | ||
// If it's a simple data structure, we just use plain JSON. | ||
resolve(json); | ||
} else { | ||
// Otherwise, we use FormData to let us stream in the result. | ||
formData.set(formFieldPrefix + '0', json); | ||
if (pendingParts === 0) { | ||
// $FlowFixMe[incompatible-call] this has already been refined. | ||
resolve(formData); | ||
} | ||
if ("bigint" === typeof value) return "$n" + value.toString(10); | ||
throw Error( | ||
"Type " + | ||
typeof value + | ||
" is not supported as an argument to a Server Function." | ||
); | ||
} | ||
function serializeModel(model, id) { | ||
"object" === typeof model && | ||
null !== model && | ||
((id = "$" + id.toString(16)), | ||
writtenObjects.set(model, id), | ||
void 0 !== temporaryReferences && temporaryReferences.set(id, model)); | ||
modelRoot = model; | ||
return JSON.stringify(model, resolveToJSON); | ||
} | ||
var nextPartId = 1, | ||
pendingParts = 0, | ||
formData = null, | ||
writtenObjects = new WeakMap(), | ||
modelRoot = root, | ||
json = serializeModel(root, 0); | ||
null === formData | ||
? resolve(json) | ||
: (formData.set(formFieldPrefix + "0", json), | ||
0 === pendingParts && resolve(formData)); | ||
return function () { | ||
0 < pendingParts && | ||
((pendingParts = 0), | ||
null === formData ? resolve(json) : resolve(formData)); | ||
}; | ||
} | ||
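// Illustrative usage sketch, not part of the diff. processReply serializes the | ||
// arguments of a Server Function call: when nothing needs streaming it resolves | ||
// synchronously with a plain JSON string, otherwise it upgrades the body to | ||
// FormData and resolves once every pending part (Promise, Blob, stream, lazy | ||
// value) has been appended. The function it returns flushes early, resolving | ||
// with whatever has been serialized so far. Assuming the five-argument form | ||
// used by encodeFormData below (root, formFieldPrefix, temporaryReferences, | ||
// resolve, reject): | ||
processReply( | ||
  { when: new Date(), count: 2n }, // root model to encode | ||
  "",                              // form field prefix | ||
  undefined,                       // optional TemporaryReferenceSet | ||
  function (body) { | ||
    // Plain data: body is a JSON string; rich data: body is a FormData. | ||
    console.log(typeof body === "string" ? "JSON body" : "FormData body"); | ||
  }, | ||
  function (error) { | ||
    console.error(error); | ||
  } | ||
); | ||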
const boundCache = new WeakMap(); | ||
var boundCache = new WeakMap(); | ||
function encodeFormData(reference) { | ||
let resolve, reject; // We need to have a handle on the thenable so that we can synchronously set | ||
// its status from processReply, when it can complete synchronously. | ||
const thenable = new Promise((res, rej) => { | ||
resolve = res; | ||
reject = rej; | ||
}); | ||
processReply(reference, '', body => { | ||
if (typeof body === 'string') { | ||
const data = new FormData(); | ||
data.append('0', body); | ||
body = data; | ||
var resolve, | ||
reject, | ||
thenable = new Promise(function (res, rej) { | ||
resolve = res; | ||
reject = rej; | ||
}); | ||
processReply( | ||
reference, | ||
"", | ||
void 0, | ||
function (body) { | ||
if ("string" === typeof body) { | ||
var data = new FormData(); | ||
data.append("0", body); | ||
body = data; | ||
} | ||
thenable.status = "fulfilled"; | ||
thenable.value = body; | ||
resolve(body); | ||
}, | ||
function (e) { | ||
thenable.status = "rejected"; | ||
thenable.reason = e; | ||
reject(e); | ||
} | ||
const fulfilled = thenable; | ||
fulfilled.status = 'fulfilled'; | ||
fulfilled.value = body; | ||
resolve(body); | ||
}, e => { | ||
const rejected = thenable; | ||
rejected.status = 'rejected'; | ||
rejected.reason = e; | ||
reject(e); | ||
}); | ||
); | ||
return thenable; | ||
} | ||
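// Illustrative note, not part of the diff: encodeFormData returns a promise | ||
// that is also tagged with status/value synchronously whenever processReply can | ||
// finish in one pass, so defaultEncodeFormAction below can read the encoded | ||
// FormData during render without suspending; if the encoding is still pending, | ||
// the thenable itself is thrown so React can suspend on it. | ||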
function encodeFormAction(identifierPrefix) { | ||
const reference = knownServerReferences.get(this); | ||
if (!reference) { | ||
throw new Error('Tried to encode a Server Action from a different instance than the encoder is from. ' + 'This is a bug in React.'); | ||
} | ||
let data = null; | ||
let name; | ||
const boundPromise = reference.bound; | ||
if (boundPromise !== null) { | ||
let thenable = boundCache.get(reference); | ||
if (!thenable) { | ||
thenable = encodeFormData(reference); | ||
boundCache.set(reference, thenable); | ||
} | ||
if (thenable.status === 'rejected') { | ||
throw thenable.reason; | ||
} else if (thenable.status !== 'fulfilled') { | ||
throw thenable; | ||
} | ||
const encodedFormData = thenable.value; // This is hacky but we need the identifier prefix to be added to | ||
// all fields but the suspense cache would break since we might get | ||
// a new identifier each time. So we just append it at the end instead. | ||
const prefixedData = new FormData(); // $FlowFixMe[prop-missing] | ||
encodedFormData.forEach((value, key) => { | ||
prefixedData.append('$ACTION_' + identifierPrefix + ':' + key, value); | ||
function defaultEncodeFormAction(identifierPrefix) { | ||
var reference = knownServerReferences.get(this); | ||
if (!reference) | ||
throw Error( | ||
"Tried to encode a Server Action from a different instance than the encoder is from. This is a bug in React." | ||
); | ||
var data = null; | ||
if (null !== reference.bound) { | ||
data = boundCache.get(reference); | ||
data || | ||
((data = encodeFormData(reference)), boundCache.set(reference, data)); | ||
if ("rejected" === data.status) throw data.reason; | ||
if ("fulfilled" !== data.status) throw data; | ||
reference = data.value; | ||
var prefixedData = new FormData(); | ||
reference.forEach(function (value, key) { | ||
prefixedData.append("$ACTION_" + identifierPrefix + ":" + key, value); | ||
}); | ||
data = prefixedData; // We encode the name of the prefix containing the data. | ||
name = '$ACTION_REF_' + identifierPrefix; | ||
} else { | ||
// This is the simple case so we can just encode the ID. | ||
name = '$ACTION_ID_' + reference.id; | ||
} | ||
data = prefixedData; | ||
reference = "$ACTION_REF_" + identifierPrefix; | ||
} else reference = "$ACTION_ID_" + reference.id; | ||
return { | ||
name: name, | ||
method: 'POST', | ||
encType: 'multipart/form-data', | ||
name: reference, | ||
method: "POST", | ||
encType: "multipart/form-data", | ||
data: data | ||
}; | ||
} | ||
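// Illustrative sketch, not part of the diff: the descriptor returned above for | ||
// progressive enhancement of form actions, using the field names from the code | ||
// ("abc123" and the prefix "1" are hypothetical values): | ||
// no bound args:   { name: "$ACTION_ID_abc123", method: "POST", | ||
//                    encType: "multipart/form-data", data: null } | ||
// with bound args: { name: "$ACTION_REF_1", method: "POST", | ||
//                    encType: "multipart/form-data", | ||
//                    data: FormData /* fields prefixed "$ACTION_1:" */ } | ||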
function isSignatureEqual(referenceId, numberOfBoundArgs) { | ||
const reference = knownServerReferences.get(this); | ||
if (!reference) { | ||
throw new Error('Tried to encode a Server Action from a different instance than the encoder is from. ' + 'This is a bug in React.'); | ||
} | ||
if (reference.id !== referenceId) { | ||
// These are different functions. | ||
return false; | ||
} // Now check if the number of bound arguments is the same. | ||
const boundPromise = reference.bound; | ||
if (boundPromise === null) { | ||
// No bound arguments. | ||
return numberOfBoundArgs === 0; | ||
} // Unwrap the bound arguments array by suspending, if necessary. As with | ||
// encodeFormData, this means isSignatureEqual can only be called while React | ||
// is rendering. | ||
var reference = knownServerReferences.get(this); | ||
if (!reference) | ||
throw Error( | ||
"Tried to encode a Server Action from a different instance than the encoder is from. This is a bug in React." | ||
); | ||
if (reference.id !== referenceId) return !1; | ||
var boundPromise = reference.bound; | ||
if (null === boundPromise) return 0 === numberOfBoundArgs; | ||
switch (boundPromise.status) { | ||
case 'fulfilled': | ||
{ | ||
const boundArgs = boundPromise.value; | ||
return boundArgs.length === numberOfBoundArgs; | ||
} | ||
case 'pending': | ||
{ | ||
throw boundPromise; | ||
} | ||
case 'rejected': | ||
{ | ||
throw boundPromise.reason; | ||
} | ||
case "fulfilled": | ||
return boundPromise.value.length === numberOfBoundArgs; | ||
case "pending": | ||
throw boundPromise; | ||
case "rejected": | ||
throw boundPromise.reason; | ||
default: | ||
{ | ||
if (typeof boundPromise.status === 'string') ; else { | ||
const pendingThenable = boundPromise; | ||
pendingThenable.status = 'pending'; | ||
pendingThenable.then(boundArgs => { | ||
const fulfilledThenable = boundPromise; | ||
fulfilledThenable.status = 'fulfilled'; | ||
fulfilledThenable.value = boundArgs; | ||
}, error => { | ||
const rejectedThenable = boundPromise; | ||
rejectedThenable.status = 'rejected'; | ||
rejectedThenable.reason = error; | ||
}); | ||
} | ||
throw boundPromise; | ||
} | ||
throw ( | ||
("string" !== typeof boundPromise.status && | ||
((boundPromise.status = "pending"), | ||
boundPromise.then( | ||
function (boundArgs) { | ||
boundPromise.status = "fulfilled"; | ||
boundPromise.value = boundArgs; | ||
}, | ||
function (error) { | ||
boundPromise.status = "rejected"; | ||
boundPromise.reason = error; | ||
} | ||
)), | ||
boundPromise) | ||
); | ||
} | ||
} | ||
function registerServerReference(proxy, reference) { | ||
// Expose encoder for use by SSR, as well as a special bind that can be used to | ||
// keep server capabilities. | ||
{ | ||
// Only expose this in builds that would actually use it. Not needed on the client. | ||
Object.defineProperties(proxy, { | ||
$$FORM_ACTION: { | ||
value: encodeFormAction | ||
}, | ||
$$IS_SIGNATURE_EQUAL: { | ||
value: isSignatureEqual | ||
}, | ||
bind: { | ||
value: bind | ||
} | ||
}); | ||
} | ||
knownServerReferences.set(proxy, reference); | ||
} // $FlowFixMe[method-unbinding] | ||
const FunctionBind = Function.prototype.bind; // $FlowFixMe[method-unbinding] | ||
const ArraySlice = Array.prototype.slice; | ||
function registerServerReference(proxy, reference$jscomp$0, encodeFormAction) { | ||
Object.defineProperties(proxy, { | ||
$$FORM_ACTION: { | ||
value: | ||
void 0 === encodeFormAction | ||
? defaultEncodeFormAction | ||
: function () { | ||
var reference = knownServerReferences.get(this); | ||
if (!reference) | ||
throw Error( | ||
"Tried to encode a Server Action from a different instance than the encoder is from. This is a bug in React." | ||
); | ||
var boundPromise = reference.bound; | ||
null === boundPromise && (boundPromise = Promise.resolve([])); | ||
return encodeFormAction(reference.id, boundPromise); | ||
} | ||
}, | ||
$$IS_SIGNATURE_EQUAL: { value: isSignatureEqual }, | ||
bind: { value: bind } | ||
}); | ||
knownServerReferences.set(proxy, reference$jscomp$0); | ||
} | ||
var FunctionBind = Function.prototype.bind, | ||
ArraySlice = Array.prototype.slice; | ||
function bind() { | ||
// $FlowFixMe[unsupported-syntax] | ||
const newFn = FunctionBind.apply(this, arguments); | ||
const reference = knownServerReferences.get(this); | ||
var newFn = FunctionBind.apply(this, arguments), | ||
reference = knownServerReferences.get(this); | ||
if (reference) { | ||
const args = ArraySlice.call(arguments, 1); | ||
let boundPromise = null; | ||
if (reference.bound !== null) { | ||
boundPromise = Promise.resolve(reference.bound).then(boundArgs => boundArgs.concat(args)); | ||
} else { | ||
boundPromise = Promise.resolve(args); | ||
} | ||
registerServerReference(newFn, { | ||
id: reference.id, | ||
bound: boundPromise | ||
var args = ArraySlice.call(arguments, 1), | ||
boundPromise = null; | ||
boundPromise = | ||
null !== reference.bound | ||
? Promise.resolve(reference.bound).then(function (boundArgs) { | ||
return boundArgs.concat(args); | ||
}) | ||
: Promise.resolve(args); | ||
Object.defineProperties(newFn, { | ||
$$FORM_ACTION: { value: this.$$FORM_ACTION }, | ||
$$IS_SIGNATURE_EQUAL: { value: isSignatureEqual }, | ||
bind: { value: bind } | ||
}); | ||
knownServerReferences.set(newFn, { id: reference.id, bound: boundPromise }); | ||
} | ||
return newFn; | ||
} | ||
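// Illustrative sketch, not part of the diff: binding a registered server | ||
// reference drops the thisArg, keeps the server-side id, and accumulates the | ||
// bound arguments in a promise, so the bound function can still be encoded as | ||
// a form action or sent back to the server. `updateItem` and `itemId` are | ||
// hypothetical names: | ||
// const updateThisItem = updateItem.bind(null, itemId); | ||
// knownServerReferences.get(updateThisItem) | ||
//   -> { id: /* same id as updateItem */, bound: Promise<[itemId]> } | ||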
function createServerReference$1(id, callServer) { | ||
const proxy = function () { | ||
// $FlowFixMe[method-unbinding] | ||
const args = Array.prototype.slice.call(arguments); | ||
function createBoundServerReference(metaData, callServer, encodeFormAction) { | ||
function action() { | ||
var args = Array.prototype.slice.call(arguments); | ||
return bound | ||
? "fulfilled" === bound.status | ||
? callServer(id, bound.value.concat(args)) | ||
: Promise.resolve(bound).then(function (boundArgs) { | ||
return callServer(id, boundArgs.concat(args)); | ||
}) | ||
: callServer(id, args); | ||
} | ||
var id = metaData.id, | ||
bound = metaData.bound; | ||
registerServerReference(action, { id: id, bound: bound }, encodeFormAction); | ||
return action; | ||
} | ||
function createServerReference$1(id, callServer, encodeFormAction) { | ||
function action() { | ||
var args = Array.prototype.slice.call(arguments); | ||
return callServer(id, args); | ||
}; | ||
registerServerReference(proxy, { | ||
id, | ||
bound: null | ||
}); | ||
return proxy; | ||
} | ||
registerServerReference(action, { id: id, bound: null }, encodeFormAction); | ||
return action; | ||
} | ||
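// Illustrative usage sketch, not part of the diff, assuming a router-provided | ||
// callServer transport (the id, endpoint, and serialization below are | ||
// hypothetical placeholders, not this package's real wire protocol): | ||
// const sayHello = createServerReference$1("<server id>", function (id, args) { | ||
//   return fetch("/rsc-action", { | ||
//     method: "POST", | ||
//     body: JSON.stringify({ id: id, args: args }) | ||
//   }); | ||
// }); | ||
// sayHello("world"); // invokes callServer("<server id>", ["world"]) | ||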
const ROW_ID = 0; | ||
const ROW_TAG = 1; | ||
const ROW_LENGTH = 2; | ||
const ROW_CHUNK_BY_NEWLINE = 3; | ||
const ROW_CHUNK_BY_LENGTH = 4; | ||
const PENDING = 'pending'; | ||
const BLOCKED = 'blocked'; | ||
const CYCLIC = 'cyclic'; | ||
const RESOLVED_MODEL = 'resolved_model'; | ||
const RESOLVED_MODULE = 'resolved_module'; | ||
const INITIALIZED = 'fulfilled'; | ||
const ERRORED = 'rejected'; // Dev-only | ||
// $FlowFixMe[missing-this-annot] | ||
function Chunk(status, value, reason, response) { | ||
function ReactPromise(status, value, reason, response) { | ||
this.status = status; | ||
@@ -860,78 +687,41 @@ this.value = value; | ||
this._response = response; | ||
} // We subclass Promise.prototype so that we get other methods like .catch | ||
Chunk.prototype = Object.create(Promise.prototype); // TODO: This doesn't return a new Promise chain unlike the real .then | ||
Chunk.prototype.then = function (resolve, reject) { | ||
const chunk = this; // If we have resolved content, we try to initialize it first which | ||
// might put us back into one of the other states. | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
} | ||
ReactPromise.prototype = Object.create(Promise.prototype); | ||
ReactPromise.prototype.then = function (resolve, reject) { | ||
switch (this.status) { | ||
case "resolved_model": | ||
initializeModelChunk(this); | ||
break; | ||
case RESOLVED_MODULE: | ||
initializeModuleChunk(chunk); | ||
case "resolved_module": | ||
initializeModuleChunk(this); | ||
} | ||
switch (this.status) { | ||
case "fulfilled": | ||
resolve(this.value); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
resolve(chunk.value); | ||
case "pending": | ||
case "blocked": | ||
resolve && | ||
(null === this.value && (this.value = []), this.value.push(resolve)); | ||
reject && | ||
(null === this.reason && (this.reason = []), this.reason.push(reject)); | ||
break; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
if (resolve) { | ||
if (chunk.value === null) { | ||
chunk.value = []; | ||
} | ||
chunk.value.push(resolve); | ||
} | ||
if (reject) { | ||
if (chunk.reason === null) { | ||
chunk.reason = []; | ||
} | ||
chunk.reason.push(reject); | ||
} | ||
break; | ||
default: | ||
reject(chunk.reason); | ||
break; | ||
reject && reject(this.reason); | ||
} | ||
}; | ||
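// Illustrative note, not part of the diff: ReactPromise chunks move through a | ||
// small state machine, and the .then above lazily initializes resolved rows on | ||
// first read. While a chunk is unresolved, value/reason double as listener | ||
// arrays: | ||
//   "pending" / "blocked"  value = resolve listeners, reason = reject listeners | ||
//   "resolved_model"       value = raw row JSON, parsed on first read | ||
//   "resolved_module"      value = client reference, required on first read | ||
//   "fulfilled"            value = the parsed model | ||
//   "rejected"             reason = the error | ||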
function readChunk(chunk) { | ||
// If we have resolved content, we try to initialize it first which | ||
// might put us back into one of the other states. | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
case "resolved_model": | ||
initializeModelChunk(chunk); | ||
break; | ||
case RESOLVED_MODULE: | ||
case "resolved_module": | ||
initializeModuleChunk(chunk); | ||
break; | ||
} // The status might have changed after initialization. | ||
} | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
case "fulfilled": | ||
return chunk.value; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
// eslint-disable-next-line no-throw-literal | ||
case "pending": | ||
case "blocked": | ||
throw chunk; | ||
default: | ||
@@ -941,1017 +731,1135 @@ throw chunk.reason; | ||
} | ||
function getRoot(response) { | ||
const chunk = getChunk(response, 0); | ||
return chunk; | ||
} | ||
function createPendingChunk(response) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(PENDING, null, null, response); | ||
return new ReactPromise("pending", null, null, response); | ||
} | ||
function createBlockedChunk(response) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(BLOCKED, null, null, response); | ||
} | ||
function createErrorChunk(response, error) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(ERRORED, null, error, response); | ||
} | ||
function wakeChunk(listeners, value) { | ||
for (let i = 0; i < listeners.length; i++) { | ||
const listener = listeners[i]; | ||
listener(value); | ||
} | ||
for (var i = 0; i < listeners.length; i++) (0, listeners[i])(value); | ||
} | ||
function wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners) { | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
case "fulfilled": | ||
wakeChunk(resolveListeners, chunk.value); | ||
break; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
chunk.value = resolveListeners; | ||
chunk.reason = rejectListeners; | ||
case "pending": | ||
case "blocked": | ||
if (chunk.value) | ||
for (var i = 0; i < resolveListeners.length; i++) | ||
chunk.value.push(resolveListeners[i]); | ||
else chunk.value = resolveListeners; | ||
if (chunk.reason) { | ||
if (rejectListeners) | ||
for ( | ||
resolveListeners = 0; | ||
resolveListeners < rejectListeners.length; | ||
resolveListeners++ | ||
) | ||
chunk.reason.push(rejectListeners[resolveListeners]); | ||
} else chunk.reason = rejectListeners; | ||
break; | ||
case ERRORED: | ||
if (rejectListeners) { | ||
wakeChunk(rejectListeners, chunk.reason); | ||
} | ||
break; | ||
case "rejected": | ||
rejectListeners && wakeChunk(rejectListeners, chunk.reason); | ||
} | ||
} | ||
function triggerErrorOnChunk(chunk, error) { | ||
if (chunk.status !== PENDING && chunk.status !== BLOCKED) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" !== chunk.status && "blocked" !== chunk.status) | ||
chunk.reason.error(error); | ||
else { | ||
var listeners = chunk.reason; | ||
chunk.status = "rejected"; | ||
chunk.reason = error; | ||
null !== listeners && wakeChunk(listeners, error); | ||
} | ||
const listeners = chunk.reason; | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
if (listeners !== null) { | ||
wakeChunk(listeners, error); | ||
} | ||
} | ||
function createResolvedModelChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(RESOLVED_MODEL, value, null, response); | ||
function createResolvedIteratorResultChunk(response, value, done) { | ||
return new ReactPromise( | ||
"resolved_model", | ||
(done ? '{"done":true,"value":' : '{"done":false,"value":') + value + "}", | ||
null, | ||
response | ||
); | ||
} | ||
function createResolvedModuleChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(RESOLVED_MODULE, value, null, response); | ||
function resolveIteratorResultChunk(chunk, value, done) { | ||
resolveModelChunk( | ||
chunk, | ||
(done ? '{"done":true,"value":' : '{"done":false,"value":') + value + "}" | ||
); | ||
} | ||
function createInitializedTextChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(INITIALIZED, value, null, response); | ||
} | ||
function createInitializedBufferChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(INITIALIZED, value, null, response); | ||
} | ||
function resolveModelChunk(chunk, value) { | ||
if (chunk.status !== PENDING) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" !== chunk.status) chunk.reason.enqueueModel(value); | ||
else { | ||
var resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "resolved_model"; | ||
chunk.value = value; | ||
null !== resolveListeners && | ||
(initializeModelChunk(chunk), | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners)); | ||
} | ||
const resolveListeners = chunk.value; | ||
const rejectListeners = chunk.reason; | ||
const resolvedChunk = chunk; | ||
resolvedChunk.status = RESOLVED_MODEL; | ||
resolvedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
// This is unfortunate that we're reading this eagerly if | ||
// we already have listeners attached since they might no | ||
// longer be rendered or might not be the highest pri. | ||
initializeModelChunk(resolvedChunk); // The status might have changed after initialization. | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
} | ||
function resolveModuleChunk(chunk, value) { | ||
if (chunk.status !== PENDING && chunk.status !== BLOCKED) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" === chunk.status || "blocked" === chunk.status) { | ||
var resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "resolved_module"; | ||
chunk.value = value; | ||
null !== resolveListeners && | ||
(initializeModuleChunk(chunk), | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners)); | ||
} | ||
const resolveListeners = chunk.value; | ||
const rejectListeners = chunk.reason; | ||
const resolvedChunk = chunk; | ||
resolvedChunk.status = RESOLVED_MODULE; | ||
resolvedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
initializeModuleChunk(resolvedChunk); | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
} | ||
let initializingChunk = null; | ||
let initializingChunkBlockedModel = null; | ||
var initializingHandler = null; | ||
function initializeModelChunk(chunk) { | ||
const prevChunk = initializingChunk; | ||
const prevBlocked = initializingChunkBlockedModel; | ||
initializingChunk = chunk; | ||
initializingChunkBlockedModel = null; | ||
const resolvedModel = chunk.value; // We go to the CYCLIC state until we've fully resolved this. | ||
// We do this before parsing in case we try to initialize the same chunk | ||
// while parsing the model. Such as in a cyclic reference. | ||
const cyclicChunk = chunk; | ||
cyclicChunk.status = CYCLIC; | ||
cyclicChunk.value = null; | ||
cyclicChunk.reason = null; | ||
var prevHandler = initializingHandler; | ||
initializingHandler = null; | ||
var resolvedModel = chunk.value; | ||
chunk.status = "blocked"; | ||
chunk.value = null; | ||
chunk.reason = null; | ||
try { | ||
const value = parseModel(chunk._response, resolvedModel); | ||
if (initializingChunkBlockedModel !== null && initializingChunkBlockedModel.deps > 0) { | ||
initializingChunkBlockedModel.value = value; // We discovered new dependencies on modules that are not yet resolved. | ||
// We have to go the BLOCKED state until they're resolved. | ||
const blockedChunk = chunk; | ||
blockedChunk.status = BLOCKED; | ||
blockedChunk.value = null; | ||
blockedChunk.reason = null; | ||
} else { | ||
const resolveListeners = cyclicChunk.value; | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
wakeChunk(resolveListeners, value); | ||
var value = JSON.parse(resolvedModel, chunk._response._fromJSON), | ||
resolveListeners = chunk.value; | ||
null !== resolveListeners && | ||
((chunk.value = null), | ||
(chunk.reason = null), | ||
wakeChunk(resolveListeners, value)); | ||
if (null !== initializingHandler) { | ||
if (initializingHandler.errored) throw initializingHandler.value; | ||
if (0 < initializingHandler.deps) { | ||
initializingHandler.value = value; | ||
initializingHandler.chunk = chunk; | ||
return; | ||
} | ||
} | ||
chunk.status = "fulfilled"; | ||
chunk.value = value; | ||
} catch (error) { | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
(chunk.status = "rejected"), (chunk.reason = error); | ||
} finally { | ||
initializingChunk = prevChunk; | ||
initializingChunkBlockedModel = prevBlocked; | ||
initializingHandler = prevHandler; | ||
} | ||
} | ||
function initializeModuleChunk(chunk) { | ||
try { | ||
const value = requireModule(chunk.value); | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = value; | ||
var value = requireModule(chunk.value); | ||
chunk.status = "fulfilled"; | ||
chunk.value = value; | ||
} catch (error) { | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
(chunk.status = "rejected"), (chunk.reason = error); | ||
} | ||
} // Report that any missing chunks in the model are now going to throw this | ||
// error upon read. Also notify any pending promises. | ||
} | ||
function reportGlobalError(response, error) { | ||
response._chunks.forEach(chunk => { | ||
// If this chunk was already resolved or errored, it won't | ||
// trigger an error but if it wasn't then we need to | ||
// because we won't be getting any new data to resolve it. | ||
if (chunk.status === PENDING) { | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
response._chunks.forEach(function (chunk) { | ||
"pending" === chunk.status && triggerErrorOnChunk(chunk, error); | ||
}); | ||
} | ||
function createElement(type, key, props) { | ||
const element = { | ||
// This tag allows us to uniquely identify this as a React Element | ||
$$typeof: REACT_ELEMENT_TYPE, | ||
// Built-in properties that belong on the element | ||
type: type, | ||
key: key, | ||
ref: null, | ||
props: props, | ||
// Record the component responsible for creating this element. | ||
_owner: null | ||
}; | ||
return element; | ||
} | ||
function createLazyChunkWrapper(chunk) { | ||
const lazyType = { | ||
$$typeof: REACT_LAZY_TYPE, | ||
_payload: chunk, | ||
_init: readChunk | ||
}; | ||
return lazyType; | ||
return { $$typeof: REACT_LAZY_TYPE, _payload: chunk, _init: readChunk }; | ||
} | ||
function getChunk(response, id) { | ||
const chunks = response._chunks; | ||
let chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunk = createPendingChunk(response); | ||
chunks.set(id, chunk); | ||
} | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk || ((chunk = createPendingChunk(response)), chunks.set(id, chunk)); | ||
return chunk; | ||
} | ||
function createModelResolver(chunk, parentObject, key, cyclic) { | ||
let blocked; | ||
if (initializingChunkBlockedModel) { | ||
blocked = initializingChunkBlockedModel; | ||
if (!cyclic) { | ||
blocked.deps++; | ||
function waitForReference( | ||
referencedChunk, | ||
parentObject, | ||
key, | ||
response, | ||
map, | ||
path | ||
) { | ||
function fulfill(value) { | ||
for (var i = 1; i < path.length; i++) { | ||
for (; value.$$typeof === REACT_LAZY_TYPE; ) | ||
if (((value = value._payload), value === handler.chunk)) | ||
value = handler.value; | ||
else if ("fulfilled" === value.status) value = value.value; | ||
else { | ||
path.splice(0, i - 1); | ||
value.then(fulfill, reject); | ||
return; | ||
} | ||
value = value[path[i]]; | ||
} | ||
} else { | ||
blocked = initializingChunkBlockedModel = { | ||
deps: cyclic ? 0 : 1, | ||
value: null | ||
i = map(response, value, parentObject, key); | ||
parentObject[key] = i; | ||
"" === key && null === handler.value && (handler.value = i); | ||
if ( | ||
parentObject[0] === REACT_ELEMENT_TYPE && | ||
"object" === typeof handler.value && | ||
null !== handler.value && | ||
handler.value.$$typeof === REACT_ELEMENT_TYPE | ||
) | ||
switch (((value = handler.value), key)) { | ||
case "3": | ||
value.props = i; | ||
} | ||
handler.deps--; | ||
0 === handler.deps && | ||
((i = handler.chunk), | ||
null !== i && | ||
"blocked" === i.status && | ||
((value = i.value), | ||
(i.status = "fulfilled"), | ||
(i.value = handler.value), | ||
null !== value && wakeChunk(value, handler.value))); | ||
} | ||
function reject(error) { | ||
if (!handler.errored) { | ||
handler.errored = !0; | ||
handler.value = error; | ||
var chunk = handler.chunk; | ||
null !== chunk && | ||
"blocked" === chunk.status && | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
} | ||
if (initializingHandler) { | ||
var handler = initializingHandler; | ||
handler.deps++; | ||
} else | ||
handler = initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: null, | ||
deps: 1, | ||
errored: !1 | ||
}; | ||
} | ||
return value => { | ||
parentObject[key] = value; | ||
blocked.deps--; | ||
if (blocked.deps === 0) { | ||
if (chunk.status !== BLOCKED) { | ||
return; | ||
referencedChunk.then(fulfill, reject); | ||
return null; | ||
} | ||
function loadServerReference(response, metaData, parentObject, key) { | ||
if (!response._serverReferenceConfig) | ||
return createBoundServerReference( | ||
metaData, | ||
response._callServer, | ||
response._encodeFormAction | ||
); | ||
var serverReference = resolveServerReference( | ||
response._serverReferenceConfig, | ||
metaData.id | ||
); | ||
if ((response = preloadModule(serverReference))) | ||
metaData.bound && (response = Promise.all([response, metaData.bound])); | ||
else if (metaData.bound) response = Promise.resolve(metaData.bound); | ||
else return requireModule(serverReference); | ||
if (initializingHandler) { | ||
var handler = initializingHandler; | ||
handler.deps++; | ||
} else | ||
handler = initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: null, | ||
deps: 1, | ||
errored: !1 | ||
}; | ||
response.then( | ||
function () { | ||
var resolvedValue = requireModule(serverReference); | ||
if (metaData.bound) { | ||
var boundArgs = metaData.bound.value.slice(0); | ||
boundArgs.unshift(null); | ||
resolvedValue = resolvedValue.bind.apply(resolvedValue, boundArgs); | ||
} | ||
const resolveListeners = chunk.value; | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = blocked.value; | ||
if (resolveListeners !== null) { | ||
wakeChunk(resolveListeners, blocked.value); | ||
parentObject[key] = resolvedValue; | ||
"" === key && null === handler.value && (handler.value = resolvedValue); | ||
if ( | ||
parentObject[0] === REACT_ELEMENT_TYPE && | ||
"object" === typeof handler.value && | ||
null !== handler.value && | ||
handler.value.$$typeof === REACT_ELEMENT_TYPE | ||
) | ||
switch (((boundArgs = handler.value), key)) { | ||
case "3": | ||
boundArgs.props = resolvedValue; | ||
} | ||
handler.deps--; | ||
0 === handler.deps && | ||
((resolvedValue = handler.chunk), | ||
null !== resolvedValue && | ||
"blocked" === resolvedValue.status && | ||
((boundArgs = resolvedValue.value), | ||
(resolvedValue.status = "fulfilled"), | ||
(resolvedValue.value = handler.value), | ||
null !== boundArgs && wakeChunk(boundArgs, handler.value))); | ||
}, | ||
function (error) { | ||
if (!handler.errored) { | ||
handler.errored = !0; | ||
handler.value = error; | ||
var chunk = handler.chunk; | ||
null !== chunk && | ||
"blocked" === chunk.status && | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
} | ||
}; | ||
); | ||
return null; | ||
} | ||
function createModelReject(chunk) { | ||
return error => triggerErrorOnChunk(chunk, error); | ||
} | ||
function createServerReferenceProxy(response, metaData) { | ||
const callServer = response._callServer; | ||
const proxy = function () { | ||
// $FlowFixMe[method-unbinding] | ||
const args = Array.prototype.slice.call(arguments); | ||
const p = metaData.bound; | ||
if (!p) { | ||
return callServer(metaData.id, args); | ||
} | ||
if (p.status === INITIALIZED) { | ||
const bound = p.value; | ||
return callServer(metaData.id, bound.concat(args)); | ||
} // Since this is a fake Promise whose .then doesn't chain, we have to wrap it. | ||
// TODO: Remove the wrapper once that's fixed. | ||
return Promise.resolve(p).then(function (bound) { | ||
return callServer(metaData.id, bound.concat(args)); | ||
}); | ||
}; | ||
registerServerReference(proxy, metaData); | ||
return proxy; | ||
} | ||
function getOutlinedModel(response, id) { | ||
const chunk = getChunk(response, id); | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
function getOutlinedModel(response, reference, parentObject, key, map) { | ||
reference = reference.split(":"); | ||
var id = parseInt(reference[0], 16); | ||
id = getChunk(response, id); | ||
switch (id.status) { | ||
case "resolved_model": | ||
initializeModelChunk(id); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
{ | ||
return chunk.value; | ||
case "resolved_module": | ||
initializeModuleChunk(id); | ||
} | ||
switch (id.status) { | ||
case "fulfilled": | ||
var value = id.value; | ||
for (id = 1; id < reference.length; id++) { | ||
for (; value.$$typeof === REACT_LAZY_TYPE; ) | ||
if (((value = value._payload), "fulfilled" === value.status)) | ||
value = value.value; | ||
else | ||
return waitForReference( | ||
value, | ||
parentObject, | ||
key, | ||
response, | ||
map, | ||
reference.slice(id - 1) | ||
); | ||
value = value[reference[id]]; | ||
} | ||
// We always encode it first in the stream so it won't be pending. | ||
return map(response, value, parentObject, key); | ||
case "pending": | ||
case "blocked": | ||
return waitForReference(id, parentObject, key, response, map, reference); | ||
default: | ||
throw chunk.reason; | ||
return ( | ||
initializingHandler | ||
? ((initializingHandler.errored = !0), | ||
(initializingHandler.value = id.reason)) | ||
: (initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: id.reason, | ||
deps: 0, | ||
errored: !0 | ||
}), | ||
null | ||
); | ||
} | ||
} | ||
function createMap(response, model) { | ||
return new Map(model); | ||
} | ||
function createSet(response, model) { | ||
return new Set(model); | ||
} | ||
function createBlob(response, model) { | ||
return new Blob(model.slice(1), { type: model[0] }); | ||
} | ||
function createFormData(response, model) { | ||
response = new FormData(); | ||
for (var i = 0; i < model.length; i++) | ||
response.append(model[i][0], model[i][1]); | ||
return response; | ||
} | ||
function extractIterator(response, model) { | ||
return model[Symbol.iterator](); | ||
} | ||
function createModel(response, model) { | ||
return model; | ||
} | ||
function parseModelString(response, parentObject, key, value) { | ||
if (value[0] === '$') { | ||
if (value === '$') { | ||
// A very common symbol. | ||
return REACT_ELEMENT_TYPE; | ||
} | ||
if ("$" === value[0]) { | ||
if ("$" === value) | ||
return ( | ||
null !== initializingHandler && | ||
"0" === key && | ||
(initializingHandler = { | ||
parent: initializingHandler, | ||
chunk: null, | ||
value: null, | ||
deps: 0, | ||
errored: !1 | ||
}), | ||
REACT_ELEMENT_TYPE | ||
); | ||
switch (value[1]) { | ||
case '$': | ||
{ | ||
// This was an escaped string value. | ||
return value.slice(1); | ||
} | ||
case 'L': | ||
{ | ||
// Lazy node | ||
const id = parseInt(value.slice(2), 16); | ||
const chunk = getChunk(response, id); // We create a React.lazy wrapper around any lazy values. | ||
// When passed into React, we'll know how to suspend on this. | ||
return createLazyChunkWrapper(chunk); | ||
} | ||
case '@': | ||
{ | ||
// Promise | ||
const id = parseInt(value.slice(2), 16); | ||
const chunk = getChunk(response, id); | ||
return chunk; | ||
} | ||
case 'S': | ||
{ | ||
// Symbol | ||
return Symbol.for(value.slice(2)); | ||
} | ||
case 'F': | ||
{ | ||
// Server Reference | ||
const id = parseInt(value.slice(2), 16); | ||
const metadata = getOutlinedModel(response, id); | ||
return createServerReferenceProxy(response, metadata); | ||
} | ||
case 'Q': | ||
{ | ||
// Map | ||
const id = parseInt(value.slice(2), 16); | ||
const data = getOutlinedModel(response, id); | ||
return new Map(data); | ||
} | ||
case 'W': | ||
{ | ||
// Set | ||
const id = parseInt(value.slice(2), 16); | ||
const data = getOutlinedModel(response, id); | ||
return new Set(data); | ||
} | ||
case 'I': | ||
{ | ||
// $Infinity | ||
return Infinity; | ||
} | ||
case '-': | ||
{ | ||
// $-0 or $-Infinity | ||
if (value === '$-0') { | ||
return -0; | ||
} else { | ||
return -Infinity; | ||
} | ||
} | ||
case 'N': | ||
{ | ||
// $NaN | ||
return NaN; | ||
} | ||
case 'u': | ||
{ | ||
// matches "$undefined" | ||
// Special encoding for `undefined` which can't be serialized as JSON otherwise. | ||
return undefined; | ||
} | ||
case 'D': | ||
{ | ||
// Date | ||
return new Date(Date.parse(value.slice(2))); | ||
} | ||
case 'n': | ||
{ | ||
// BigInt | ||
return BigInt(value.slice(2)); | ||
} | ||
case "$": | ||
return value.slice(1); | ||
case "L": | ||
return ( | ||
(parentObject = parseInt(value.slice(2), 16)), | ||
(response = getChunk(response, parentObject)), | ||
createLazyChunkWrapper(response) | ||
); | ||
case "@": | ||
if (2 === value.length) return new Promise(function () {}); | ||
parentObject = parseInt(value.slice(2), 16); | ||
return getChunk(response, parentObject); | ||
case "S": | ||
return Symbol.for(value.slice(2)); | ||
case "F": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel( | ||
response, | ||
value, | ||
parentObject, | ||
key, | ||
loadServerReference | ||
) | ||
); | ||
case "T": | ||
parentObject = "$" + value.slice(2); | ||
response = response._tempRefs; | ||
if (null == response) | ||
throw Error( | ||
"Missing a temporary reference set but the RSC response returned a temporary reference. Pass a temporaryReference option with the set that was used with the reply." | ||
); | ||
return response.get(parentObject); | ||
case "Q": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createMap) | ||
); | ||
case "W": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createSet) | ||
); | ||
case "B": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createBlob) | ||
); | ||
case "K": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createFormData) | ||
); | ||
case "Z": | ||
return resolveErrorProd(); | ||
case "i": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, extractIterator) | ||
); | ||
case "I": | ||
return Infinity; | ||
case "-": | ||
return "$-0" === value ? -0 : -Infinity; | ||
case "N": | ||
return NaN; | ||
case "u": | ||
return; | ||
case "D": | ||
return new Date(Date.parse(value.slice(2))); | ||
case "n": | ||
return BigInt(value.slice(2)); | ||
default: | ||
{ | ||
// We assume that anything else is a reference ID. | ||
const id = parseInt(value.slice(1), 16); | ||
const chunk = getChunk(response, id); | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
break; | ||
case RESOLVED_MODULE: | ||
initializeModuleChunk(chunk); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
const chunkValue = chunk.value; | ||
return chunkValue; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
const parentChunk = initializingChunk; | ||
chunk.then(createModelResolver(parentChunk, parentObject, key, chunk.status === CYCLIC), createModelReject(parentChunk)); | ||
return null; | ||
default: | ||
throw chunk.reason; | ||
} | ||
} | ||
return ( | ||
(value = value.slice(1)), | ||
getOutlinedModel(response, value, parentObject, key, createModel) | ||
); | ||
} | ||
} | ||
return value; | ||
} | ||
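// Illustrative summary, not part of the diff, of the "$"-prefixed tags decoded | ||
// by parseModelString above (server -> client direction): | ||
//   "$"       React element marker       "$$..."  escaped literal string | ||
//   "$L<id>"  lazy chunk                 "$@<id>" promise for a chunk | ||
//   "$S<n>"   Symbol.for(name)           "$F<id>" server reference | ||
//   "$T<id>"  temporary reference        "$Q<id>" Map        "$W<id>" Set | ||
//   "$B<id>"  Blob                       "$K<id>" FormData   "$i<id>" iterator | ||
//   "$Z"      errored row                "$I" Infinity  "$-0" / "$-Infinity" | ||
//   "$N"      NaN                        "$u" undefined | ||
//   "$D<iso>" Date                       "$n<digits>" BigInt | ||
//   "$<hex>"  reference to another row by id | ||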
function parseModelTuple(response, value) { | ||
const tuple = value; | ||
if (tuple[0] === REACT_ELEMENT_TYPE) { | ||
// TODO: Consider having React just directly accept these arrays as elements. | ||
// Or even change the ReactElement type to be an array. | ||
return createElement(tuple[1], tuple[2], tuple[3]); | ||
} | ||
return value; | ||
} | ||
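// Illustrative sketch, not part of the diff: element rows arrive as tuples and | ||
// parseModelTuple turns them back into element objects, e.g. with the | ||
// createElement above: | ||
// ["$", "div", null, { children: "Hi" }] | ||
//   -> { $$typeof: REACT_ELEMENT_TYPE, type: "div", key: null, ref: null, | ||
//        props: { children: "Hi" }, _owner: null } | ||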
function missingCall() { | ||
throw new Error('Trying to call a function from "use server" but the callServer option ' + 'was not implemented in your router runtime.'); | ||
throw Error( | ||
'Trying to call a function from "use server" but the callServer option was not implemented in your router runtime.' | ||
); | ||
} | ||
function createResponse(bundlerConfig, moduleLoading, callServer, nonce) { | ||
const chunks = new Map(); | ||
const response = { | ||
_bundlerConfig: bundlerConfig, | ||
_moduleLoading: moduleLoading, | ||
_callServer: callServer !== undefined ? callServer : missingCall, | ||
_nonce: nonce, | ||
_chunks: chunks, | ||
_stringDecoder: createStringDecoder(), | ||
_fromJSON: null, | ||
_rowState: 0, | ||
_rowID: 0, | ||
_rowTag: 0, | ||
_rowLength: 0, | ||
_buffer: [] | ||
}; // Don't inline this call because it causes closure to outline the call above. | ||
response._fromJSON = createFromJSONCallback(response); | ||
return response; | ||
function ResponseInstance( | ||
bundlerConfig, | ||
serverReferenceConfig, | ||
moduleLoading, | ||
callServer, | ||
encodeFormAction, | ||
nonce, | ||
temporaryReferences | ||
) { | ||
var chunks = new Map(); | ||
this._bundlerConfig = bundlerConfig; | ||
this._serverReferenceConfig = serverReferenceConfig; | ||
this._moduleLoading = moduleLoading; | ||
this._callServer = void 0 !== callServer ? callServer : missingCall; | ||
this._encodeFormAction = encodeFormAction; | ||
this._nonce = nonce; | ||
this._chunks = chunks; | ||
this._stringDecoder = new util.TextDecoder(); | ||
this._fromJSON = null; | ||
this._rowLength = this._rowTag = this._rowID = this._rowState = 0; | ||
this._buffer = []; | ||
this._tempRefs = temporaryReferences; | ||
this._fromJSON = createFromJSONCallback(this); | ||
} | ||
function resolveModel(response, id, model) { | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createResolvedModelChunk(response, model)); | ||
} else { | ||
resolveModelChunk(chunk, model); | ||
} | ||
function resolveBuffer(response, id, buffer) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk && "pending" !== chunk.status | ||
? chunk.reason.enqueueValue(buffer) | ||
: chunks.set(id, new ReactPromise("fulfilled", buffer, null, response)); | ||
} | ||
function resolveText(response, id, text) { | ||
const chunks = response._chunks; // We assume that we always reference large strings after they've been | ||
// emitted. | ||
chunks.set(id, createInitializedTextChunk(response, text)); | ||
function resolveModule(response, id, model) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
model = JSON.parse(model, response._fromJSON); | ||
var clientReference = resolveClientReference(response._bundlerConfig, model); | ||
prepareDestinationWithChunks( | ||
response._moduleLoading, | ||
model[1], | ||
response._nonce | ||
); | ||
if ((model = preloadModule(clientReference))) { | ||
if (chunk) { | ||
var blockedChunk = chunk; | ||
blockedChunk.status = "blocked"; | ||
} else | ||
(blockedChunk = new ReactPromise("blocked", null, null, response)), | ||
chunks.set(id, blockedChunk); | ||
model.then( | ||
function () { | ||
return resolveModuleChunk(blockedChunk, clientReference); | ||
}, | ||
function (error) { | ||
return triggerErrorOnChunk(blockedChunk, error); | ||
} | ||
); | ||
} else | ||
chunk | ||
? resolveModuleChunk(chunk, clientReference) | ||
: chunks.set( | ||
id, | ||
new ReactPromise("resolved_module", clientReference, null, response) | ||
); | ||
} | ||
function resolveBuffer(response, id, buffer) { | ||
const chunks = response._chunks; // We assume that we always reference buffers after they've been emitted. | ||
chunks.set(id, createInitializedBufferChunk(response, buffer)); | ||
function resolveStream(response, id, stream, controller) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk | ||
? "pending" === chunk.status && | ||
((response = chunk.value), | ||
(chunk.status = "fulfilled"), | ||
(chunk.value = stream), | ||
(chunk.reason = controller), | ||
null !== response && wakeChunk(response, chunk.value)) | ||
: chunks.set( | ||
id, | ||
new ReactPromise("fulfilled", stream, controller, response) | ||
); | ||
} | ||
function resolveModule(response, id, model) { | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
const clientReferenceMetadata = parseModel(response, model); | ||
const clientReference = resolveClientReference(response._bundlerConfig, clientReferenceMetadata); | ||
prepareDestinationForModule(response._moduleLoading, response._nonce, clientReferenceMetadata); // TODO: Add an option to encode modules that are lazy loaded. | ||
// For now we preload all modules as early as possible since it's likely | ||
// that we'll need them. | ||
const promise = preloadModule(clientReference); | ||
if (promise) { | ||
let blockedChunk; | ||
if (!chunk) { | ||
// Technically, we should just treat promise as the chunk in this | ||
// case. Because it'll just behave as any other promise. | ||
blockedChunk = createBlockedChunk(response); | ||
chunks.set(id, blockedChunk); | ||
} else { | ||
// This can't actually happen because we don't have any forward | ||
// references to modules. | ||
blockedChunk = chunk; | ||
blockedChunk.status = BLOCKED; | ||
function startReadableStream(response, id, type) { | ||
var controller = null; | ||
type = new ReadableStream({ | ||
type: type, | ||
start: function (c) { | ||
controller = c; | ||
} | ||
promise.then(() => resolveModuleChunk(blockedChunk, clientReference), error => triggerErrorOnChunk(blockedChunk, error)); | ||
} else { | ||
if (!chunk) { | ||
chunks.set(id, createResolvedModuleChunk(response, clientReference)); | ||
} else { | ||
// This can't actually happen because we don't have any forward | ||
// references to modules. | ||
resolveModuleChunk(chunk, clientReference); | ||
}); | ||
var previousBlockedChunk = null; | ||
resolveStream(response, id, type, { | ||
enqueueValue: function (value) { | ||
null === previousBlockedChunk | ||
? controller.enqueue(value) | ||
: previousBlockedChunk.then(function () { | ||
controller.enqueue(value); | ||
}); | ||
}, | ||
enqueueModel: function (json) { | ||
if (null === previousBlockedChunk) { | ||
var chunk = new ReactPromise("resolved_model", json, null, response); | ||
initializeModelChunk(chunk); | ||
"fulfilled" === chunk.status | ||
? controller.enqueue(chunk.value) | ||
: (chunk.then( | ||
function (v) { | ||
return controller.enqueue(v); | ||
}, | ||
function (e) { | ||
return controller.error(e); | ||
} | ||
), | ||
(previousBlockedChunk = chunk)); | ||
} else { | ||
chunk = previousBlockedChunk; | ||
var chunk$52 = createPendingChunk(response); | ||
chunk$52.then( | ||
function (v) { | ||
return controller.enqueue(v); | ||
}, | ||
function (e) { | ||
return controller.error(e); | ||
} | ||
); | ||
previousBlockedChunk = chunk$52; | ||
chunk.then(function () { | ||
previousBlockedChunk === chunk$52 && (previousBlockedChunk = null); | ||
resolveModelChunk(chunk$52, json); | ||
}); | ||
} | ||
}, | ||
close: function () { | ||
if (null === previousBlockedChunk) controller.close(); | ||
else { | ||
var blockedChunk = previousBlockedChunk; | ||
previousBlockedChunk = null; | ||
blockedChunk.then(function () { | ||
return controller.close(); | ||
}); | ||
} | ||
}, | ||
error: function (error) { | ||
if (null === previousBlockedChunk) controller.error(error); | ||
else { | ||
var blockedChunk = previousBlockedChunk; | ||
previousBlockedChunk = null; | ||
blockedChunk.then(function () { | ||
return controller.error(error); | ||
}); | ||
} | ||
} | ||
} | ||
}); | ||
} | ||
function resolveErrorProd(response, id, digest) { | ||
const error = new Error('An error occurred in the Server Components render. The specific message is omitted in production' + ' builds to avoid leaking sensitive details. A digest property is included on this error instance which' + ' may provide additional details about the nature of the error.'); | ||
error.stack = 'Error: ' + error.message; | ||
error.digest = digest; | ||
const errorWithDigest = error; | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createErrorChunk(response, errorWithDigest)); | ||
} else { | ||
triggerErrorOnChunk(chunk, errorWithDigest); | ||
} | ||
function asyncIterator() { | ||
return this; | ||
} | ||
function resolvePostponeProd(response, id) { | ||
const error = new Error('A Server Component was postponed. The reason is omitted in production' + ' builds to avoid leaking sensitive details.'); | ||
const postponeInstance = error; | ||
postponeInstance.$$typeof = REACT_POSTPONE_TYPE; | ||
postponeInstance.stack = 'Error: ' + error.message; | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createErrorChunk(response, postponeInstance)); | ||
} else { | ||
triggerErrorOnChunk(chunk, postponeInstance); | ||
} | ||
function createIterator(next) { | ||
next = { next: next }; | ||
next[ASYNC_ITERATOR] = asyncIterator; | ||
return next; | ||
} | ||
function startAsyncIterable(response, id, iterator) { | ||
var buffer = [], | ||
closed = !1, | ||
nextWriteIndex = 0, | ||
$jscomp$compprop0 = {}; | ||
$jscomp$compprop0 = | ||
(($jscomp$compprop0[ASYNC_ITERATOR] = function () { | ||
var nextReadIndex = 0; | ||
return createIterator(function (arg) { | ||
if (void 0 !== arg) | ||
throw Error( | ||
"Values cannot be passed to next() of AsyncIterables passed to Client Components." | ||
); | ||
if (nextReadIndex === buffer.length) { | ||
if (closed) | ||
return new ReactPromise( | ||
"fulfilled", | ||
{ done: !0, value: void 0 }, | ||
null, | ||
response | ||
); | ||
buffer[nextReadIndex] = createPendingChunk(response); | ||
} | ||
return buffer[nextReadIndex++]; | ||
}); | ||
}), | ||
$jscomp$compprop0); | ||
resolveStream( | ||
response, | ||
id, | ||
iterator ? $jscomp$compprop0[ASYNC_ITERATOR]() : $jscomp$compprop0, | ||
{ | ||
enqueueValue: function (value) { | ||
if (nextWriteIndex === buffer.length) | ||
buffer[nextWriteIndex] = new ReactPromise( | ||
"fulfilled", | ||
{ done: !1, value: value }, | ||
null, | ||
response | ||
); | ||
else { | ||
var chunk = buffer[nextWriteIndex], | ||
resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "fulfilled"; | ||
chunk.value = { done: !1, value: value }; | ||
null !== resolveListeners && | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
nextWriteIndex++; | ||
}, | ||
enqueueModel: function (value) { | ||
nextWriteIndex === buffer.length | ||
? (buffer[nextWriteIndex] = createResolvedIteratorResultChunk( | ||
response, | ||
value, | ||
!1 | ||
)) | ||
: resolveIteratorResultChunk(buffer[nextWriteIndex], value, !1); | ||
nextWriteIndex++; | ||
}, | ||
close: function (value) { | ||
closed = !0; | ||
nextWriteIndex === buffer.length | ||
? (buffer[nextWriteIndex] = createResolvedIteratorResultChunk( | ||
response, | ||
value, | ||
!0 | ||
)) | ||
: resolveIteratorResultChunk(buffer[nextWriteIndex], value, !0); | ||
for (nextWriteIndex++; nextWriteIndex < buffer.length; ) | ||
resolveIteratorResultChunk( | ||
buffer[nextWriteIndex++], | ||
'"$undefined"', | ||
!0 | ||
); | ||
}, | ||
error: function (error) { | ||
closed = !0; | ||
for ( | ||
nextWriteIndex === buffer.length && | ||
(buffer[nextWriteIndex] = createPendingChunk(response)); | ||
nextWriteIndex < buffer.length; | ||
function resolveHint(response, code, model) { | ||
const hintModel = parseModel(response, model); | ||
dispatchHint(code, hintModel); | ||
) | ||
triggerErrorOnChunk(buffer[nextWriteIndex++], error); | ||
} | ||
} | ||
); | ||
} | ||
function resolveErrorProd() { | ||
var error = Error( | ||
"An error occurred in the Server Components render. The specific message is omitted in production builds to avoid leaking sensitive details. A digest property is included on this error instance which may provide additional details about the nature of the error." | ||
); | ||
error.stack = "Error: " + error.message; | ||
return error; | ||
} | ||
function mergeBuffer(buffer, lastChunk) { | ||
const l = buffer.length; // Count the bytes we'll need | ||
let byteLength = lastChunk.length; | ||
for (let i = 0; i < l; i++) { | ||
for (var l = buffer.length, byteLength = lastChunk.length, i = 0; i < l; i++) | ||
byteLength += buffer[i].byteLength; | ||
} // Allocate enough contiguous space | ||
const result = new Uint8Array(byteLength); | ||
let offset = 0; // Copy all the buffers into it. | ||
for (let i = 0; i < l; i++) { | ||
const chunk = buffer[i]; | ||
result.set(chunk, offset); | ||
offset += chunk.byteLength; | ||
byteLength = new Uint8Array(byteLength); | ||
for (var i$53 = (i = 0); i$53 < l; i$53++) { | ||
var chunk = buffer[i$53]; | ||
byteLength.set(chunk, i); | ||
i += chunk.byteLength; | ||
} | ||
result.set(lastChunk, offset); | ||
return result; | ||
byteLength.set(lastChunk, i); | ||
return byteLength; | ||
} | ||
function resolveTypedArray(response, id, buffer, lastChunk, constructor, bytesPerElement) { | ||
// If the view fits into one original buffer, we just reuse that buffer instead of | ||
// copying it out to a separate copy. This means that it's not always possible to | ||
// transfer these values to other threads without copying first since they may | ||
// share array buffer. For this to work, it must also have bytes aligned to a | ||
// multiple of a size of the type. | ||
const chunk = buffer.length === 0 && lastChunk.byteOffset % bytesPerElement === 0 ? lastChunk : mergeBuffer(buffer, lastChunk); // TODO: The transfer protocol of RSC is little-endian. If the client isn't little-endian | ||
// we should convert it instead. In practice big endian isn't really Web compatible so it's | ||
// somewhat safe to assume that browsers aren't going to run it, but maybe there's some SSR | ||
// server that's affected. | ||
const view = new constructor(chunk.buffer, chunk.byteOffset, chunk.byteLength / bytesPerElement); | ||
resolveBuffer(response, id, view); | ||
function resolveTypedArray( | ||
response, | ||
id, | ||
buffer, | ||
lastChunk, | ||
constructor, | ||
bytesPerElement | ||
) { | ||
buffer = | ||
0 === buffer.length && 0 === lastChunk.byteOffset % bytesPerElement | ||
? lastChunk | ||
: mergeBuffer(buffer, lastChunk); | ||
constructor = new constructor( | ||
buffer.buffer, | ||
buffer.byteOffset, | ||
buffer.byteLength / bytesPerElement | ||
); | ||
resolveBuffer(response, id, constructor); | ||
} | ||
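For orientation, a minimal sketch of the zero-copy case described above: when the row arrived as a single buffer slice whose byteOffset is already a multiple of the element size, a typed-array view can wrap it directly instead of copying. This is illustrative code, not part of this package:

// Two little-endian float32 values packed into one aligned byte buffer.
const bytes = new Uint8Array([0, 0, 128, 63, 0, 0, 0, 64]); // 1.0 and 2.0
// Wrap the same memory without copying; the third argument is the element count.
const view = new Float32Array(bytes.buffer, bytes.byteOffset, bytes.byteLength / 4);
console.log(Array.from(view)); // [1, 2]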
function processFullRow(response, id, tag, buffer, chunk) { | ||
{ | ||
switch (tag) { | ||
case 65 | ||
/* "A" */ | ||
: | ||
// We must always clone to extract it into a separate buffer instead of just a view. | ||
resolveBuffer(response, id, mergeBuffer(buffer, chunk).buffer); | ||
return; | ||
case 67 | ||
/* "C" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int8Array, 1); | ||
return; | ||
case 99 | ||
/* "c" */ | ||
: | ||
resolveBuffer(response, id, buffer.length === 0 ? chunk : mergeBuffer(buffer, chunk)); | ||
return; | ||
case 85 | ||
/* "U" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint8ClampedArray, 1); | ||
return; | ||
case 83 | ||
/* "S" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int16Array, 2); | ||
return; | ||
case 115 | ||
/* "s" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint16Array, 2); | ||
return; | ||
case 76 | ||
/* "L" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int32Array, 4); | ||
return; | ||
case 108 | ||
/* "l" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint32Array, 4); | ||
return; | ||
case 70 | ||
/* "F" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Float32Array, 4); | ||
return; | ||
case 100 | ||
/* "d" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Float64Array, 8); | ||
return; | ||
case 78 | ||
/* "N" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, BigInt64Array, 8); | ||
return; | ||
case 109 | ||
/* "m" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, BigUint64Array, 8); | ||
return; | ||
case 86 | ||
/* "V" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, DataView, 1); | ||
return; | ||
} | ||
} | ||
const stringDecoder = response._stringDecoder; | ||
let row = ''; | ||
for (let i = 0; i < buffer.length; i++) { | ||
row += readPartialStringChunk(stringDecoder, buffer[i]); | ||
} | ||
row += readFinalStringChunk(stringDecoder, chunk); | ||
function processFullBinaryRow(response, id, tag, buffer, chunk) { | ||
switch (tag) { | ||
case 73 | ||
/* "I" */ | ||
: | ||
{ | ||
resolveModule(response, id, row); | ||
return; | ||
} | ||
case 72 | ||
/* "H" */ | ||
: | ||
{ | ||
const code = row[0]; | ||
resolveHint(response, code, row.slice(1)); | ||
return; | ||
} | ||
case 69 | ||
/* "E" */ | ||
: | ||
{ | ||
const errorInfo = JSON.parse(row); | ||
{ | ||
resolveErrorProd(response, id, errorInfo.digest); | ||
} | ||
return; | ||
} | ||
case 84 | ||
/* "T" */ | ||
: | ||
{ | ||
resolveText(response, id, row); | ||
return; | ||
} | ||
case 68 | ||
/* "D" */ | ||
: | ||
{ | ||
throw new Error('Failed to read a RSC payload created by a development version of React ' + 'on the server while using a production version on the client. Always use ' + 'matching versions on the server and the client.'); | ||
} | ||
case 80 | ||
/* "P" */ | ||
: | ||
{ | ||
{ | ||
{ | ||
resolvePostponeProd(response, id); | ||
} | ||
return; | ||
} | ||
} | ||
// Fallthrough | ||
default: | ||
/* """ "{" "[" "t" "f" "n" "0" - "9" */ | ||
{ | ||
// We assume anything else is JSON. | ||
resolveModel(response, id, row); | ||
return; | ||
} | ||
case 65: | ||
resolveBuffer(response, id, mergeBuffer(buffer, chunk).buffer); | ||
return; | ||
case 79: | ||
resolveTypedArray(response, id, buffer, chunk, Int8Array, 1); | ||
return; | ||
case 111: | ||
resolveBuffer( | ||
response, | ||
id, | ||
0 === buffer.length ? chunk : mergeBuffer(buffer, chunk) | ||
); | ||
return; | ||
case 85: | ||
resolveTypedArray(response, id, buffer, chunk, Uint8ClampedArray, 1); | ||
return; | ||
case 83: | ||
resolveTypedArray(response, id, buffer, chunk, Int16Array, 2); | ||
return; | ||
case 115: | ||
resolveTypedArray(response, id, buffer, chunk, Uint16Array, 2); | ||
return; | ||
case 76: | ||
resolveTypedArray(response, id, buffer, chunk, Int32Array, 4); | ||
return; | ||
case 108: | ||
resolveTypedArray(response, id, buffer, chunk, Uint32Array, 4); | ||
return; | ||
case 71: | ||
resolveTypedArray(response, id, buffer, chunk, Float32Array, 4); | ||
return; | ||
case 103: | ||
resolveTypedArray(response, id, buffer, chunk, Float64Array, 8); | ||
return; | ||
case 77: | ||
resolveTypedArray(response, id, buffer, chunk, BigInt64Array, 8); | ||
return; | ||
case 109: | ||
resolveTypedArray(response, id, buffer, chunk, BigUint64Array, 8); | ||
return; | ||
case 86: | ||
resolveTypedArray(response, id, buffer, chunk, DataView, 1); | ||
return; | ||
} | ||
for ( | ||
var stringDecoder = response._stringDecoder, row = "", i = 0; | ||
i < buffer.length; | ||
i++ | ||
) | ||
row += stringDecoder.decode(buffer[i], decoderOptions); | ||
row += stringDecoder.decode(chunk); | ||
processFullStringRow(response, id, tag, row); | ||
} | ||
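As a rough guide to the framing that processFullBinaryRow and the state machine below handle: each row starts with a hexadecimal ID, a ':' separator, and a one-character tag; binary and text rows then carry an explicit length, while JSON rows run to the next newline. A tiny, hypothetical header parser (names invented for illustration, not React's code):

// Sketch only: split '<hex id>:<tag><payload>' into its parts.
function parseRowHeader(row) {
  const colon = row.indexOf(':');
  const id = parseInt(row.slice(0, colon), 16); // row IDs are hex-encoded
  const tag = row[colon + 1];                   // single tag character, e.g. 'T', 'I', 'E'
  const rest = row.slice(colon + 2);            // JSON, or '<hex length>,<bytes>' for text/binary rows
  return { id, tag, rest };
}

console.log(parseRowHeader('a:T5,hello')); // { id: 10, tag: 'T', rest: '5,hello' }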
function processBinaryChunk(response, chunk) { | ||
let i = 0; | ||
let rowState = response._rowState; | ||
let rowID = response._rowID; | ||
let rowTag = response._rowTag; | ||
let rowLength = response._rowLength; | ||
const buffer = response._buffer; | ||
const chunkLength = chunk.length; | ||
while (i < chunkLength) { | ||
let lastIdx = -1; | ||
switch (rowState) { | ||
case ROW_ID: | ||
{ | ||
const byte = chunk[i++]; | ||
if (byte === 58 | ||
/* ":" */ | ||
) { | ||
// Finished the rowID, next we'll parse the tag. | ||
rowState = ROW_TAG; | ||
} else { | ||
rowID = rowID << 4 | (byte > 96 ? byte - 87 : byte - 48); | ||
} | ||
continue; | ||
} | ||
case ROW_TAG: | ||
{ | ||
const resolvedRowTag = chunk[i]; | ||
if (resolvedRowTag === 84 | ||
/* "T" */ | ||
|| (resolvedRowTag === 65 | ||
/* "A" */ | ||
|| resolvedRowTag === 67 | ||
/* "C" */ | ||
|| resolvedRowTag === 99 | ||
/* "c" */ | ||
|| resolvedRowTag === 85 | ||
/* "U" */ | ||
|| resolvedRowTag === 83 | ||
/* "S" */ | ||
|| resolvedRowTag === 115 | ||
/* "s" */ | ||
|| resolvedRowTag === 76 | ||
/* "L" */ | ||
|| resolvedRowTag === 108 | ||
/* "l" */ | ||
|| resolvedRowTag === 70 | ||
/* "F" */ | ||
|| resolvedRowTag === 100 | ||
/* "d" */ | ||
|| resolvedRowTag === 78 | ||
/* "N" */ | ||
|| resolvedRowTag === 109 | ||
/* "m" */ | ||
|| resolvedRowTag === 86) | ||
/* "V" */ | ||
) { | ||
rowTag = resolvedRowTag; | ||
rowState = ROW_LENGTH; | ||
i++; | ||
} else if (resolvedRowTag > 64 && resolvedRowTag < 91 | ||
/* "A"-"Z" */ | ||
) { | ||
rowTag = resolvedRowTag; | ||
rowState = ROW_CHUNK_BY_NEWLINE; | ||
i++; | ||
} else { | ||
rowTag = 0; | ||
rowState = ROW_CHUNK_BY_NEWLINE; // This was an unknown tag so it was probably part of the data. | ||
} | ||
continue; | ||
} | ||
case ROW_LENGTH: | ||
{ | ||
const byte = chunk[i++]; | ||
if (byte === 44 | ||
/* "," */ | ||
) { | ||
// Finished the rowLength, next we'll buffer up to that length. | ||
rowState = ROW_CHUNK_BY_LENGTH; | ||
} else { | ||
rowLength = rowLength << 4 | (byte > 96 ? byte - 87 : byte - 48); | ||
} | ||
continue; | ||
} | ||
case ROW_CHUNK_BY_NEWLINE: | ||
{ | ||
// We're looking for a newline | ||
lastIdx = chunk.indexOf(10 | ||
/* "\n" */ | ||
, i); | ||
function processFullStringRow(response, id, tag, row) { | ||
switch (tag) { | ||
case 73: | ||
resolveModule(response, id, row); | ||
break; | ||
case 72: | ||
id = row[0]; | ||
row = row.slice(1); | ||
response = JSON.parse(row, response._fromJSON); | ||
row = ReactDOMSharedInternals.d; | ||
switch (id) { | ||
case "D": | ||
row.D(response); | ||
break; | ||
} | ||
case ROW_CHUNK_BY_LENGTH: | ||
{ | ||
// We're looking for the remaining byte length | ||
lastIdx = i + rowLength; | ||
if (lastIdx > chunk.length) { | ||
lastIdx = -1; | ||
} | ||
case "C": | ||
"string" === typeof response | ||
? row.C(response) | ||
: row.C(response[0], response[1]); | ||
break; | ||
} | ||
} | ||
const offset = chunk.byteOffset + i; | ||
if (lastIdx > -1) { | ||
// We found the last chunk of the row | ||
const length = lastIdx - i; | ||
const lastChunk = new Uint8Array(chunk.buffer, offset, length); | ||
processFullRow(response, rowID, rowTag, buffer, lastChunk); // Reset state machine for a new row | ||
i = lastIdx; | ||
if (rowState === ROW_CHUNK_BY_NEWLINE) { | ||
// If we're trailing by a newline we need to skip it. | ||
i++; | ||
case "L": | ||
id = response[0]; | ||
tag = response[1]; | ||
3 === response.length ? row.L(id, tag, response[2]) : row.L(id, tag); | ||
break; | ||
case "m": | ||
"string" === typeof response | ||
? row.m(response) | ||
: row.m(response[0], response[1]); | ||
break; | ||
case "X": | ||
"string" === typeof response | ||
? row.X(response) | ||
: row.X(response[0], response[1]); | ||
break; | ||
case "S": | ||
"string" === typeof response | ||
? row.S(response) | ||
: row.S( | ||
response[0], | ||
0 === response[1] ? void 0 : response[1], | ||
3 === response.length ? response[2] : void 0 | ||
); | ||
break; | ||
case "M": | ||
"string" === typeof response | ||
? row.M(response) | ||
: row.M(response[0], response[1]); | ||
} | ||
rowState = ROW_ID; | ||
rowTag = 0; | ||
rowID = 0; | ||
rowLength = 0; | ||
buffer.length = 0; | ||
} else { | ||
// The rest of this row is in a future chunk. We stash the rest of the | ||
// current chunk until we can process the full row. | ||
const length = chunk.byteLength - i; | ||
const remainingSlice = new Uint8Array(chunk.buffer, offset, length); | ||
buffer.push(remainingSlice); // Update how many bytes we're still waiting for. If we're looking for | ||
// a newline, this doesn't hurt since we'll just ignore it. | ||
rowLength -= remainingSlice.byteLength; | ||
break; | ||
} | ||
case 69: | ||
tag = JSON.parse(row); | ||
row = resolveErrorProd(); | ||
row.digest = tag.digest; | ||
tag = response._chunks; | ||
var chunk = tag.get(id); | ||
chunk | ||
? triggerErrorOnChunk(chunk, row) | ||
: tag.set(id, new ReactPromise("rejected", null, row, response)); | ||
break; | ||
case 84: | ||
tag = response._chunks; | ||
(chunk = tag.get(id)) && "pending" !== chunk.status | ||
? chunk.reason.enqueueValue(row) | ||
: tag.set(id, new ReactPromise("fulfilled", row, null, response)); | ||
break; | ||
case 78: | ||
case 68: | ||
case 87: | ||
throw Error( | ||
"Failed to read a RSC payload created by a development version of React on the server while using a production version on the client. Always use matching versions on the server and the client." | ||
); | ||
case 82: | ||
startReadableStream(response, id, void 0); | ||
break; | ||
case 114: | ||
startReadableStream(response, id, "bytes"); | ||
break; | ||
case 88: | ||
startAsyncIterable(response, id, !1); | ||
break; | ||
case 120: | ||
startAsyncIterable(response, id, !0); | ||
break; | ||
case 67: | ||
(response = response._chunks.get(id)) && | ||
"fulfilled" === response.status && | ||
response.reason.close("" === row ? '"$undefined"' : row); | ||
break; | ||
case 80: | ||
row = Error( | ||
"A Server Component was postponed. The reason is omitted in production builds to avoid leaking sensitive details." | ||
); | ||
row.$$typeof = REACT_POSTPONE_TYPE; | ||
row.stack = "Error: " + row.message; | ||
tag = response._chunks; | ||
(chunk = tag.get(id)) | ||
? triggerErrorOnChunk(chunk, row) | ||
: tag.set(id, new ReactPromise("rejected", null, row, response)); | ||
break; | ||
default: | ||
(tag = response._chunks), | ||
(chunk = tag.get(id)) | ||
? resolveModelChunk(chunk, row) | ||
: tag.set( | ||
id, | ||
new ReactPromise("resolved_model", row, null, response) | ||
); | ||
} | ||
response._rowState = rowState; | ||
response._rowID = rowID; | ||
response._rowTag = rowTag; | ||
response._rowLength = rowLength; | ||
} | ||
function parseModel(response, json) { | ||
return JSON.parse(json, response._fromJSON); | ||
} | ||
function createFromJSONCallback(response) { | ||
// $FlowFixMe[missing-this-annot] | ||
return function (key, value) { | ||
if (typeof value === 'string') { | ||
// We can't use .bind here because we need the "this" value. | ||
if ("string" === typeof value) | ||
return parseModelString(response, this, key, value); | ||
if ("object" === typeof value && null !== value) { | ||
if (value[0] === REACT_ELEMENT_TYPE) { | ||
if ( | ||
((key = { | ||
$$typeof: REACT_ELEMENT_TYPE, | ||
type: value[1], | ||
key: value[2], | ||
ref: null, | ||
props: value[3] | ||
}), | ||
null !== initializingHandler) | ||
) | ||
if ( | ||
((value = initializingHandler), | ||
(initializingHandler = value.parent), | ||
value.errored) | ||
) | ||
(key = new ReactPromise("rejected", null, value.value, response)), | ||
(key = createLazyChunkWrapper(key)); | ||
else if (0 < value.deps) { | ||
var blockedChunk = new ReactPromise( | ||
"blocked", | ||
null, | ||
null, | ||
response | ||
); | ||
value.value = key; | ||
value.chunk = blockedChunk; | ||
key = createLazyChunkWrapper(blockedChunk); | ||
} | ||
} else key = value; | ||
return key; | ||
} | ||
if (typeof value === 'object' && value !== null) { | ||
return parseModelTuple(response, value); | ||
} | ||
return value; | ||
}; | ||
} | ||
function close(response) { | ||
// In case there are any remaining unresolved chunks, they won't | ||
// be resolved now. So we need to issue an error to those. | ||
// Ideally we should be able to early bail out if we kept a | ||
// ref count of pending chunks. | ||
reportGlobalError(response, new Error('Connection closed.')); | ||
} | ||
function noServerCall() { | ||
throw new Error('Server Functions cannot be called during initial render. ' + 'This would create a fetch waterfall. Try to use a Server Component ' + 'to pass data to Client Components instead.'); | ||
throw Error( | ||
"Server Functions cannot be called during initial render. This would create a fetch waterfall. Try to use a Server Component to pass data to Client Components instead." | ||
); | ||
} | ||
exports.createFromNodeStream = function ( | ||
stream, | ||
serverConsumerManifest, | ||
options | ||
) { | ||
var response = new ResponseInstance( | ||
serverConsumerManifest.moduleMap, | ||
serverConsumerManifest.serverModuleMap, | ||
serverConsumerManifest.moduleLoading, | ||
noServerCall, | ||
options ? options.encodeFormAction : void 0, | ||
options && "string" === typeof options.nonce ? options.nonce : void 0, | ||
void 0 | ||
); | ||
stream.on("data", function (chunk) { | ||
if ("string" === typeof chunk) { | ||
for ( | ||
var i = 0, | ||
rowState = response._rowState, | ||
rowID = response._rowID, | ||
rowTag = response._rowTag, | ||
rowLength = response._rowLength, | ||
buffer = response._buffer, | ||
chunkLength = chunk.length; | ||
i < chunkLength; | ||
function createServerReference(id, callServer) { | ||
return createServerReference$1(id, noServerCall); | ||
} | ||
function createFromNodeStream(stream, ssrManifest, options) { | ||
const response = createResponse(ssrManifest.moduleMap, ssrManifest.moduleLoading, noServerCall, options && typeof options.nonce === 'string' ? options.nonce : undefined); | ||
stream.on('data', chunk => { | ||
processBinaryChunk(response, chunk); | ||
) { | ||
var lastIdx = -1; | ||
switch (rowState) { | ||
case 0: | ||
lastIdx = chunk.charCodeAt(i++); | ||
58 === lastIdx | ||
? (rowState = 1) | ||
: (rowID = | ||
(rowID << 4) | (96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 1: | ||
rowState = chunk.charCodeAt(i); | ||
84 === rowState || | ||
65 === rowState || | ||
79 === rowState || | ||
111 === rowState || | ||
85 === rowState || | ||
83 === rowState || | ||
115 === rowState || | ||
76 === rowState || | ||
108 === rowState || | ||
71 === rowState || | ||
103 === rowState || | ||
77 === rowState || | ||
109 === rowState || | ||
86 === rowState | ||
? ((rowTag = rowState), (rowState = 2), i++) | ||
: (64 < rowState && 91 > rowState) || | ||
114 === rowState || | ||
120 === rowState | ||
? ((rowTag = rowState), (rowState = 3), i++) | ||
: ((rowTag = 0), (rowState = 3)); | ||
continue; | ||
case 2: | ||
lastIdx = chunk.charCodeAt(i++); | ||
44 === lastIdx | ||
? (rowState = 4) | ||
: (rowLength = | ||
(rowLength << 4) | | ||
(96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 3: | ||
lastIdx = chunk.indexOf("\n", i); | ||
break; | ||
case 4: | ||
if (84 !== rowTag) | ||
throw Error( | ||
"Binary RSC chunks cannot be encoded as strings. This is a bug in the wiring of the React streams." | ||
); | ||
if (rowLength < chunk.length || chunk.length > 3 * rowLength) | ||
throw Error( | ||
"String chunks need to be passed in their original shape. Not split into smaller string chunks. This is a bug in the wiring of the React streams." | ||
); | ||
lastIdx = chunk.length; | ||
} | ||
if (-1 < lastIdx) { | ||
if (0 < buffer.length) | ||
throw Error( | ||
"String chunks need to be passed in their original shape. Not split into smaller string chunks. This is a bug in the wiring of the React streams." | ||
); | ||
i = chunk.slice(i, lastIdx); | ||
processFullStringRow(response, rowID, rowTag, i); | ||
i = lastIdx; | ||
3 === rowState && i++; | ||
rowLength = rowID = rowTag = rowState = 0; | ||
buffer.length = 0; | ||
} else if (chunk.length !== i) | ||
throw Error( | ||
"String chunks need to be passed in their original shape. Not split into smaller string chunks. This is a bug in the wiring of the React streams." | ||
); | ||
} | ||
response._rowState = rowState; | ||
response._rowID = rowID; | ||
response._rowTag = rowTag; | ||
response._rowLength = rowLength; | ||
} else { | ||
rowLength = 0; | ||
chunkLength = response._rowState; | ||
rowID = response._rowID; | ||
i = response._rowTag; | ||
rowState = response._rowLength; | ||
buffer = response._buffer; | ||
for (rowTag = chunk.length; rowLength < rowTag; ) { | ||
lastIdx = -1; | ||
switch (chunkLength) { | ||
case 0: | ||
lastIdx = chunk[rowLength++]; | ||
58 === lastIdx | ||
? (chunkLength = 1) | ||
: (rowID = | ||
(rowID << 4) | (96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 1: | ||
chunkLength = chunk[rowLength]; | ||
84 === chunkLength || | ||
65 === chunkLength || | ||
79 === chunkLength || | ||
111 === chunkLength || | ||
85 === chunkLength || | ||
83 === chunkLength || | ||
115 === chunkLength || | ||
76 === chunkLength || | ||
108 === chunkLength || | ||
71 === chunkLength || | ||
103 === chunkLength || | ||
77 === chunkLength || | ||
109 === chunkLength || | ||
86 === chunkLength | ||
? ((i = chunkLength), (chunkLength = 2), rowLength++) | ||
: (64 < chunkLength && 91 > chunkLength) || | ||
35 === chunkLength || | ||
114 === chunkLength || | ||
120 === chunkLength | ||
? ((i = chunkLength), (chunkLength = 3), rowLength++) | ||
: ((i = 0), (chunkLength = 3)); | ||
continue; | ||
case 2: | ||
lastIdx = chunk[rowLength++]; | ||
44 === lastIdx | ||
? (chunkLength = 4) | ||
: (rowState = | ||
(rowState << 4) | | ||
(96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 3: | ||
lastIdx = chunk.indexOf(10, rowLength); | ||
break; | ||
case 4: | ||
(lastIdx = rowLength + rowState), | ||
lastIdx > chunk.length && (lastIdx = -1); | ||
} | ||
var offset = chunk.byteOffset + rowLength; | ||
if (-1 < lastIdx) | ||
(rowState = new Uint8Array( | ||
chunk.buffer, | ||
offset, | ||
lastIdx - rowLength | ||
)), | ||
processFullBinaryRow(response, rowID, i, buffer, rowState), | ||
(rowLength = lastIdx), | ||
3 === chunkLength && rowLength++, | ||
(rowState = rowID = i = chunkLength = 0), | ||
(buffer.length = 0); | ||
else { | ||
chunk = new Uint8Array( | ||
chunk.buffer, | ||
offset, | ||
chunk.byteLength - rowLength | ||
); | ||
buffer.push(chunk); | ||
rowState -= chunk.byteLength; | ||
break; | ||
} | ||
} | ||
response._rowState = chunkLength; | ||
response._rowID = rowID; | ||
response._rowTag = i; | ||
response._rowLength = rowState; | ||
} | ||
}); | ||
stream.on('error', error => { | ||
stream.on("error", function (error) { | ||
reportGlobalError(response, error); | ||
}); | ||
stream.on('end', () => close(response)); | ||
return getRoot(response); | ||
} | ||
exports.createFromNodeStream = createFromNodeStream; | ||
exports.createServerReference = createServerReference; | ||
stream.on("end", function () { | ||
reportGlobalError(response, Error("Connection closed.")); | ||
}); | ||
return getChunk(response, 0); | ||
}; | ||
exports.createServerReference = function (id) { | ||
return createServerReference$1(id, noServerCall); | ||
}; |
/** | ||
* @license React | ||
* react-server-dom-webpack-client.node.unbundled.production.min.js | ||
* react-server-dom-webpack-client.node.unbundled.production.js | ||
* | ||
@@ -11,799 +11,638 @@ * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
'use strict'; | ||
var util = require('util'); | ||
var ReactDOM = require('react-dom'); | ||
function createStringDecoder() { | ||
return new util.TextDecoder(); | ||
} | ||
const decoderOptions = { | ||
stream: true | ||
}; | ||
function readPartialStringChunk(decoder, buffer) { | ||
return decoder.decode(buffer, decoderOptions); | ||
} | ||
function readFinalStringChunk(decoder, buffer) { | ||
return decoder.decode(buffer); | ||
} | ||
// This is the parsed shape of the wire format which is why it is | ||
// condensed to only the essential information | ||

const ID = 0; | ||
const CHUNKS = 1; | ||
const NAME = 2; // export const ASYNC = 3; | ||
// This logic is correct because we currently only include the 4th tuple member | ||
// when the module is async. If that changes we will need to actually assert | ||
// the value is true. We don't index into the 4th slot because flow does not | ||
// like the potential out of bounds access | ||
function isAsyncImport(metadata) { | ||
return metadata.length === 4; | ||
} | ||
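To make the tuple shape concrete: the client reference metadata is [ID, CHUNKS, NAME], with a fourth slot present only when the module is async, and the resolver maps it through the manifest. A hypothetical manifest entry and lookup, for illustration only (all names made up):

const consumerManifest = {
  'src/Button.js': {
    default: { specifier: 'file:///build/Button.mjs', name: 'default' }
  }
};
// [ID, CHUNKS, NAME]; a 4th entry would mean the module is async.
const metadata = ['src/Button.js', [], 'default'];
const resolved = consumerManifest[metadata[0]][metadata[2]];
console.log(resolved.specifier, metadata.length === 4); // 'file:///build/Button.mjs' false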
// The reason this function needs to defined here in this file instead of just | ||
// being exported directly from the WebpackDestination... file is because the | ||
// ClientReferenceMetadata is opaque and we can't unwrap it there. | ||
// This should get inlined and we could also just implement an unwrapping function | ||
// though that risks it getting used in places it shouldn't be. This is unfortunate | ||
// but currently it seems to be the best option we have. | ||
function prepareDestinationForModule(moduleLoading, nonce, metadata) { | ||
prepareDestinationWithChunks(moduleLoading, metadata[CHUNKS], nonce); | ||
} | ||
"use strict"; | ||
var util = require("util"), | ||
ReactDOM = require("react-dom"), | ||
decoderOptions = { stream: !0 }; | ||
function resolveClientReference(bundlerConfig, metadata) { | ||
const moduleExports = bundlerConfig[metadata[ID]]; | ||
let resolvedModuleData = moduleExports[metadata[NAME]]; | ||
let name; | ||
if (resolvedModuleData) { | ||
// The potentially aliased name. | ||
name = resolvedModuleData.name; | ||
} else { | ||
// If we don't have this specific name, we might have the full module. | ||
resolvedModuleData = moduleExports['*']; | ||
if (!resolvedModuleData) { | ||
throw new Error('Could not find the module "' + metadata[ID] + '" in the React SSR Manifest. ' + 'This is probably a bug in the React Server Components bundler.'); | ||
} | ||
name = metadata[NAME]; | ||
var moduleExports = bundlerConfig[metadata[0]]; | ||
if ((bundlerConfig = moduleExports && moduleExports[metadata[2]])) | ||
moduleExports = bundlerConfig.name; | ||
else { | ||
bundlerConfig = moduleExports && moduleExports["*"]; | ||
if (!bundlerConfig) | ||
throw Error( | ||
'Could not find the module "' + | ||
metadata[0] + | ||
'" in the React Server Consumer Manifest. This is probably a bug in the React Server Components bundler.' | ||
); | ||
moduleExports = metadata[2]; | ||
} | ||
return { | ||
specifier: resolvedModuleData.specifier, | ||
name: name, | ||
async: isAsyncImport(metadata) | ||
specifier: bundlerConfig.specifier, | ||
name: moduleExports, | ||
async: 4 === metadata.length | ||
}; | ||
} | ||
const asyncModuleCache = new Map(); | ||
function resolveServerReference(bundlerConfig, id) { | ||
var idx = id.lastIndexOf("#"); | ||
bundlerConfig = id.slice(0, idx); | ||
id = id.slice(idx + 1); | ||
return { specifier: bundlerConfig, name: id }; | ||
} | ||
var asyncModuleCache = new Map(); | ||
function preloadModule(metadata) { | ||
const existingPromise = asyncModuleCache.get(metadata.specifier); | ||
if (existingPromise) { | ||
if (existingPromise.status === 'fulfilled') { | ||
return null; | ||
} | ||
return existingPromise; | ||
} else { | ||
// $FlowFixMe[unsupported-syntax] | ||
let modulePromise = import(metadata.specifier); | ||
if (metadata.async) { | ||
// If the module is async, it must have been a CJS module. | ||
// CJS modules are accessed through the default export in | ||
// Node.js so we have to get the default export to get the | ||
// full module exports. | ||
modulePromise = modulePromise.then(function (value) { | ||
return value.default; | ||
}); | ||
} | ||
modulePromise.then(value => { | ||
const fulfilledThenable = modulePromise; | ||
fulfilledThenable.status = 'fulfilled'; | ||
var existingPromise = asyncModuleCache.get(metadata.specifier); | ||
if (existingPromise) | ||
return "fulfilled" === existingPromise.status ? null : existingPromise; | ||
var modulePromise = import(metadata.specifier); | ||
metadata.async && | ||
(modulePromise = modulePromise.then(function (value) { | ||
return value.default; | ||
})); | ||
modulePromise.then( | ||
function (value) { | ||
var fulfilledThenable = modulePromise; | ||
fulfilledThenable.status = "fulfilled"; | ||
fulfilledThenable.value = value; | ||
}, reason => { | ||
const rejectedThenable = modulePromise; | ||
rejectedThenable.status = 'rejected'; | ||
}, | ||
function (reason) { | ||
var rejectedThenable = modulePromise; | ||
rejectedThenable.status = "rejected"; | ||
rejectedThenable.reason = reason; | ||
}); | ||
asyncModuleCache.set(metadata.specifier, modulePromise); | ||
return modulePromise; | ||
} | ||
} | ||
); | ||
asyncModuleCache.set(metadata.specifier, modulePromise); | ||
return modulePromise; | ||
} | ||
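A minimal sketch of the caching pattern used here (assumed names, not React's internals): the dynamic import() promise is stored per specifier and annotated with a status field, so a later synchronous read can pick the value straight off the fulfilled thenable.

const moduleCache = new Map();

function preload(specifier) {
  const cached = moduleCache.get(specifier);
  if (cached) return cached.status === 'fulfilled' ? null : cached;
  const promise = import(specifier);
  promise.then(
    value => { promise.status = 'fulfilled'; promise.value = value; },
    reason => { promise.status = 'rejected'; promise.reason = reason; }
  );
  moduleCache.set(specifier, promise);
  return promise;
}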
function requireModule(metadata) { | ||
let moduleExports; // We assume that preloadModule has been called before, which | ||
// should have added something to the module cache. | ||
const promise = asyncModuleCache.get(metadata.specifier); | ||
if (promise.status === 'fulfilled') { | ||
moduleExports = promise.value; | ||
} else { | ||
throw promise.reason; | ||
} | ||
if (metadata.name === '*') { | ||
// This is a placeholder value that represents that the caller imported this | ||
// as a CommonJS module as is. | ||
return moduleExports; | ||
} | ||
if (metadata.name === '') { | ||
// This is a placeholder value that represents that the caller accessed the | ||
// default property of this if it was an ESM interop module. | ||
return moduleExports.default; | ||
} | ||
return moduleExports[metadata.name]; | ||
var moduleExports = asyncModuleCache.get(metadata.specifier); | ||
if ("fulfilled" === moduleExports.status) moduleExports = moduleExports.value; | ||
else throw moduleExports.reason; | ||
return "*" === metadata.name | ||
? moduleExports | ||
: "" === metadata.name | ||
? moduleExports.default | ||
: moduleExports[metadata.name]; | ||
} | ||
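The export-name convention above, restated as a tiny sketch (assumed helper name): '*' means the whole module namespace, '' means the default export of an interop module, anything else is a named export.

function pickExport(moduleExports, name) {
  if (name === '*') return moduleExports;       // CommonJS module imported as-is
  if (name === '') return moduleExports.default; // ESM interop default
  return moduleExports[name];                    // named export
}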
function prepareDestinationWithChunks(moduleLoading, // Chunks are double-indexed [..., idx, filenamex, idy, filenamey, ...] | ||
chunks, nonce) { | ||
if (moduleLoading !== null) { | ||
for (let i = 1; i < chunks.length; i += 2) { | ||
preinitScriptForSSR(moduleLoading.prefix + chunks[i], nonce, moduleLoading.crossOrigin); | ||
function prepareDestinationWithChunks(moduleLoading, chunks, nonce$jscomp$0) { | ||
if (null !== moduleLoading) | ||
for (var i = 1; i < chunks.length; i += 2) { | ||
var nonce = nonce$jscomp$0, | ||
JSCompiler_temp_const = ReactDOMSharedInternals.d, | ||
JSCompiler_temp_const$jscomp$0 = JSCompiler_temp_const.X, | ||
JSCompiler_temp_const$jscomp$1 = moduleLoading.prefix + chunks[i]; | ||
var JSCompiler_inline_result = moduleLoading.crossOrigin; | ||
JSCompiler_inline_result = | ||
"string" === typeof JSCompiler_inline_result | ||
? "use-credentials" === JSCompiler_inline_result | ||
? JSCompiler_inline_result | ||
: "" | ||
: void 0; | ||
JSCompiler_temp_const$jscomp$0.call( | ||
JSCompiler_temp_const, | ||
JSCompiler_temp_const$jscomp$1, | ||
{ crossOrigin: JSCompiler_inline_result, nonce: nonce } | ||
); | ||
} | ||
} | ||
} | ||
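The chunks array is double-indexed as [chunkId0, filename0, chunkId1, filename1, ...], which is why both versions of the loop above step by 2 starting at index 1. A simplified sketch with made-up values:

const chunks = ['main', 'main.chunk.js', 'vendor', 'vendor.chunk.js'];
const prefix = '/static/';
for (let i = 1; i < chunks.length; i += 2) {
  console.log('preinit script:', prefix + chunks[i]);
}
// preinit script: /static/main.chunk.js
// preinit script: /static/vendor.chunk.js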
const ReactDOMSharedInternals = ReactDOM.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED; | ||
function getCrossOriginString(input) { | ||
if (typeof input === 'string') { | ||
return input === 'use-credentials' ? input : ''; | ||
} | ||
return undefined; | ||
var ReactDOMSharedInternals = | ||
ReactDOM.__DOM_INTERNALS_DO_NOT_USE_OR_WARN_USERS_THEY_CANNOT_UPGRADE, | ||
REACT_ELEMENT_TYPE = Symbol.for("react.transitional.element"), | ||
REACT_LAZY_TYPE = Symbol.for("react.lazy"), | ||
REACT_POSTPONE_TYPE = Symbol.for("react.postpone"), | ||
MAYBE_ITERATOR_SYMBOL = Symbol.iterator; | ||
function getIteratorFn(maybeIterable) { | ||
if (null === maybeIterable || "object" !== typeof maybeIterable) return null; | ||
maybeIterable = | ||
(MAYBE_ITERATOR_SYMBOL && maybeIterable[MAYBE_ITERATOR_SYMBOL]) || | ||
maybeIterable["@@iterator"]; | ||
return "function" === typeof maybeIterable ? maybeIterable : null; | ||
} | ||
// This client file is in the shared folder because it applies to both SSR and browser contexts. | ||
const ReactDOMCurrentDispatcher = ReactDOMSharedInternals.Dispatcher; | ||
function dispatchHint(code, model) { | ||
const dispatcher = ReactDOMCurrentDispatcher.current; | ||
if (dispatcher) { | ||
switch (code) { | ||
case 'D': | ||
{ | ||
const refined = refineModel(code, model); | ||
const href = refined; | ||
dispatcher.prefetchDNS(href); | ||
return; | ||
} | ||
case 'C': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preconnect(href); | ||
} else { | ||
const href = refined[0]; | ||
const crossOrigin = refined[1]; | ||
dispatcher.preconnect(href, crossOrigin); | ||
} | ||
return; | ||
} | ||
case 'L': | ||
{ | ||
const refined = refineModel(code, model); | ||
const href = refined[0]; | ||
const as = refined[1]; | ||
if (refined.length === 3) { | ||
const options = refined[2]; | ||
dispatcher.preload(href, as, options); | ||
} else { | ||
dispatcher.preload(href, as); | ||
} | ||
return; | ||
} | ||
case 'm': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preloadModule(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preloadModule(href, options); | ||
} | ||
return; | ||
} | ||
case 'S': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitStyle(href); | ||
} else { | ||
const href = refined[0]; | ||
const precedence = refined[1] === 0 ? undefined : refined[1]; | ||
const options = refined.length === 3 ? refined[2] : undefined; | ||
dispatcher.preinitStyle(href, precedence, options); | ||
} | ||
return; | ||
} | ||
case 'X': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitScript(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preinitScript(href, options); | ||
} | ||
return; | ||
} | ||
case 'M': | ||
{ | ||
const refined = refineModel(code, model); | ||
if (typeof refined === 'string') { | ||
const href = refined; | ||
dispatcher.preinitModuleScript(href); | ||
} else { | ||
const href = refined[0]; | ||
const options = refined[1]; | ||
dispatcher.preinitModuleScript(href, options); | ||
} | ||
return; | ||
} | ||
} | ||
} | ||
} // Flow is having trouble refining the HintModels so we help it a bit. | ||
// This should be compiled out in the production build. | ||
function refineModel(code, model) { | ||
return model; | ||
var ASYNC_ITERATOR = Symbol.asyncIterator, | ||
isArrayImpl = Array.isArray, | ||
getPrototypeOf = Object.getPrototypeOf, | ||
ObjectPrototype = Object.prototype, | ||
knownServerReferences = new WeakMap(); | ||
function serializeNumber(number) { | ||
return Number.isFinite(number) | ||
? 0 === number && -Infinity === 1 / number | ||
? "$-0" | ||
: number | ||
: Infinity === number | ||
? "$Infinity" | ||
: -Infinity === number | ||
? "$-Infinity" | ||
: "$NaN"; | ||
} | ||
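The special numeric encodings above, exercised on edge values in a standalone sketch (same logic, hypothetical function name):

function encodeNumber(n) {
  return Number.isFinite(n)
    ? (n === 0 && 1 / n === -Infinity ? '$-0' : n)   // preserve negative zero
    : n === Infinity ? '$Infinity'
    : n === -Infinity ? '$-Infinity'
    : '$NaN';
}

console.log([1.5, -0, Infinity, -Infinity, NaN].map(encodeNumber));
// [1.5, '$-0', '$Infinity', '$-Infinity', '$NaN']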
function preinitScriptForSSR(href, nonce, crossOrigin) { | ||
const dispatcher = ReactDOMCurrentDispatcher.current; | ||
if (dispatcher) { | ||
dispatcher.preinitScript(href, { | ||
crossOrigin: getCrossOriginString(crossOrigin), | ||
nonce | ||
}); | ||
function processReply( | ||
root, | ||
formFieldPrefix, | ||
temporaryReferences, | ||
resolve, | ||
reject | ||
) { | ||
function serializeTypedArray(tag, typedArray) { | ||
typedArray = new Blob([ | ||
new Uint8Array( | ||
typedArray.buffer, | ||
typedArray.byteOffset, | ||
typedArray.byteLength | ||
) | ||
]); | ||
var blobId = nextPartId++; | ||
null === formData && (formData = new FormData()); | ||
formData.append(formFieldPrefix + blobId, typedArray); | ||
return "$" + tag + blobId.toString(16); | ||
} | ||
} | ||
// ATTENTION | ||
// When adding new symbols to this file, | ||
// Please consider also adding to 'react-devtools-shared/src/backend/ReactSymbols' | ||
// The Symbol used to tag the ReactElement-like types. | ||
const REACT_ELEMENT_TYPE = Symbol.for('react.element'); | ||
const REACT_LAZY_TYPE = Symbol.for('react.lazy'); | ||
const REACT_POSTPONE_TYPE = Symbol.for('react.postpone'); | ||
const MAYBE_ITERATOR_SYMBOL = Symbol.iterator; | ||
const FAUX_ITERATOR_SYMBOL = '@@iterator'; | ||
function getIteratorFn(maybeIterable) { | ||
if (maybeIterable === null || typeof maybeIterable !== 'object') { | ||
return null; | ||
function serializeBinaryReader(reader) { | ||
function progress(entry) { | ||
entry.done | ||
? ((entry = nextPartId++), | ||
data.append(formFieldPrefix + entry, new Blob(buffer)), | ||
data.append( | ||
formFieldPrefix + streamId, | ||
'"$o' + entry.toString(16) + '"' | ||
), | ||
data.append(formFieldPrefix + streamId, "C"), | ||
pendingParts--, | ||
0 === pendingParts && resolve(data)) | ||
: (buffer.push(entry.value), | ||
reader.read(new Uint8Array(1024)).then(progress, reject)); | ||
} | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++, | ||
buffer = []; | ||
reader.read(new Uint8Array(1024)).then(progress, reject); | ||
return "$r" + streamId.toString(16); | ||
} | ||
const maybeIterator = MAYBE_ITERATOR_SYMBOL && maybeIterable[MAYBE_ITERATOR_SYMBOL] || maybeIterable[FAUX_ITERATOR_SYMBOL]; | ||
if (typeof maybeIterator === 'function') { | ||
return maybeIterator; | ||
function serializeReader(reader) { | ||
function progress(entry) { | ||
if (entry.done) | ||
data.append(formFieldPrefix + streamId, "C"), | ||
pendingParts--, | ||
0 === pendingParts && resolve(data); | ||
else | ||
try { | ||
var partJSON = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, partJSON); | ||
reader.read().then(progress, reject); | ||
} catch (x) { | ||
reject(x); | ||
} | ||
} | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++; | ||
reader.read().then(progress, reject); | ||
return "$R" + streamId.toString(16); | ||
} | ||
return null; | ||
} | ||
const isArrayImpl = Array.isArray; // eslint-disable-next-line no-redeclare | ||
function isArray(a) { | ||
return isArrayImpl(a); | ||
} | ||
const getPrototypeOf = Object.getPrototypeOf; | ||
const ObjectPrototype = Object.prototype; | ||
const knownServerReferences = new WeakMap(); // Serializable values | ||
// Thenable<ReactServerValue> | ||
// function serializeByValueID(id: number): string { | ||
// return '$' + id.toString(16); | ||
// } | ||
function serializePromiseID(id) { | ||
return '$@' + id.toString(16); | ||
} | ||
function serializeServerReferenceID(id) { | ||
return '$F' + id.toString(16); | ||
} | ||
function serializeSymbolReference(name) { | ||
return '$S' + name; | ||
} | ||
function serializeFormDataReference(id) { | ||
// Why K? F is "Function". D is "Date". What else? | ||
return '$K' + id.toString(16); | ||
} | ||
function serializeNumber(number) { | ||
if (Number.isFinite(number)) { | ||
if (number === 0 && 1 / number === -Infinity) { | ||
return '$-0'; | ||
} else { | ||
return number; | ||
function serializeReadableStream(stream) { | ||
try { | ||
var binaryReader = stream.getReader({ mode: "byob" }); | ||
} catch (x) { | ||
return serializeReader(stream.getReader()); | ||
} | ||
} else { | ||
if (number === Infinity) { | ||
return '$Infinity'; | ||
} else if (number === -Infinity) { | ||
return '$-Infinity'; | ||
} else { | ||
return '$NaN'; | ||
return serializeBinaryReader(binaryReader); | ||
} | ||
function serializeAsyncIterable(iterable, iterator) { | ||
function progress(entry) { | ||
if (entry.done) { | ||
if (void 0 === entry.value) | ||
data.append(formFieldPrefix + streamId, "C"); | ||
else | ||
try { | ||
var partJSON = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, "C" + partJSON); | ||
} catch (x) { | ||
reject(x); | ||
return; | ||
} | ||
pendingParts--; | ||
0 === pendingParts && resolve(data); | ||
} else | ||
try { | ||
var partJSON$22 = JSON.stringify(entry.value, resolveToJSON); | ||
data.append(formFieldPrefix + streamId, partJSON$22); | ||
iterator.next().then(progress, reject); | ||
} catch (x$23) { | ||
reject(x$23); | ||
} | ||
} | ||
null === formData && (formData = new FormData()); | ||
var data = formData; | ||
pendingParts++; | ||
var streamId = nextPartId++; | ||
iterable = iterable === iterator; | ||
iterator.next().then(progress, reject); | ||
return "$" + (iterable ? "x" : "X") + streamId.toString(16); | ||
} | ||
} | ||
function serializeUndefined() { | ||
return '$undefined'; | ||
} | ||
function serializeDateFromDateJSON(dateJSON) { | ||
// JSON.stringify automatically calls Date.prototype.toJSON which calls toISOString. | ||
// We need only tack on a $D prefix. | ||
return '$D' + dateJSON; | ||
} | ||
function serializeBigInt(n) { | ||
return '$n' + n.toString(10); | ||
} | ||
function serializeMapID(id) { | ||
return '$Q' + id.toString(16); | ||
} | ||
function serializeSetID(id) { | ||
return '$W' + id.toString(16); | ||
} | ||
function escapeStringValue(value) { | ||
if (value[0] === '$') { | ||
// We need to escape $ prefixed strings since we use those to encode | ||
// references to IDs and as special symbol values. | ||
return '$' + value; | ||
} else { | ||
return value; | ||
} | ||
} | ||
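Two of the string encodings above, shown end to end in a small sketch (illustrative only): strings that already begin with '$' are escaped with an extra '$' so they can't collide with reference encodings, and Date values become '$D' plus their ISO string.

function encodeString(value) {
  return value[0] === '$' ? '$' + value : value; // escape reserved '$' prefix
}

console.log(encodeString('$100'));        // '$$100'
console.log('$D' + new Date(0).toJSON()); // '$D1970-01-01T00:00:00.000Z'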
function processReply(root, formFieldPrefix, resolve, reject) { | ||
let nextPartId = 1; | ||
let pendingParts = 0; | ||
let formData = null; | ||
function resolveToJSON(key, value) { | ||
const parent = this; // Make sure that `parent[key]` wasn't JSONified before `value` was passed to us | ||
if (value === null) { | ||
return null; | ||
} | ||
if (typeof value === 'object') { | ||
// $FlowFixMe[method-unbinding] | ||
if (typeof value.then === 'function') { | ||
// We assume that any object with a .then property is a "Thenable" type, | ||
// or a Promise type. Either of which can be represented by a Promise. | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to stream this value. | ||
formData = new FormData(); | ||
} | ||
if (null === value) return null; | ||
if ("object" === typeof value) { | ||
switch (value.$$typeof) { | ||
case REACT_ELEMENT_TYPE: | ||
if (void 0 !== temporaryReferences && -1 === key.indexOf(":")) { | ||
var parentReference = writtenObjects.get(this); | ||
if (void 0 !== parentReference) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), | ||
"$T" | ||
); | ||
} | ||
throw Error( | ||
"React Element cannot be passed to Server Functions from the Client without a temporary reference set. Pass a TemporaryReferenceSet to the options." | ||
); | ||
case REACT_LAZY_TYPE: | ||
parentReference = value._payload; | ||
var init = value._init; | ||
null === formData && (formData = new FormData()); | ||
pendingParts++; | ||
try { | ||
var resolvedModel = init(parentReference), | ||
lazyId = nextPartId++, | ||
partJSON = serializeModel(resolvedModel, lazyId); | ||
formData.append(formFieldPrefix + lazyId, partJSON); | ||
return "$" + lazyId.toString(16); | ||
} catch (x) { | ||
if ( | ||
"object" === typeof x && | ||
null !== x && | ||
"function" === typeof x.then | ||
) { | ||
pendingParts++; | ||
var lazyId$24 = nextPartId++; | ||
parentReference = function () { | ||
try { | ||
var partJSON$25 = serializeModel(value, lazyId$24), | ||
data$26 = formData; | ||
data$26.append(formFieldPrefix + lazyId$24, partJSON$25); | ||
pendingParts--; | ||
0 === pendingParts && resolve(data$26); | ||
} catch (reason) { | ||
reject(reason); | ||
} | ||
}; | ||
x.then(parentReference, parentReference); | ||
return "$" + lazyId$24.toString(16); | ||
} | ||
reject(x); | ||
return null; | ||
} finally { | ||
pendingParts--; | ||
} | ||
} | ||
if ("function" === typeof value.then) { | ||
null === formData && (formData = new FormData()); | ||
pendingParts++; | ||
const promiseId = nextPartId++; | ||
const thenable = value; | ||
thenable.then(partValue => { | ||
const partJSON = JSON.stringify(partValue, resolveToJSON); // $FlowFixMe[incompatible-type] We know it's not null because we assigned it above. | ||
const data = formData; // eslint-disable-next-line react-internal/safe-string-coercion | ||
data.append(formFieldPrefix + promiseId, partJSON); | ||
pendingParts--; | ||
if (pendingParts === 0) { | ||
resolve(data); | ||
var promiseId = nextPartId++; | ||
value.then(function (partValue) { | ||
try { | ||
var partJSON$28 = serializeModel(partValue, promiseId); | ||
partValue = formData; | ||
partValue.append(formFieldPrefix + promiseId, partJSON$28); | ||
pendingParts--; | ||
0 === pendingParts && resolve(partValue); | ||
} catch (reason) { | ||
reject(reason); | ||
} | ||
}, reason => { | ||
// In the future we could consider serializing this as an error | ||
// that throws on the server instead. | ||
reject(reason); | ||
}); | ||
return serializePromiseID(promiseId); | ||
}, reject); | ||
return "$@" + promiseId.toString(16); | ||
} | ||
if (isArray(value)) { | ||
// $FlowFixMe[incompatible-return] | ||
return value; | ||
} // TODO: Should we use Object.prototype.toString.call() to test for cross-realm objects? | ||
parentReference = writtenObjects.get(value); | ||
if (void 0 !== parentReference) | ||
if (modelRoot === value) modelRoot = null; | ||
else return parentReference; | ||
else | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference && | ||
((key = parentReference + ":" + key), | ||
writtenObjects.set(value, key), | ||
void 0 !== temporaryReferences && | ||
temporaryReferences.set(key, value))); | ||
if (isArrayImpl(value)) return value; | ||
if (value instanceof FormData) { | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to use rich objects as its values. | ||
formData = new FormData(); | ||
} | ||
const data = formData; | ||
const refId = nextPartId++; // Copy all the form fields with a prefix for this reference. | ||
// These must come first in the form order because we assume that all the | ||
// fields are available before this is referenced. | ||
const prefix = formFieldPrefix + refId + '_'; // $FlowFixMe[prop-missing]: FormData has forEach. | ||
value.forEach((originalValue, originalKey) => { | ||
data.append(prefix + originalKey, originalValue); | ||
null === formData && (formData = new FormData()); | ||
var data$32 = formData; | ||
key = nextPartId++; | ||
var prefix = formFieldPrefix + key + "_"; | ||
value.forEach(function (originalValue, originalKey) { | ||
data$32.append(prefix + originalKey, originalValue); | ||
}); | ||
return serializeFormDataReference(refId); | ||
return "$K" + key.toString(16); | ||
} | ||
if (value instanceof Map) { | ||
const partJSON = JSON.stringify(Array.from(value), resolveToJSON); | ||
if (formData === null) { | ||
formData = new FormData(); | ||
} | ||
const mapId = nextPartId++; | ||
formData.append(formFieldPrefix + mapId, partJSON); | ||
return serializeMapID(mapId); | ||
if (value instanceof Map) | ||
return ( | ||
(key = nextPartId++), | ||
(parentReference = serializeModel(Array.from(value), key)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$Q" + key.toString(16) | ||
); | ||
if (value instanceof Set) | ||
return ( | ||
(key = nextPartId++), | ||
(parentReference = serializeModel(Array.from(value), key)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$W" + key.toString(16) | ||
); | ||
if (value instanceof ArrayBuffer) | ||
return ( | ||
(key = new Blob([value])), | ||
(parentReference = nextPartId++), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + parentReference, key), | ||
"$A" + parentReference.toString(16) | ||
); | ||
if (value instanceof Int8Array) return serializeTypedArray("O", value); | ||
if (value instanceof Uint8Array) return serializeTypedArray("o", value); | ||
if (value instanceof Uint8ClampedArray) | ||
return serializeTypedArray("U", value); | ||
if (value instanceof Int16Array) return serializeTypedArray("S", value); | ||
if (value instanceof Uint16Array) return serializeTypedArray("s", value); | ||
if (value instanceof Int32Array) return serializeTypedArray("L", value); | ||
if (value instanceof Uint32Array) return serializeTypedArray("l", value); | ||
if (value instanceof Float32Array) return serializeTypedArray("G", value); | ||
if (value instanceof Float64Array) return serializeTypedArray("g", value); | ||
if (value instanceof BigInt64Array) | ||
return serializeTypedArray("M", value); | ||
if (value instanceof BigUint64Array) | ||
return serializeTypedArray("m", value); | ||
if (value instanceof DataView) return serializeTypedArray("V", value); | ||
if ("function" === typeof Blob && value instanceof Blob) | ||
return ( | ||
null === formData && (formData = new FormData()), | ||
(key = nextPartId++), | ||
formData.append(formFieldPrefix + key, value), | ||
"$B" + key.toString(16) | ||
); | ||
if ((key = getIteratorFn(value))) | ||
return ( | ||
(parentReference = key.call(value)), | ||
parentReference === value | ||
? ((key = nextPartId++), | ||
(parentReference = serializeModel( | ||
Array.from(parentReference), | ||
key | ||
)), | ||
null === formData && (formData = new FormData()), | ||
formData.append(formFieldPrefix + key, parentReference), | ||
"$i" + key.toString(16)) | ||
: Array.from(parentReference) | ||
); | ||
if ( | ||
"function" === typeof ReadableStream && | ||
value instanceof ReadableStream | ||
) | ||
return serializeReadableStream(value); | ||
key = value[ASYNC_ITERATOR]; | ||
if ("function" === typeof key) | ||
return serializeAsyncIterable(value, key.call(value)); | ||
key = getPrototypeOf(value); | ||
if ( | ||
key !== ObjectPrototype && | ||
(null === key || null !== getPrototypeOf(key)) | ||
) { | ||
if (void 0 === temporaryReferences) | ||
throw Error( | ||
"Only plain objects, and a few built-ins, can be passed to Server Functions. Classes or null prototypes are not supported." | ||
); | ||
return "$T"; | ||
} | ||
if (value instanceof Set) { | ||
const partJSON = JSON.stringify(Array.from(value), resolveToJSON); | ||
if (formData === null) { | ||
formData = new FormData(); | ||
} | ||
const setId = nextPartId++; | ||
formData.append(formFieldPrefix + setId, partJSON); | ||
return serializeSetID(setId); | ||
} | ||
const iteratorFn = getIteratorFn(value); | ||
if (iteratorFn) { | ||
return Array.from(value); | ||
} // Verify that this is a simple plain object. | ||
const proto = getPrototypeOf(value); | ||
if (proto !== ObjectPrototype && (proto === null || getPrototypeOf(proto) !== null)) { | ||
throw new Error('Only plain objects, and a few built-ins, can be passed to Server Actions. ' + 'Classes or null prototypes are not supported.'); | ||
} | ||
return value; | ||
} | ||
if (typeof value === 'string') { | ||
// TODO: Maybe too clever. If we support URL there's no similar trick. | ||
if (value[value.length - 1] === 'Z') { | ||
// Possibly a Date, whose toJSON automatically calls toISOString | ||
// $FlowFixMe[incompatible-use] | ||
const originalValue = parent[key]; | ||
if (originalValue instanceof Date) { | ||
return serializeDateFromDateJSON(value); | ||
} | ||
} | ||
return escapeStringValue(value); | ||
if ("string" === typeof value) { | ||
if ("Z" === value[value.length - 1] && this[key] instanceof Date) | ||
return "$D" + value; | ||
key = "$" === value[0] ? "$" + value : value; | ||
return key; | ||
} | ||
if (typeof value === 'boolean') { | ||
return value; | ||
if ("boolean" === typeof value) return value; | ||
if ("number" === typeof value) return serializeNumber(value); | ||
if ("undefined" === typeof value) return "$undefined"; | ||
if ("function" === typeof value) { | ||
parentReference = knownServerReferences.get(value); | ||
if (void 0 !== parentReference) | ||
return ( | ||
(key = JSON.stringify(parentReference, resolveToJSON)), | ||
null === formData && (formData = new FormData()), | ||
(parentReference = nextPartId++), | ||
formData.set(formFieldPrefix + parentReference, key), | ||
"$F" + parentReference.toString(16) | ||
); | ||
if ( | ||
void 0 !== temporaryReferences && | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference) | ||
) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), "$T" | ||
); | ||
throw Error( | ||
"Client Functions cannot be passed directly to Server Functions. Only Functions passed from the Server can be passed back again." | ||
); | ||
} | ||
if (typeof value === 'number') { | ||
return serializeNumber(value); | ||
if ("symbol" === typeof value) { | ||
if ( | ||
void 0 !== temporaryReferences && | ||
-1 === key.indexOf(":") && | ||
((parentReference = writtenObjects.get(this)), | ||
void 0 !== parentReference) | ||
) | ||
return ( | ||
temporaryReferences.set(parentReference + ":" + key, value), "$T" | ||
); | ||
throw Error( | ||
"Symbols cannot be passed to a Server Function without a temporary reference set. Pass a TemporaryReferenceSet to the options." | ||
); | ||
} | ||
if (typeof value === 'undefined') { | ||
return serializeUndefined(); | ||
} | ||
if (typeof value === 'function') { | ||
const metaData = knownServerReferences.get(value); | ||
if (metaData !== undefined) { | ||
const metaDataJSON = JSON.stringify(metaData, resolveToJSON); | ||
if (formData === null) { | ||
// Upgrade to use FormData to allow us to stream this value. | ||
formData = new FormData(); | ||
} // The reference to this function came from the same client so we can pass it back. | ||
const refId = nextPartId++; // eslint-disable-next-line react-internal/safe-string-coercion | ||
formData.set(formFieldPrefix + refId, metaDataJSON); | ||
return serializeServerReferenceID(refId); | ||
} | ||
throw new Error('Client Functions cannot be passed directly to Server Functions. ' + 'Only Functions passed from the Server can be passed back again.'); | ||
} | ||
if (typeof value === 'symbol') { | ||
// $FlowFixMe[incompatible-type] `description` might be undefined | ||
const name = value.description; | ||
if (Symbol.for(name) !== value) { | ||
throw new Error('Only global symbols received from Symbol.for(...) can be passed to Server Functions. ' + ("The symbol Symbol.for(" + // $FlowFixMe[incompatible-type] `description` might be undefined | ||
value.description + ") cannot be found among global symbols.")); | ||
} | ||
return serializeSymbolReference(name); | ||
} | ||
if (typeof value === 'bigint') { | ||
return serializeBigInt(value); | ||
} | ||
throw new Error("Type " + typeof value + " is not supported as an argument to a Server Function."); | ||
} // $FlowFixMe[incompatible-type] it's not going to be undefined because we'll encode it. | ||
const json = JSON.stringify(root, resolveToJSON); | ||
if (formData === null) { | ||
// If it's a simple data structure, we just use plain JSON. | ||
resolve(json); | ||
} else { | ||
// Otherwise, we use FormData to let us stream in the result. | ||
formData.set(formFieldPrefix + '0', json); | ||
if (pendingParts === 0) { | ||
// $FlowFixMe[incompatible-call] this has already been refined. | ||
resolve(formData); | ||
} | ||
if ("bigint" === typeof value) return "$n" + value.toString(10); | ||
throw Error( | ||
"Type " + | ||
typeof value + | ||
" is not supported as an argument to a Server Function." | ||
); | ||
} | ||
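// Illustrative sketch (not part of the bundle): the instanceof chain above outlines
// every binary value into a numbered FormData part and leaves only a short
// "$<tag><part id in hex>" reference in the JSON model (e.g. "$A" for ArrayBuffer,
// "$o" for Uint8Array, "$B" for Blob). A minimal version of that scheme, with
// hypothetical names (encodePart, demoFormData):
function encodePart(formData, prefix, partId, tag, bytes) {
  // Wrap the bytes in a Blob so FormData can carry them as a streamable part.
  formData.append(prefix + partId, new Blob([bytes]));
  // The reference string is what ends up inside the JSON-encoded model.
  return "$" + tag + partId.toString(16);
}
var demoFormData = new FormData();
var demoRef = encodePart(demoFormData, "1_", 2, "o", new Uint8Array([1, 2, 3]));
// demoRef === "$o2"; demoFormData now holds the bytes under the field "1_2".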
function serializeModel(model, id) { | ||
"object" === typeof model && | ||
null !== model && | ||
((id = "$" + id.toString(16)), | ||
writtenObjects.set(model, id), | ||
void 0 !== temporaryReferences && temporaryReferences.set(id, model)); | ||
modelRoot = model; | ||
return JSON.stringify(model, resolveToJSON); | ||
} | ||
var nextPartId = 1, | ||
pendingParts = 0, | ||
formData = null, | ||
writtenObjects = new WeakMap(), | ||
modelRoot = root, | ||
json = serializeModel(root, 0); | ||
null === formData | ||
? resolve(json) | ||
: (formData.set(formFieldPrefix + "0", json), | ||
0 === pendingParts && resolve(formData)); | ||
return function () { | ||
0 < pendingParts && | ||
((pendingParts = 0), | ||
null === formData ? resolve(json) : resolve(formData)); | ||
}; | ||
} | ||
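// Illustrative sketch (not from the bundle): the reply encoder above resolves with a
// bare JSON string when no extra parts were needed, and otherwise with a FormData
// whose "<prefix>0" field holds the root JSON next to the numbered parts. A
// hypothetical consumer can normalize both shapes like this:
function toFormDataBody(result) {
  if (typeof result === "string") {
    var data = new FormData();
    data.append("0", result);
    return data;
  }
  return result; // already a FormData carrying the streamed parts
}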
const boundCache = new WeakMap(); | ||
var boundCache = new WeakMap(); | ||
function encodeFormData(reference) { | ||
let resolve, reject; // We need to have a handle on the thenable so that we can synchronously set | ||
// its status from processReply, when it can complete synchronously. | ||
const thenable = new Promise((res, rej) => { | ||
resolve = res; | ||
reject = rej; | ||
}); | ||
processReply(reference, '', body => { | ||
if (typeof body === 'string') { | ||
const data = new FormData(); | ||
data.append('0', body); | ||
body = data; | ||
var resolve, | ||
reject, | ||
thenable = new Promise(function (res, rej) { | ||
resolve = res; | ||
reject = rej; | ||
}); | ||
processReply( | ||
reference, | ||
"", | ||
void 0, | ||
function (body) { | ||
if ("string" === typeof body) { | ||
var data = new FormData(); | ||
data.append("0", body); | ||
body = data; | ||
} | ||
thenable.status = "fulfilled"; | ||
thenable.value = body; | ||
resolve(body); | ||
}, | ||
function (e) { | ||
thenable.status = "rejected"; | ||
thenable.reason = e; | ||
reject(e); | ||
} | ||
const fulfilled = thenable; | ||
fulfilled.status = 'fulfilled'; | ||
fulfilled.value = body; | ||
resolve(body); | ||
}, e => { | ||
const rejected = thenable; | ||
rejected.status = 'rejected'; | ||
rejected.reason = e; | ||
reject(e); | ||
}); | ||
); | ||
return thenable; | ||
} | ||
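// Illustrative sketch (not from the bundle): encodeFormData above returns a Promise
// that it also stamps with status/value/reason fields so synchronous code paths can
// inspect the result without awaiting. A generic version of that instrumentation
// pattern, under a hypothetical name (instrumentThenable):
function instrumentThenable(promise) {
  promise.status = "pending";
  promise.then(
    function (value) {
      promise.status = "fulfilled";
      promise.value = value;
    },
    function (reason) {
      promise.status = "rejected";
      promise.reason = reason;
    }
  );
  return promise;
}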
function encodeFormAction(identifierPrefix) { | ||
const reference = knownServerReferences.get(this); | ||
if (!reference) { | ||
throw new Error('Tried to encode a Server Action from a different instance than the encoder is from. ' + 'This is a bug in React.'); | ||
} | ||
let data = null; | ||
let name; | ||
const boundPromise = reference.bound; | ||
if (boundPromise !== null) { | ||
let thenable = boundCache.get(reference); | ||
if (!thenable) { | ||
thenable = encodeFormData(reference); | ||
boundCache.set(reference, thenable); | ||
} | ||
if (thenable.status === 'rejected') { | ||
throw thenable.reason; | ||
} else if (thenable.status !== 'fulfilled') { | ||
throw thenable; | ||
} | ||
    const encodedFormData = thenable.value; // This is hacky: we need the identifier prefix added to | ||
    // all fields, but the suspense cache would break since we might get | ||
    // a new identifier each time. So we just append it at the end instead. | ||
const prefixedData = new FormData(); // $FlowFixMe[prop-missing] | ||
encodedFormData.forEach((value, key) => { | ||
prefixedData.append('$ACTION_' + identifierPrefix + ':' + key, value); | ||
function defaultEncodeFormAction(identifierPrefix) { | ||
var reference = knownServerReferences.get(this); | ||
if (!reference) | ||
throw Error( | ||
"Tried to encode a Server Action from a different instance than the encoder is from. This is a bug in React." | ||
); | ||
var data = null; | ||
if (null !== reference.bound) { | ||
data = boundCache.get(reference); | ||
data || | ||
((data = encodeFormData(reference)), boundCache.set(reference, data)); | ||
if ("rejected" === data.status) throw data.reason; | ||
if ("fulfilled" !== data.status) throw data; | ||
reference = data.value; | ||
var prefixedData = new FormData(); | ||
reference.forEach(function (value, key) { | ||
prefixedData.append("$ACTION_" + identifierPrefix + ":" + key, value); | ||
}); | ||
data = prefixedData; // We encode the name of the prefix containing the data. | ||
name = '$ACTION_REF_' + identifierPrefix; | ||
} else { | ||
// This is the simple case so we can just encode the ID. | ||
name = '$ACTION_ID_' + reference.id; | ||
} | ||
data = prefixedData; | ||
reference = "$ACTION_REF_" + identifierPrefix; | ||
} else reference = "$ACTION_ID_" + reference.id; | ||
return { | ||
name: name, | ||
method: 'POST', | ||
encType: 'multipart/form-data', | ||
name: reference, | ||
method: "POST", | ||
encType: "multipart/form-data", | ||
data: data | ||
}; | ||
} | ||
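// Illustrative sketch (not from the bundle; values are made up): the encoder above
// returns a descriptor that the form layer can apply to a submission. Without bound
// arguments it names the action id directly; with bound arguments it names a prefix
// under which the encoded fields were duplicated as "$ACTION_<prefix>:<key>".
var simpleActionDescriptor = {
  name: "$ACTION_ID_example", // hypothetical id
  method: "POST",
  encType: "multipart/form-data",
  data: null
};
var boundActionDescriptor = {
  name: "$ACTION_REF_1", // bound fields live under "$ACTION_1:<key>"
  method: "POST",
  encType: "multipart/form-data",
  data: new FormData() // prefixed copy of the encoded bound arguments
};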
function isSignatureEqual(referenceId, numberOfBoundArgs) { | ||
const reference = knownServerReferences.get(this); | ||
if (!reference) { | ||
throw new Error('Tried to encode a Server Action from a different instance than the encoder is from. ' + 'This is a bug in React.'); | ||
} | ||
if (reference.id !== referenceId) { | ||
// These are different functions. | ||
return false; | ||
} // Now check if the number of bound arguments is the same. | ||
const boundPromise = reference.bound; | ||
if (boundPromise === null) { | ||
// No bound arguments. | ||
return numberOfBoundArgs === 0; | ||
} // Unwrap the bound arguments array by suspending, if necessary. As with | ||
// encodeFormData, this means isSignatureEqual can only be called while React | ||
// is rendering. | ||
var reference = knownServerReferences.get(this); | ||
if (!reference) | ||
throw Error( | ||
"Tried to encode a Server Action from a different instance than the encoder is from. This is a bug in React." | ||
); | ||
if (reference.id !== referenceId) return !1; | ||
var boundPromise = reference.bound; | ||
if (null === boundPromise) return 0 === numberOfBoundArgs; | ||
switch (boundPromise.status) { | ||
case 'fulfilled': | ||
{ | ||
const boundArgs = boundPromise.value; | ||
return boundArgs.length === numberOfBoundArgs; | ||
} | ||
case 'pending': | ||
{ | ||
throw boundPromise; | ||
} | ||
case 'rejected': | ||
{ | ||
throw boundPromise.reason; | ||
} | ||
case "fulfilled": | ||
return boundPromise.value.length === numberOfBoundArgs; | ||
case "pending": | ||
throw boundPromise; | ||
case "rejected": | ||
throw boundPromise.reason; | ||
default: | ||
{ | ||
if (typeof boundPromise.status === 'string') ; else { | ||
const pendingThenable = boundPromise; | ||
pendingThenable.status = 'pending'; | ||
pendingThenable.then(boundArgs => { | ||
const fulfilledThenable = boundPromise; | ||
fulfilledThenable.status = 'fulfilled'; | ||
fulfilledThenable.value = boundArgs; | ||
}, error => { | ||
const rejectedThenable = boundPromise; | ||
rejectedThenable.status = 'rejected'; | ||
rejectedThenable.reason = error; | ||
}); | ||
} | ||
throw boundPromise; | ||
} | ||
throw ( | ||
("string" !== typeof boundPromise.status && | ||
((boundPromise.status = "pending"), | ||
boundPromise.then( | ||
function (boundArgs) { | ||
boundPromise.status = "fulfilled"; | ||
boundPromise.value = boundArgs; | ||
}, | ||
function (error) { | ||
boundPromise.status = "rejected"; | ||
boundPromise.reason = error; | ||
} | ||
)), | ||
boundPromise) | ||
); | ||
} | ||
} | ||
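// Illustrative sketch (not from the bundle): isSignatureEqual above unwraps the
// bound-arguments promise with the usual suspense convention, throwing the promise
// itself while it is still pending so the caller can retry after it settles. A
// generic helper for that convention (readThenable is a hypothetical name):
function readThenable(thenable) {
  if (thenable.status === "fulfilled") return thenable.value;
  if (thenable.status === "rejected") throw thenable.reason;
  throw thenable; // still pending or untracked: suspend by throwing the thenable
}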
function registerServerReference(proxy, reference) { | ||
// Expose encoder for use by SSR, as well as a special bind that can be used to | ||
// keep server capabilities. | ||
{ | ||
// Only expose this in builds that would actually use it. Not needed on the client. | ||
Object.defineProperties(proxy, { | ||
$$FORM_ACTION: { | ||
value: encodeFormAction | ||
}, | ||
$$IS_SIGNATURE_EQUAL: { | ||
value: isSignatureEqual | ||
}, | ||
bind: { | ||
value: bind | ||
} | ||
}); | ||
} | ||
knownServerReferences.set(proxy, reference); | ||
} // $FlowFixMe[method-unbinding] | ||
const FunctionBind = Function.prototype.bind; // $FlowFixMe[method-unbinding] | ||
const ArraySlice = Array.prototype.slice; | ||
function registerServerReference(proxy, reference$jscomp$0, encodeFormAction) { | ||
Object.defineProperties(proxy, { | ||
$$FORM_ACTION: { | ||
value: | ||
void 0 === encodeFormAction | ||
? defaultEncodeFormAction | ||
: function () { | ||
var reference = knownServerReferences.get(this); | ||
if (!reference) | ||
throw Error( | ||
"Tried to encode a Server Action from a different instance than the encoder is from. This is a bug in React." | ||
); | ||
var boundPromise = reference.bound; | ||
null === boundPromise && (boundPromise = Promise.resolve([])); | ||
return encodeFormAction(reference.id, boundPromise); | ||
} | ||
}, | ||
$$IS_SIGNATURE_EQUAL: { value: isSignatureEqual }, | ||
bind: { value: bind } | ||
}); | ||
knownServerReferences.set(proxy, reference$jscomp$0); | ||
} | ||
var FunctionBind = Function.prototype.bind, | ||
ArraySlice = Array.prototype.slice; | ||
function bind() { | ||
// $FlowFixMe[unsupported-syntax] | ||
const newFn = FunctionBind.apply(this, arguments); | ||
const reference = knownServerReferences.get(this); | ||
var newFn = FunctionBind.apply(this, arguments), | ||
reference = knownServerReferences.get(this); | ||
if (reference) { | ||
const args = ArraySlice.call(arguments, 1); | ||
let boundPromise = null; | ||
if (reference.bound !== null) { | ||
boundPromise = Promise.resolve(reference.bound).then(boundArgs => boundArgs.concat(args)); | ||
} else { | ||
boundPromise = Promise.resolve(args); | ||
} | ||
registerServerReference(newFn, { | ||
id: reference.id, | ||
bound: boundPromise | ||
var args = ArraySlice.call(arguments, 1), | ||
boundPromise = null; | ||
boundPromise = | ||
null !== reference.bound | ||
? Promise.resolve(reference.bound).then(function (boundArgs) { | ||
return boundArgs.concat(args); | ||
}) | ||
: Promise.resolve(args); | ||
Object.defineProperties(newFn, { | ||
$$FORM_ACTION: { value: this.$$FORM_ACTION }, | ||
$$IS_SIGNATURE_EQUAL: { value: isSignatureEqual }, | ||
bind: { value: bind } | ||
}); | ||
knownServerReferences.set(newFn, { id: reference.id, bound: boundPromise }); | ||
} | ||
return newFn; | ||
} | ||
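// Illustrative sketch (not from the bundle): the bind wrapper above layers each
// call's extra arguments (minus the `this` slot) onto whatever was already bound,
// keeping the accumulated list behind a promise. The accumulation step in isolation
// (concatBound is a hypothetical name):
function concatBound(previousBound, newArgs) {
  return previousBound === null
    ? Promise.resolve(newArgs)
    : Promise.resolve(previousBound).then(function (boundArgs) {
        return boundArgs.concat(newArgs);
      });
}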
function createServerReference$1(id, callServer) { | ||
const proxy = function () { | ||
// $FlowFixMe[method-unbinding] | ||
const args = Array.prototype.slice.call(arguments); | ||
function createBoundServerReference(metaData, callServer, encodeFormAction) { | ||
function action() { | ||
var args = Array.prototype.slice.call(arguments); | ||
return bound | ||
? "fulfilled" === bound.status | ||
? callServer(id, bound.value.concat(args)) | ||
: Promise.resolve(bound).then(function (boundArgs) { | ||
return callServer(id, boundArgs.concat(args)); | ||
}) | ||
: callServer(id, args); | ||
} | ||
var id = metaData.id, | ||
bound = metaData.bound; | ||
registerServerReference(action, { id: id, bound: bound }, encodeFormAction); | ||
return action; | ||
} | ||
function createServerReference$1(id, callServer, encodeFormAction) { | ||
function action() { | ||
var args = Array.prototype.slice.call(arguments); | ||
return callServer(id, args); | ||
}; | ||
registerServerReference(proxy, { | ||
id, | ||
bound: null | ||
}); | ||
return proxy; | ||
} | ||
registerServerReference(action, { id: id, bound: null }, encodeFormAction); | ||
return action; | ||
} | ||
const ROW_ID = 0; | ||
const ROW_TAG = 1; | ||
const ROW_LENGTH = 2; | ||
const ROW_CHUNK_BY_NEWLINE = 3; | ||
const ROW_CHUNK_BY_LENGTH = 4; | ||
const PENDING = 'pending'; | ||
const BLOCKED = 'blocked'; | ||
const CYCLIC = 'cyclic'; | ||
const RESOLVED_MODEL = 'resolved_model'; | ||
const RESOLVED_MODULE = 'resolved_module'; | ||
const INITIALIZED = 'fulfilled'; | ||
const ERRORED = 'rejected'; // Dev-only | ||
// $FlowFixMe[missing-this-annot] | ||
function Chunk(status, value, reason, response) { | ||
function ReactPromise(status, value, reason, response) { | ||
this.status = status; | ||
@@ -813,78 +652,41 @@ this.value = value; | ||
this._response = response; | ||
} // We subclass Promise.prototype so that we get other methods like .catch | ||
Chunk.prototype = Object.create(Promise.prototype); // TODO: This doesn't return a new Promise chain unlike the real .then | ||
Chunk.prototype.then = function (resolve, reject) { | ||
const chunk = this; // If we have resolved content, we try to initialize it first which | ||
// might put us back into one of the other states. | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
} | ||
ReactPromise.prototype = Object.create(Promise.prototype); | ||
ReactPromise.prototype.then = function (resolve, reject) { | ||
switch (this.status) { | ||
case "resolved_model": | ||
initializeModelChunk(this); | ||
break; | ||
case RESOLVED_MODULE: | ||
initializeModuleChunk(chunk); | ||
case "resolved_module": | ||
initializeModuleChunk(this); | ||
} | ||
switch (this.status) { | ||
case "fulfilled": | ||
resolve(this.value); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
resolve(chunk.value); | ||
case "pending": | ||
case "blocked": | ||
resolve && | ||
(null === this.value && (this.value = []), this.value.push(resolve)); | ||
reject && | ||
(null === this.reason && (this.reason = []), this.reason.push(reject)); | ||
break; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
if (resolve) { | ||
if (chunk.value === null) { | ||
chunk.value = []; | ||
} | ||
chunk.value.push(resolve); | ||
} | ||
if (reject) { | ||
if (chunk.reason === null) { | ||
chunk.reason = []; | ||
} | ||
chunk.reason.push(reject); | ||
} | ||
break; | ||
default: | ||
reject(chunk.reason); | ||
break; | ||
reject && reject(this.reason); | ||
} | ||
}; | ||
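// Illustrative sketch (not from the bundle): chunks are promise-like records whose
// status moves through "pending"/"blocked" into "resolved_model"/"resolved_module"
// and finally "fulfilled" or "rejected". While unsettled, the value and reason
// slots double as arrays of resolve/reject listeners, as in the branch above. That
// bookkeeping in isolation (pushChunkListener is a hypothetical name):
function pushChunkListener(chunk, resolve, reject) {
  if (resolve) {
    if (chunk.value === null) chunk.value = [];
    chunk.value.push(resolve);
  }
  if (reject) {
    if (chunk.reason === null) chunk.reason = [];
    chunk.reason.push(reject);
  }
}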
function readChunk(chunk) { | ||
// If we have resolved content, we try to initialize it first which | ||
// might put us back into one of the other states. | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
case "resolved_model": | ||
initializeModelChunk(chunk); | ||
break; | ||
case RESOLVED_MODULE: | ||
case "resolved_module": | ||
initializeModuleChunk(chunk); | ||
break; | ||
} // The status might have changed after initialization. | ||
} | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
case "fulfilled": | ||
return chunk.value; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
// eslint-disable-next-line no-throw-literal | ||
case "pending": | ||
case "blocked": | ||
throw chunk; | ||
default: | ||
@@ -894,1017 +696,1135 @@ throw chunk.reason; | ||
} | ||
function getRoot(response) { | ||
const chunk = getChunk(response, 0); | ||
return chunk; | ||
} | ||
function createPendingChunk(response) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(PENDING, null, null, response); | ||
return new ReactPromise("pending", null, null, response); | ||
} | ||
function createBlockedChunk(response) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(BLOCKED, null, null, response); | ||
} | ||
function createErrorChunk(response, error) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(ERRORED, null, error, response); | ||
} | ||
function wakeChunk(listeners, value) { | ||
for (let i = 0; i < listeners.length; i++) { | ||
const listener = listeners[i]; | ||
listener(value); | ||
} | ||
for (var i = 0; i < listeners.length; i++) (0, listeners[i])(value); | ||
} | ||
function wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners) { | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
case "fulfilled": | ||
wakeChunk(resolveListeners, chunk.value); | ||
break; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
chunk.value = resolveListeners; | ||
chunk.reason = rejectListeners; | ||
case "pending": | ||
case "blocked": | ||
if (chunk.value) | ||
for (var i = 0; i < resolveListeners.length; i++) | ||
chunk.value.push(resolveListeners[i]); | ||
else chunk.value = resolveListeners; | ||
if (chunk.reason) { | ||
if (rejectListeners) | ||
for ( | ||
resolveListeners = 0; | ||
resolveListeners < rejectListeners.length; | ||
resolveListeners++ | ||
) | ||
chunk.reason.push(rejectListeners[resolveListeners]); | ||
} else chunk.reason = rejectListeners; | ||
break; | ||
case ERRORED: | ||
if (rejectListeners) { | ||
wakeChunk(rejectListeners, chunk.reason); | ||
} | ||
break; | ||
case "rejected": | ||
rejectListeners && wakeChunk(rejectListeners, chunk.reason); | ||
} | ||
} | ||
function triggerErrorOnChunk(chunk, error) { | ||
if (chunk.status !== PENDING && chunk.status !== BLOCKED) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" !== chunk.status && "blocked" !== chunk.status) | ||
chunk.reason.error(error); | ||
else { | ||
var listeners = chunk.reason; | ||
chunk.status = "rejected"; | ||
chunk.reason = error; | ||
null !== listeners && wakeChunk(listeners, error); | ||
} | ||
const listeners = chunk.reason; | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
if (listeners !== null) { | ||
wakeChunk(listeners, error); | ||
} | ||
} | ||
function createResolvedModelChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(RESOLVED_MODEL, value, null, response); | ||
function createResolvedIteratorResultChunk(response, value, done) { | ||
return new ReactPromise( | ||
"resolved_model", | ||
(done ? '{"done":true,"value":' : '{"done":false,"value":') + value + "}", | ||
null, | ||
response | ||
); | ||
} | ||
function createResolvedModuleChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(RESOLVED_MODULE, value, null, response); | ||
function resolveIteratorResultChunk(chunk, value, done) { | ||
resolveModelChunk( | ||
chunk, | ||
(done ? '{"done":true,"value":' : '{"done":false,"value":') + value + "}" | ||
); | ||
} | ||
function createInitializedTextChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(INITIALIZED, value, null, response); | ||
} | ||
function createInitializedBufferChunk(response, value) { | ||
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors | ||
return new Chunk(INITIALIZED, value, null, response); | ||
} | ||
function resolveModelChunk(chunk, value) { | ||
if (chunk.status !== PENDING) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" !== chunk.status) chunk.reason.enqueueModel(value); | ||
else { | ||
var resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "resolved_model"; | ||
chunk.value = value; | ||
null !== resolveListeners && | ||
(initializeModelChunk(chunk), | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners)); | ||
} | ||
const resolveListeners = chunk.value; | ||
const rejectListeners = chunk.reason; | ||
const resolvedChunk = chunk; | ||
resolvedChunk.status = RESOLVED_MODEL; | ||
resolvedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
// This is unfortunate that we're reading this eagerly if | ||
// we already have listeners attached since they might no | ||
// longer be rendered or might not be the highest pri. | ||
initializeModelChunk(resolvedChunk); // The status might have changed after initialization. | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
} | ||
function resolveModuleChunk(chunk, value) { | ||
if (chunk.status !== PENDING && chunk.status !== BLOCKED) { | ||
// We already resolved. We didn't expect to see this. | ||
return; | ||
if ("pending" === chunk.status || "blocked" === chunk.status) { | ||
var resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "resolved_module"; | ||
chunk.value = value; | ||
null !== resolveListeners && | ||
(initializeModuleChunk(chunk), | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners)); | ||
} | ||
const resolveListeners = chunk.value; | ||
const rejectListeners = chunk.reason; | ||
const resolvedChunk = chunk; | ||
resolvedChunk.status = RESOLVED_MODULE; | ||
resolvedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
initializeModuleChunk(resolvedChunk); | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
} | ||
let initializingChunk = null; | ||
let initializingChunkBlockedModel = null; | ||
var initializingHandler = null; | ||
function initializeModelChunk(chunk) { | ||
const prevChunk = initializingChunk; | ||
const prevBlocked = initializingChunkBlockedModel; | ||
initializingChunk = chunk; | ||
initializingChunkBlockedModel = null; | ||
const resolvedModel = chunk.value; // We go to the CYCLIC state until we've fully resolved this. | ||
// We do this before parsing in case we try to initialize the same chunk | ||
// while parsing the model. Such as in a cyclic reference. | ||
const cyclicChunk = chunk; | ||
cyclicChunk.status = CYCLIC; | ||
cyclicChunk.value = null; | ||
cyclicChunk.reason = null; | ||
var prevHandler = initializingHandler; | ||
initializingHandler = null; | ||
var resolvedModel = chunk.value; | ||
chunk.status = "blocked"; | ||
chunk.value = null; | ||
chunk.reason = null; | ||
try { | ||
const value = parseModel(chunk._response, resolvedModel); | ||
if (initializingChunkBlockedModel !== null && initializingChunkBlockedModel.deps > 0) { | ||
initializingChunkBlockedModel.value = value; // We discovered new dependencies on modules that are not yet resolved. | ||
// We have to go the BLOCKED state until they're resolved. | ||
const blockedChunk = chunk; | ||
blockedChunk.status = BLOCKED; | ||
blockedChunk.value = null; | ||
blockedChunk.reason = null; | ||
} else { | ||
const resolveListeners = cyclicChunk.value; | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = value; | ||
if (resolveListeners !== null) { | ||
wakeChunk(resolveListeners, value); | ||
var value = JSON.parse(resolvedModel, chunk._response._fromJSON), | ||
resolveListeners = chunk.value; | ||
null !== resolveListeners && | ||
((chunk.value = null), | ||
(chunk.reason = null), | ||
wakeChunk(resolveListeners, value)); | ||
if (null !== initializingHandler) { | ||
if (initializingHandler.errored) throw initializingHandler.value; | ||
if (0 < initializingHandler.deps) { | ||
initializingHandler.value = value; | ||
initializingHandler.chunk = chunk; | ||
return; | ||
} | ||
} | ||
chunk.status = "fulfilled"; | ||
chunk.value = value; | ||
} catch (error) { | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
(chunk.status = "rejected"), (chunk.reason = error); | ||
} finally { | ||
initializingChunk = prevChunk; | ||
initializingChunkBlockedModel = prevBlocked; | ||
initializingHandler = prevHandler; | ||
} | ||
} | ||
function initializeModuleChunk(chunk) { | ||
try { | ||
const value = requireModule(chunk.value); | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = value; | ||
var value = requireModule(chunk.value); | ||
chunk.status = "fulfilled"; | ||
chunk.value = value; | ||
} catch (error) { | ||
const erroredChunk = chunk; | ||
erroredChunk.status = ERRORED; | ||
erroredChunk.reason = error; | ||
(chunk.status = "rejected"), (chunk.reason = error); | ||
} | ||
  } // Report that any missing chunks in the model are now going to throw this | ||
// error upon read. Also notify any pending promises. | ||
} | ||
function reportGlobalError(response, error) { | ||
response._chunks.forEach(chunk => { | ||
// If this chunk was already resolved or errored, it won't | ||
// trigger an error but if it wasn't then we need to | ||
// because we won't be getting any new data to resolve it. | ||
if (chunk.status === PENDING) { | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
response._chunks.forEach(function (chunk) { | ||
"pending" === chunk.status && triggerErrorOnChunk(chunk, error); | ||
}); | ||
} | ||
function createElement(type, key, props) { | ||
const element = { | ||
// This tag allows us to uniquely identify this as a React Element | ||
$$typeof: REACT_ELEMENT_TYPE, | ||
// Built-in properties that belong on the element | ||
type: type, | ||
key: key, | ||
ref: null, | ||
props: props, | ||
// Record the component responsible for creating this element. | ||
_owner: null | ||
}; | ||
return element; | ||
} | ||
function createLazyChunkWrapper(chunk) { | ||
const lazyType = { | ||
$$typeof: REACT_LAZY_TYPE, | ||
_payload: chunk, | ||
_init: readChunk | ||
}; | ||
return lazyType; | ||
return { $$typeof: REACT_LAZY_TYPE, _payload: chunk, _init: readChunk }; | ||
} | ||
function getChunk(response, id) { | ||
const chunks = response._chunks; | ||
let chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunk = createPendingChunk(response); | ||
chunks.set(id, chunk); | ||
} | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk || ((chunk = createPendingChunk(response)), chunks.set(id, chunk)); | ||
return chunk; | ||
} | ||
function createModelResolver(chunk, parentObject, key, cyclic) { | ||
let blocked; | ||
if (initializingChunkBlockedModel) { | ||
blocked = initializingChunkBlockedModel; | ||
if (!cyclic) { | ||
blocked.deps++; | ||
function waitForReference( | ||
referencedChunk, | ||
parentObject, | ||
key, | ||
response, | ||
map, | ||
path | ||
) { | ||
function fulfill(value) { | ||
for (var i = 1; i < path.length; i++) { | ||
for (; value.$$typeof === REACT_LAZY_TYPE; ) | ||
if (((value = value._payload), value === handler.chunk)) | ||
value = handler.value; | ||
else if ("fulfilled" === value.status) value = value.value; | ||
else { | ||
path.splice(0, i - 1); | ||
value.then(fulfill, reject); | ||
return; | ||
} | ||
value = value[path[i]]; | ||
} | ||
} else { | ||
blocked = initializingChunkBlockedModel = { | ||
deps: cyclic ? 0 : 1, | ||
value: null | ||
i = map(response, value, parentObject, key); | ||
parentObject[key] = i; | ||
"" === key && null === handler.value && (handler.value = i); | ||
if ( | ||
parentObject[0] === REACT_ELEMENT_TYPE && | ||
"object" === typeof handler.value && | ||
null !== handler.value && | ||
handler.value.$$typeof === REACT_ELEMENT_TYPE | ||
) | ||
switch (((value = handler.value), key)) { | ||
case "3": | ||
value.props = i; | ||
} | ||
handler.deps--; | ||
0 === handler.deps && | ||
((i = handler.chunk), | ||
null !== i && | ||
"blocked" === i.status && | ||
((value = i.value), | ||
(i.status = "fulfilled"), | ||
(i.value = handler.value), | ||
null !== value && wakeChunk(value, handler.value))); | ||
} | ||
function reject(error) { | ||
if (!handler.errored) { | ||
handler.errored = !0; | ||
handler.value = error; | ||
var chunk = handler.chunk; | ||
null !== chunk && | ||
"blocked" === chunk.status && | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
} | ||
if (initializingHandler) { | ||
var handler = initializingHandler; | ||
handler.deps++; | ||
} else | ||
handler = initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: null, | ||
deps: 1, | ||
errored: !1 | ||
}; | ||
} | ||
return value => { | ||
parentObject[key] = value; | ||
blocked.deps--; | ||
if (blocked.deps === 0) { | ||
if (chunk.status !== BLOCKED) { | ||
return; | ||
referencedChunk.then(fulfill, reject); | ||
return null; | ||
} | ||
function loadServerReference(response, metaData, parentObject, key) { | ||
if (!response._serverReferenceConfig) | ||
return createBoundServerReference( | ||
metaData, | ||
response._callServer, | ||
response._encodeFormAction | ||
); | ||
var serverReference = resolveServerReference( | ||
response._serverReferenceConfig, | ||
metaData.id | ||
); | ||
if ((response = preloadModule(serverReference))) | ||
metaData.bound && (response = Promise.all([response, metaData.bound])); | ||
else if (metaData.bound) response = Promise.resolve(metaData.bound); | ||
else return requireModule(serverReference); | ||
if (initializingHandler) { | ||
var handler = initializingHandler; | ||
handler.deps++; | ||
} else | ||
handler = initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: null, | ||
deps: 1, | ||
errored: !1 | ||
}; | ||
response.then( | ||
function () { | ||
var resolvedValue = requireModule(serverReference); | ||
if (metaData.bound) { | ||
var boundArgs = metaData.bound.value.slice(0); | ||
boundArgs.unshift(null); | ||
resolvedValue = resolvedValue.bind.apply(resolvedValue, boundArgs); | ||
} | ||
const resolveListeners = chunk.value; | ||
const initializedChunk = chunk; | ||
initializedChunk.status = INITIALIZED; | ||
initializedChunk.value = blocked.value; | ||
if (resolveListeners !== null) { | ||
wakeChunk(resolveListeners, blocked.value); | ||
parentObject[key] = resolvedValue; | ||
"" === key && null === handler.value && (handler.value = resolvedValue); | ||
if ( | ||
parentObject[0] === REACT_ELEMENT_TYPE && | ||
"object" === typeof handler.value && | ||
null !== handler.value && | ||
handler.value.$$typeof === REACT_ELEMENT_TYPE | ||
) | ||
switch (((boundArgs = handler.value), key)) { | ||
case "3": | ||
boundArgs.props = resolvedValue; | ||
} | ||
handler.deps--; | ||
0 === handler.deps && | ||
((resolvedValue = handler.chunk), | ||
null !== resolvedValue && | ||
"blocked" === resolvedValue.status && | ||
((boundArgs = resolvedValue.value), | ||
(resolvedValue.status = "fulfilled"), | ||
(resolvedValue.value = handler.value), | ||
null !== boundArgs && wakeChunk(boundArgs, handler.value))); | ||
}, | ||
function (error) { | ||
if (!handler.errored) { | ||
handler.errored = !0; | ||
handler.value = error; | ||
var chunk = handler.chunk; | ||
null !== chunk && | ||
"blocked" === chunk.status && | ||
triggerErrorOnChunk(chunk, error); | ||
} | ||
} | ||
}; | ||
); | ||
return null; | ||
} | ||
function createModelReject(chunk) { | ||
return error => triggerErrorOnChunk(chunk, error); | ||
} | ||
function createServerReferenceProxy(response, metaData) { | ||
const callServer = response._callServer; | ||
const proxy = function () { | ||
// $FlowFixMe[method-unbinding] | ||
const args = Array.prototype.slice.call(arguments); | ||
const p = metaData.bound; | ||
if (!p) { | ||
return callServer(metaData.id, args); | ||
} | ||
if (p.status === INITIALIZED) { | ||
const bound = p.value; | ||
return callServer(metaData.id, bound.concat(args)); | ||
} // Since this is a fake Promise whose .then doesn't chain, we have to wrap it. | ||
// TODO: Remove the wrapper once that's fixed. | ||
return Promise.resolve(p).then(function (bound) { | ||
return callServer(metaData.id, bound.concat(args)); | ||
}); | ||
}; | ||
registerServerReference(proxy, metaData); | ||
return proxy; | ||
} | ||
function getOutlinedModel(response, id) { | ||
const chunk = getChunk(response, id); | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
function getOutlinedModel(response, reference, parentObject, key, map) { | ||
reference = reference.split(":"); | ||
var id = parseInt(reference[0], 16); | ||
id = getChunk(response, id); | ||
switch (id.status) { | ||
case "resolved_model": | ||
initializeModelChunk(id); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
{ | ||
return chunk.value; | ||
case "resolved_module": | ||
initializeModuleChunk(id); | ||
} | ||
switch (id.status) { | ||
case "fulfilled": | ||
var value = id.value; | ||
for (id = 1; id < reference.length; id++) { | ||
for (; value.$$typeof === REACT_LAZY_TYPE; ) | ||
if (((value = value._payload), "fulfilled" === value.status)) | ||
value = value.value; | ||
else | ||
return waitForReference( | ||
value, | ||
parentObject, | ||
key, | ||
response, | ||
map, | ||
reference.slice(id - 1) | ||
); | ||
value = value[reference[id]]; | ||
} | ||
// We always encode it first in the stream so it won't be pending. | ||
return map(response, value, parentObject, key); | ||
case "pending": | ||
case "blocked": | ||
return waitForReference(id, parentObject, key, response, map, reference); | ||
default: | ||
throw chunk.reason; | ||
return ( | ||
initializingHandler | ||
? ((initializingHandler.errored = !0), | ||
(initializingHandler.value = id.reason)) | ||
: (initializingHandler = { | ||
parent: null, | ||
chunk: null, | ||
value: id.reason, | ||
deps: 0, | ||
errored: !0 | ||
}), | ||
null | ||
); | ||
} | ||
} | ||
function createMap(response, model) { | ||
return new Map(model); | ||
} | ||
function createSet(response, model) { | ||
return new Set(model); | ||
} | ||
function createBlob(response, model) { | ||
return new Blob(model.slice(1), { type: model[0] }); | ||
} | ||
function createFormData(response, model) { | ||
response = new FormData(); | ||
for (var i = 0; i < model.length; i++) | ||
response.append(model[i][0], model[i][1]); | ||
return response; | ||
} | ||
function extractIterator(response, model) { | ||
return model[Symbol.iterator](); | ||
} | ||
function createModel(response, model) { | ||
return model; | ||
} | ||
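// Illustrative note (values are made up): the helpers above are the `map` callbacks
// handed to getOutlinedModel below. Each receives an outlined wire model and builds
// the final value: a Map from an array of [key, value] entries, a Set from an array
// of members, a Blob from [contentType, ...byteChunks], a FormData from [name, value]
// pairs.
var demoOutlinedMap = new Map([["a", 1], ["b", 2]]); // createMap shape
var demoOutlinedSet = new Set([1, 2, 3]); // createSet shape
var demoOutlinedBlob = new Blob([new Uint8Array([7])], { type: "text/plain" }); // createBlob shape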
function parseModelString(response, parentObject, key, value) { | ||
if (value[0] === '$') { | ||
if (value === '$') { | ||
// A very common symbol. | ||
return REACT_ELEMENT_TYPE; | ||
} | ||
if ("$" === value[0]) { | ||
if ("$" === value) | ||
return ( | ||
null !== initializingHandler && | ||
"0" === key && | ||
(initializingHandler = { | ||
parent: initializingHandler, | ||
chunk: null, | ||
value: null, | ||
deps: 0, | ||
errored: !1 | ||
}), | ||
REACT_ELEMENT_TYPE | ||
); | ||
switch (value[1]) { | ||
case '$': | ||
{ | ||
// This was an escaped string value. | ||
return value.slice(1); | ||
} | ||
case 'L': | ||
{ | ||
// Lazy node | ||
const id = parseInt(value.slice(2), 16); | ||
const chunk = getChunk(response, id); // We create a React.lazy wrapper around any lazy values. | ||
// When passed into React, we'll know how to suspend on this. | ||
return createLazyChunkWrapper(chunk); | ||
} | ||
case '@': | ||
{ | ||
// Promise | ||
const id = parseInt(value.slice(2), 16); | ||
const chunk = getChunk(response, id); | ||
return chunk; | ||
} | ||
case 'S': | ||
{ | ||
// Symbol | ||
return Symbol.for(value.slice(2)); | ||
} | ||
case 'F': | ||
{ | ||
// Server Reference | ||
const id = parseInt(value.slice(2), 16); | ||
const metadata = getOutlinedModel(response, id); | ||
return createServerReferenceProxy(response, metadata); | ||
} | ||
case 'Q': | ||
{ | ||
// Map | ||
const id = parseInt(value.slice(2), 16); | ||
const data = getOutlinedModel(response, id); | ||
return new Map(data); | ||
} | ||
case 'W': | ||
{ | ||
// Set | ||
const id = parseInt(value.slice(2), 16); | ||
const data = getOutlinedModel(response, id); | ||
return new Set(data); | ||
} | ||
case 'I': | ||
{ | ||
// $Infinity | ||
return Infinity; | ||
} | ||
case '-': | ||
{ | ||
// $-0 or $-Infinity | ||
if (value === '$-0') { | ||
return -0; | ||
} else { | ||
return -Infinity; | ||
} | ||
} | ||
case 'N': | ||
{ | ||
// $NaN | ||
return NaN; | ||
} | ||
case 'u': | ||
{ | ||
// matches "$undefined" | ||
// Special encoding for `undefined` which can't be serialized as JSON otherwise. | ||
return undefined; | ||
} | ||
case 'D': | ||
{ | ||
// Date | ||
return new Date(Date.parse(value.slice(2))); | ||
} | ||
case 'n': | ||
{ | ||
// BigInt | ||
return BigInt(value.slice(2)); | ||
} | ||
case "$": | ||
return value.slice(1); | ||
case "L": | ||
return ( | ||
(parentObject = parseInt(value.slice(2), 16)), | ||
(response = getChunk(response, parentObject)), | ||
createLazyChunkWrapper(response) | ||
); | ||
case "@": | ||
if (2 === value.length) return new Promise(function () {}); | ||
parentObject = parseInt(value.slice(2), 16); | ||
return getChunk(response, parentObject); | ||
case "S": | ||
return Symbol.for(value.slice(2)); | ||
case "F": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel( | ||
response, | ||
value, | ||
parentObject, | ||
key, | ||
loadServerReference | ||
) | ||
); | ||
case "T": | ||
parentObject = "$" + value.slice(2); | ||
response = response._tempRefs; | ||
if (null == response) | ||
throw Error( | ||
"Missing a temporary reference set but the RSC response returned a temporary reference. Pass a temporaryReference option with the set that was used with the reply." | ||
); | ||
return response.get(parentObject); | ||
case "Q": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createMap) | ||
); | ||
case "W": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createSet) | ||
); | ||
case "B": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createBlob) | ||
); | ||
case "K": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, createFormData) | ||
); | ||
case "Z": | ||
return resolveErrorProd(); | ||
case "i": | ||
return ( | ||
(value = value.slice(2)), | ||
getOutlinedModel(response, value, parentObject, key, extractIterator) | ||
); | ||
case "I": | ||
return Infinity; | ||
case "-": | ||
return "$-0" === value ? -0 : -Infinity; | ||
case "N": | ||
return NaN; | ||
case "u": | ||
return; | ||
case "D": | ||
return new Date(Date.parse(value.slice(2))); | ||
case "n": | ||
return BigInt(value.slice(2)); | ||
default: | ||
{ | ||
// We assume that anything else is a reference ID. | ||
const id = parseInt(value.slice(1), 16); | ||
const chunk = getChunk(response, id); | ||
switch (chunk.status) { | ||
case RESOLVED_MODEL: | ||
initializeModelChunk(chunk); | ||
break; | ||
case RESOLVED_MODULE: | ||
initializeModuleChunk(chunk); | ||
break; | ||
} // The status might have changed after initialization. | ||
switch (chunk.status) { | ||
case INITIALIZED: | ||
const chunkValue = chunk.value; | ||
return chunkValue; | ||
case PENDING: | ||
case BLOCKED: | ||
case CYCLIC: | ||
const parentChunk = initializingChunk; | ||
chunk.then(createModelResolver(parentChunk, parentObject, key, chunk.status === CYCLIC), createModelReject(parentChunk)); | ||
return null; | ||
default: | ||
throw chunk.reason; | ||
} | ||
} | ||
return ( | ||
(value = value.slice(1)), | ||
getOutlinedModel(response, value, parentObject, key, createModel) | ||
); | ||
} | ||
} | ||
return value; | ||
} | ||
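// Summary of the "$" escape codes handled by the switch above, read directly from
// its cases: "$$..." escaped string, "$L<id>" lazy chunk, "$@<id>" promise chunk,
// "$S<name>" Symbol.for, "$F<ref>" server reference, "$T<id>" temporary reference,
// "$Q<ref>" Map, "$W<ref>" Set, "$B<ref>" Blob, "$K<ref>" FormData, "$Z" opaque
// production error, "$i<ref>" iterator, "$I" Infinity, "$-0"/"$-Infinity", "$N" NaN,
// "$u" undefined, "$D<iso>" Date, "$n<digits>" BigInt, and any other "$<id>" an
// outlined model reference. Two of the literal decodings as plain JavaScript
// (example values only):
var demoDate = new Date(Date.parse("2024-01-01T00:00:00.000Z")); // the "$D" case
var demoBigInt = BigInt("9007199254740993"); // the "$n" case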
function parseModelTuple(response, value) { | ||
const tuple = value; | ||
if (tuple[0] === REACT_ELEMENT_TYPE) { | ||
// TODO: Consider having React just directly accept these arrays as elements. | ||
// Or even change the ReactElement type to be an array. | ||
return createElement(tuple[1], tuple[2], tuple[3]); | ||
} | ||
return value; | ||
} | ||
function missingCall() { | ||
throw new Error('Trying to call a function from "use server" but the callServer option ' + 'was not implemented in your router runtime.'); | ||
throw Error( | ||
'Trying to call a function from "use server" but the callServer option was not implemented in your router runtime.' | ||
); | ||
} | ||
function createResponse(bundlerConfig, moduleLoading, callServer, nonce) { | ||
const chunks = new Map(); | ||
const response = { | ||
_bundlerConfig: bundlerConfig, | ||
_moduleLoading: moduleLoading, | ||
_callServer: callServer !== undefined ? callServer : missingCall, | ||
_nonce: nonce, | ||
_chunks: chunks, | ||
_stringDecoder: createStringDecoder(), | ||
_fromJSON: null, | ||
_rowState: 0, | ||
_rowID: 0, | ||
_rowTag: 0, | ||
_rowLength: 0, | ||
_buffer: [] | ||
}; // Don't inline this call because it causes closure to outline the call above. | ||
response._fromJSON = createFromJSONCallback(response); | ||
return response; | ||
function ResponseInstance( | ||
bundlerConfig, | ||
serverReferenceConfig, | ||
moduleLoading, | ||
callServer, | ||
encodeFormAction, | ||
nonce, | ||
temporaryReferences | ||
) { | ||
var chunks = new Map(); | ||
this._bundlerConfig = bundlerConfig; | ||
this._serverReferenceConfig = serverReferenceConfig; | ||
this._moduleLoading = moduleLoading; | ||
this._callServer = void 0 !== callServer ? callServer : missingCall; | ||
this._encodeFormAction = encodeFormAction; | ||
this._nonce = nonce; | ||
this._chunks = chunks; | ||
this._stringDecoder = new util.TextDecoder(); | ||
this._fromJSON = null; | ||
this._rowLength = this._rowTag = this._rowID = this._rowState = 0; | ||
this._buffer = []; | ||
this._tempRefs = temporaryReferences; | ||
this._fromJSON = createFromJSONCallback(this); | ||
} | ||
function resolveModel(response, id, model) { | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createResolvedModelChunk(response, model)); | ||
} else { | ||
resolveModelChunk(chunk, model); | ||
} | ||
function resolveBuffer(response, id, buffer) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk && "pending" !== chunk.status | ||
? chunk.reason.enqueueValue(buffer) | ||
: chunks.set(id, new ReactPromise("fulfilled", buffer, null, response)); | ||
} | ||
function resolveText(response, id, text) { | ||
const chunks = response._chunks; // We assume that we always reference large strings after they've been | ||
// emitted. | ||
chunks.set(id, createInitializedTextChunk(response, text)); | ||
function resolveModule(response, id, model) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
model = JSON.parse(model, response._fromJSON); | ||
var clientReference = resolveClientReference(response._bundlerConfig, model); | ||
prepareDestinationWithChunks( | ||
response._moduleLoading, | ||
model[1], | ||
response._nonce | ||
); | ||
if ((model = preloadModule(clientReference))) { | ||
if (chunk) { | ||
var blockedChunk = chunk; | ||
blockedChunk.status = "blocked"; | ||
} else | ||
(blockedChunk = new ReactPromise("blocked", null, null, response)), | ||
chunks.set(id, blockedChunk); | ||
model.then( | ||
function () { | ||
return resolveModuleChunk(blockedChunk, clientReference); | ||
}, | ||
function (error) { | ||
return triggerErrorOnChunk(blockedChunk, error); | ||
} | ||
); | ||
} else | ||
chunk | ||
? resolveModuleChunk(chunk, clientReference) | ||
: chunks.set( | ||
id, | ||
new ReactPromise("resolved_module", clientReference, null, response) | ||
); | ||
} | ||
function resolveBuffer(response, id, buffer) { | ||
const chunks = response._chunks; // We assume that we always reference buffers after they've been emitted. | ||
chunks.set(id, createInitializedBufferChunk(response, buffer)); | ||
function resolveStream(response, id, stream, controller) { | ||
var chunks = response._chunks, | ||
chunk = chunks.get(id); | ||
chunk | ||
? "pending" === chunk.status && | ||
((response = chunk.value), | ||
(chunk.status = "fulfilled"), | ||
(chunk.value = stream), | ||
(chunk.reason = controller), | ||
null !== response && wakeChunk(response, chunk.value)) | ||
: chunks.set( | ||
id, | ||
new ReactPromise("fulfilled", stream, controller, response) | ||
); | ||
} | ||
function resolveModule(response, id, model) { | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
const clientReferenceMetadata = parseModel(response, model); | ||
const clientReference = resolveClientReference(response._bundlerConfig, clientReferenceMetadata); | ||
prepareDestinationForModule(response._moduleLoading, response._nonce, clientReferenceMetadata); // TODO: Add an option to encode modules that are lazy loaded. | ||
// For now we preload all modules as early as possible since it's likely | ||
// that we'll need them. | ||
const promise = preloadModule(clientReference); | ||
if (promise) { | ||
let blockedChunk; | ||
if (!chunk) { | ||
// Technically, we should just treat promise as the chunk in this | ||
// case. Because it'll just behave as any other promise. | ||
blockedChunk = createBlockedChunk(response); | ||
chunks.set(id, blockedChunk); | ||
} else { | ||
// This can't actually happen because we don't have any forward | ||
// references to modules. | ||
blockedChunk = chunk; | ||
blockedChunk.status = BLOCKED; | ||
function startReadableStream(response, id, type) { | ||
var controller = null; | ||
type = new ReadableStream({ | ||
type: type, | ||
start: function (c) { | ||
controller = c; | ||
} | ||
promise.then(() => resolveModuleChunk(blockedChunk, clientReference), error => triggerErrorOnChunk(blockedChunk, error)); | ||
} else { | ||
if (!chunk) { | ||
chunks.set(id, createResolvedModuleChunk(response, clientReference)); | ||
} else { | ||
// This can't actually happen because we don't have any forward | ||
// references to modules. | ||
resolveModuleChunk(chunk, clientReference); | ||
}); | ||
var previousBlockedChunk = null; | ||
resolveStream(response, id, type, { | ||
enqueueValue: function (value) { | ||
null === previousBlockedChunk | ||
? controller.enqueue(value) | ||
: previousBlockedChunk.then(function () { | ||
controller.enqueue(value); | ||
}); | ||
}, | ||
enqueueModel: function (json) { | ||
if (null === previousBlockedChunk) { | ||
var chunk = new ReactPromise("resolved_model", json, null, response); | ||
initializeModelChunk(chunk); | ||
"fulfilled" === chunk.status | ||
? controller.enqueue(chunk.value) | ||
: (chunk.then( | ||
function (v) { | ||
return controller.enqueue(v); | ||
}, | ||
function (e) { | ||
return controller.error(e); | ||
} | ||
), | ||
(previousBlockedChunk = chunk)); | ||
} else { | ||
chunk = previousBlockedChunk; | ||
var chunk$52 = createPendingChunk(response); | ||
chunk$52.then( | ||
function (v) { | ||
return controller.enqueue(v); | ||
}, | ||
function (e) { | ||
return controller.error(e); | ||
} | ||
); | ||
previousBlockedChunk = chunk$52; | ||
chunk.then(function () { | ||
previousBlockedChunk === chunk$52 && (previousBlockedChunk = null); | ||
resolveModelChunk(chunk$52, json); | ||
}); | ||
} | ||
}, | ||
close: function () { | ||
if (null === previousBlockedChunk) controller.close(); | ||
else { | ||
var blockedChunk = previousBlockedChunk; | ||
previousBlockedChunk = null; | ||
blockedChunk.then(function () { | ||
return controller.close(); | ||
}); | ||
} | ||
}, | ||
error: function (error) { | ||
if (null === previousBlockedChunk) controller.error(error); | ||
else { | ||
var blockedChunk = previousBlockedChunk; | ||
previousBlockedChunk = null; | ||
blockedChunk.then(function () { | ||
return controller.error(error); | ||
}); | ||
} | ||
} | ||
} | ||
}); | ||
} | ||
function resolveErrorProd(response, id, digest) { | ||
const error = new Error('An error occurred in the Server Components render. The specific message is omitted in production' + ' builds to avoid leaking sensitive details. A digest property is included on this error instance which' + ' may provide additional details about the nature of the error.'); | ||
error.stack = 'Error: ' + error.message; | ||
error.digest = digest; | ||
const errorWithDigest = error; | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createErrorChunk(response, errorWithDigest)); | ||
} else { | ||
triggerErrorOnChunk(chunk, errorWithDigest); | ||
} | ||
function asyncIterator() { | ||
return this; | ||
} | ||
function resolvePostponeProd(response, id) { | ||
const error = new Error('A Server Component was postponed. The reason is omitted in production' + ' builds to avoid leaking sensitive details.'); | ||
const postponeInstance = error; | ||
postponeInstance.$$typeof = REACT_POSTPONE_TYPE; | ||
postponeInstance.stack = 'Error: ' + error.message; | ||
const chunks = response._chunks; | ||
const chunk = chunks.get(id); | ||
if (!chunk) { | ||
chunks.set(id, createErrorChunk(response, postponeInstance)); | ||
} else { | ||
triggerErrorOnChunk(chunk, postponeInstance); | ||
} | ||
function createIterator(next) { | ||
next = { next: next }; | ||
next[ASYNC_ITERATOR] = asyncIterator; | ||
return next; | ||
} | ||
function startAsyncIterable(response, id, iterator) { | ||
var buffer = [], | ||
closed = !1, | ||
nextWriteIndex = 0, | ||
$jscomp$compprop0 = {}; | ||
$jscomp$compprop0 = | ||
(($jscomp$compprop0[ASYNC_ITERATOR] = function () { | ||
var nextReadIndex = 0; | ||
return createIterator(function (arg) { | ||
if (void 0 !== arg) | ||
throw Error( | ||
"Values cannot be passed to next() of AsyncIterables passed to Client Components." | ||
); | ||
if (nextReadIndex === buffer.length) { | ||
if (closed) | ||
return new ReactPromise( | ||
"fulfilled", | ||
{ done: !0, value: void 0 }, | ||
null, | ||
response | ||
); | ||
buffer[nextReadIndex] = createPendingChunk(response); | ||
} | ||
return buffer[nextReadIndex++]; | ||
}); | ||
}), | ||
$jscomp$compprop0); | ||
resolveStream( | ||
response, | ||
id, | ||
iterator ? $jscomp$compprop0[ASYNC_ITERATOR]() : $jscomp$compprop0, | ||
{ | ||
enqueueValue: function (value) { | ||
if (nextWriteIndex === buffer.length) | ||
buffer[nextWriteIndex] = new ReactPromise( | ||
"fulfilled", | ||
{ done: !1, value: value }, | ||
null, | ||
response | ||
); | ||
else { | ||
var chunk = buffer[nextWriteIndex], | ||
resolveListeners = chunk.value, | ||
rejectListeners = chunk.reason; | ||
chunk.status = "fulfilled"; | ||
chunk.value = { done: !1, value: value }; | ||
null !== resolveListeners && | ||
wakeChunkIfInitialized(chunk, resolveListeners, rejectListeners); | ||
} | ||
nextWriteIndex++; | ||
}, | ||
enqueueModel: function (value) { | ||
nextWriteIndex === buffer.length | ||
? (buffer[nextWriteIndex] = createResolvedIteratorResultChunk( | ||
response, | ||
value, | ||
!1 | ||
)) | ||
: resolveIteratorResultChunk(buffer[nextWriteIndex], value, !1); | ||
nextWriteIndex++; | ||
}, | ||
close: function (value) { | ||
closed = !0; | ||
nextWriteIndex === buffer.length | ||
? (buffer[nextWriteIndex] = createResolvedIteratorResultChunk( | ||
response, | ||
value, | ||
!0 | ||
)) | ||
: resolveIteratorResultChunk(buffer[nextWriteIndex], value, !0); | ||
for (nextWriteIndex++; nextWriteIndex < buffer.length; ) | ||
resolveIteratorResultChunk( | ||
buffer[nextWriteIndex++], | ||
'"$undefined"', | ||
!0 | ||
); | ||
}, | ||
error: function (error) { | ||
closed = !0; | ||
for ( | ||
nextWriteIndex === buffer.length && | ||
(buffer[nextWriteIndex] = createPendingChunk(response)); | ||
nextWriteIndex < buffer.length; | ||
function resolveHint(response, code, model) { | ||
const hintModel = parseModel(response, model); | ||
dispatchHint(code, hintModel); | ||
) | ||
triggerErrorOnChunk(buffer[nextWriteIndex++], error); | ||
} | ||
} | ||
); | ||
} | ||
function resolveErrorProd() { | ||
var error = Error( | ||
"An error occurred in the Server Components render. The specific message is omitted in production builds to avoid leaking sensitive details. A digest property is included on this error instance which may provide additional details about the nature of the error." | ||
); | ||
error.stack = "Error: " + error.message; | ||
return error; | ||
} | ||
function mergeBuffer(buffer, lastChunk) { | ||
const l = buffer.length; // Count the bytes we'll need | ||
let byteLength = lastChunk.length; | ||
for (let i = 0; i < l; i++) { | ||
for (var l = buffer.length, byteLength = lastChunk.length, i = 0; i < l; i++) | ||
byteLength += buffer[i].byteLength; | ||
} // Allocate enough contiguous space | ||
const result = new Uint8Array(byteLength); | ||
let offset = 0; // Copy all the buffers into it. | ||
for (let i = 0; i < l; i++) { | ||
const chunk = buffer[i]; | ||
result.set(chunk, offset); | ||
offset += chunk.byteLength; | ||
byteLength = new Uint8Array(byteLength); | ||
for (var i$53 = (i = 0); i$53 < l; i$53++) { | ||
var chunk = buffer[i$53]; | ||
byteLength.set(chunk, i); | ||
i += chunk.byteLength; | ||
} | ||
result.set(lastChunk, offset); | ||
return result; | ||
byteLength.set(lastChunk, i); | ||
return byteLength; | ||
} | ||
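// Illustrative sketch (not from the bundle): mergeBuffer above concatenates the
// previously buffered byte chunks with the final chunk into one contiguous
// Uint8Array: total the byte lengths, allocate once, copy at a running offset.
// The same idea with hypothetical names:
function concatChunks(chunks, last) {
  var total = last.byteLength;
  for (var i = 0; i < chunks.length; i++) total += chunks[i].byteLength;
  var out = new Uint8Array(total);
  var offset = 0;
  for (i = 0; i < chunks.length; i++) {
    out.set(chunks[i], offset);
    offset += chunks[i].byteLength;
  }
  out.set(last, offset);
  return out;
}
// concatChunks([Uint8Array.of(1), Uint8Array.of(2, 3)], Uint8Array.of(4))
// yields Uint8Array [1, 2, 3, 4].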
function resolveTypedArray(response, id, buffer, lastChunk, constructor, bytesPerElement) { | ||
  // If the view fits into one original buffer, we just reuse that buffer instead of | ||
  // copying it out into a separate copy. This means that it's not always possible to | ||
  // transfer these values to other threads without copying first, since they may | ||
  // share an array buffer. For this to work, the view must also have its bytes aligned | ||
  // to a multiple of the size of the type. | ||
const chunk = buffer.length === 0 && lastChunk.byteOffset % bytesPerElement === 0 ? lastChunk : mergeBuffer(buffer, lastChunk); // TODO: The transfer protocol of RSC is little-endian. If the client isn't little-endian | ||
// we should convert it instead. In practice big endian isn't really Web compatible so it's | ||
// somewhat safe to assume that browsers aren't going to run it, but maybe there's some SSR | ||
// server that's affected. | ||
const view = new constructor(chunk.buffer, chunk.byteOffset, chunk.byteLength / bytesPerElement); | ||
resolveBuffer(response, id, view); | ||
function resolveTypedArray( | ||
response, | ||
id, | ||
buffer, | ||
lastChunk, | ||
constructor, | ||
bytesPerElement | ||
) { | ||
buffer = | ||
0 === buffer.length && 0 === lastChunk.byteOffset % bytesPerElement | ||
? lastChunk | ||
: mergeBuffer(buffer, lastChunk); | ||
constructor = new constructor( | ||
buffer.buffer, | ||
buffer.byteOffset, | ||
buffer.byteLength / bytesPerElement | ||
); | ||
resolveBuffer(response, id, constructor); | ||
} | ||
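resolveTypedArray above only copies when it has to: if the whole payload arrived in one chunk whose byteOffset is already a multiple of the element size, it creates the typed-array view directly over the existing buffer; otherwise it merges the chunks first. A sketch of that alignment decision, with illustrative names:

// Illustrative only, not the library API. A Float64Array view needs a
// byteOffset that is a multiple of 8; if the incoming chunk is not aligned,
// copy the bytes into a fresh buffer that starts at offset 0.
function viewFromChunk(chunk, Ctor, bytesPerElement) {
  if (chunk.byteOffset % bytesPerElement === 0) {
    // Reuse the underlying buffer without copying.
    return new Ctor(chunk.buffer, chunk.byteOffset, chunk.byteLength / bytesPerElement);
  }
  // Unaligned: copy the bytes so the view can start at offset 0.
  const copy = new Uint8Array(chunk); // copies chunk's bytes into a new buffer
  return new Ctor(copy.buffer, 0, copy.byteLength / bytesPerElement);
}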
function processFullRow(response, id, tag, buffer, chunk) { | ||
{ | ||
switch (tag) { | ||
case 65 | ||
/* "A" */ | ||
: | ||
// We must always clone to extract it into a separate buffer instead of just a view. | ||
resolveBuffer(response, id, mergeBuffer(buffer, chunk).buffer); | ||
return; | ||
case 67 | ||
/* "C" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int8Array, 1); | ||
return; | ||
case 99 | ||
/* "c" */ | ||
: | ||
resolveBuffer(response, id, buffer.length === 0 ? chunk : mergeBuffer(buffer, chunk)); | ||
return; | ||
case 85 | ||
/* "U" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint8ClampedArray, 1); | ||
return; | ||
case 83 | ||
/* "S" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int16Array, 2); | ||
return; | ||
case 115 | ||
/* "s" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint16Array, 2); | ||
return; | ||
case 76 | ||
/* "L" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Int32Array, 4); | ||
return; | ||
case 108 | ||
/* "l" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Uint32Array, 4); | ||
return; | ||
case 70 | ||
/* "F" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Float32Array, 4); | ||
return; | ||
case 100 | ||
/* "d" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, Float64Array, 8); | ||
return; | ||
case 78 | ||
/* "N" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, BigInt64Array, 8); | ||
return; | ||
case 109 | ||
/* "m" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, BigUint64Array, 8); | ||
return; | ||
case 86 | ||
/* "V" */ | ||
: | ||
resolveTypedArray(response, id, buffer, chunk, DataView, 1); | ||
return; | ||
} | ||
} | ||
const stringDecoder = response._stringDecoder; | ||
let row = ''; | ||
for (let i = 0; i < buffer.length; i++) { | ||
row += readPartialStringChunk(stringDecoder, buffer[i]); | ||
} | ||
row += readFinalStringChunk(stringDecoder, chunk); | ||
function processFullBinaryRow(response, id, tag, buffer, chunk) { | ||
switch (tag) { | ||
case 73 | ||
/* "I" */ | ||
: | ||
{ | ||
resolveModule(response, id, row); | ||
return; | ||
} | ||
case 72 | ||
/* "H" */ | ||
: | ||
{ | ||
const code = row[0]; | ||
resolveHint(response, code, row.slice(1)); | ||
return; | ||
} | ||
case 69 | ||
/* "E" */ | ||
: | ||
{ | ||
const errorInfo = JSON.parse(row); | ||
{ | ||
resolveErrorProd(response, id, errorInfo.digest); | ||
} | ||
return; | ||
} | ||
case 84 | ||
/* "T" */ | ||
: | ||
{ | ||
resolveText(response, id, row); | ||
return; | ||
} | ||
case 68 | ||
/* "D" */ | ||
: | ||
{ | ||
throw new Error('Failed to read a RSC payload created by a development version of React ' + 'on the server while using a production version on the client. Always use ' + 'matching versions on the server and the client.'); | ||
} | ||
case 80 | ||
/* "P" */ | ||
: | ||
{ | ||
{ | ||
{ | ||
resolvePostponeProd(response, id); | ||
} | ||
return; | ||
} | ||
} | ||
// Fallthrough | ||
default: | ||
/* """ "{" "[" "t" "f" "n" "0" - "9" */ | ||
{ | ||
// We assume anything else is JSON. | ||
resolveModel(response, id, row); | ||
return; | ||
} | ||
case 65: | ||
resolveBuffer(response, id, mergeBuffer(buffer, chunk).buffer); | ||
return; | ||
case 79: | ||
resolveTypedArray(response, id, buffer, chunk, Int8Array, 1); | ||
return; | ||
case 111: | ||
resolveBuffer( | ||
response, | ||
id, | ||
0 === buffer.length ? chunk : mergeBuffer(buffer, chunk) | ||
); | ||
return; | ||
case 85: | ||
resolveTypedArray(response, id, buffer, chunk, Uint8ClampedArray, 1); | ||
return; | ||
case 83: | ||
resolveTypedArray(response, id, buffer, chunk, Int16Array, 2); | ||
return; | ||
case 115: | ||
resolveTypedArray(response, id, buffer, chunk, Uint16Array, 2); | ||
return; | ||
case 76: | ||
resolveTypedArray(response, id, buffer, chunk, Int32Array, 4); | ||
return; | ||
case 108: | ||
resolveTypedArray(response, id, buffer, chunk, Uint32Array, 4); | ||
return; | ||
case 71: | ||
resolveTypedArray(response, id, buffer, chunk, Float32Array, 4); | ||
return; | ||
case 103: | ||
resolveTypedArray(response, id, buffer, chunk, Float64Array, 8); | ||
return; | ||
case 77: | ||
resolveTypedArray(response, id, buffer, chunk, BigInt64Array, 8); | ||
return; | ||
case 109: | ||
resolveTypedArray(response, id, buffer, chunk, BigUint64Array, 8); | ||
return; | ||
case 86: | ||
resolveTypedArray(response, id, buffer, chunk, DataView, 1); | ||
return; | ||
} | ||
for ( | ||
var stringDecoder = response._stringDecoder, row = "", i = 0; | ||
i < buffer.length; | ||
i++ | ||
) | ||
row += stringDecoder.decode(buffer[i], decoderOptions); | ||
row += stringDecoder.decode(chunk); | ||
processFullStringRow(response, id, tag, row); | ||
} | ||
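processFullBinaryRow dispatches on a single tag byte. As the row state machine below shows, each row on the wire is roughly `<id in hex>:<tag>[<length in hex>,]<payload>`, newline-terminated unless it is length-prefixed, and the tag byte decides how the payload bytes are interpreted. An illustrative lookup derived from the switch above in the new build (not an exported API; tags "A" and "o" are handled separately as a raw ArrayBuffer and a raw Uint8Array):

// Tag byte -> [constructor, bytesPerElement] for the resolveTypedArray cases.
const TYPED_ARRAY_TAGS = {
  79: [Int8Array, 1],          // "O"
  85: [Uint8ClampedArray, 1],  // "U"
  83: [Int16Array, 2],         // "S"
  115: [Uint16Array, 2],       // "s"
  76: [Int32Array, 4],         // "L"
  108: [Uint32Array, 4],       // "l"
  71: [Float32Array, 4],       // "G"
  103: [Float64Array, 8],      // "g"
  77: [BigInt64Array, 8],      // "M"
  109: [BigUint64Array, 8],    // "m"
  86: [DataView, 1]            // "V"
};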
function processBinaryChunk(response, chunk) { | ||
let i = 0; | ||
let rowState = response._rowState; | ||
let rowID = response._rowID; | ||
let rowTag = response._rowTag; | ||
let rowLength = response._rowLength; | ||
const buffer = response._buffer; | ||
const chunkLength = chunk.length; | ||
while (i < chunkLength) { | ||
let lastIdx = -1; | ||
switch (rowState) { | ||
case ROW_ID: | ||
{ | ||
const byte = chunk[i++]; | ||
if (byte === 58 | ||
/* ":" */ | ||
) { | ||
// Finished the rowID, next we'll parse the tag. | ||
rowState = ROW_TAG; | ||
} else { | ||
rowID = rowID << 4 | (byte > 96 ? byte - 87 : byte - 48); | ||
} | ||
continue; | ||
} | ||
case ROW_TAG: | ||
{ | ||
const resolvedRowTag = chunk[i]; | ||
if (resolvedRowTag === 84 | ||
/* "T" */ | ||
|| (resolvedRowTag === 65 | ||
/* "A" */ | ||
|| resolvedRowTag === 67 | ||
/* "C" */ | ||
|| resolvedRowTag === 99 | ||
/* "c" */ | ||
|| resolvedRowTag === 85 | ||
/* "U" */ | ||
|| resolvedRowTag === 83 | ||
/* "S" */ | ||
|| resolvedRowTag === 115 | ||
/* "s" */ | ||
|| resolvedRowTag === 76 | ||
/* "L" */ | ||
|| resolvedRowTag === 108 | ||
/* "l" */ | ||
|| resolvedRowTag === 70 | ||
/* "F" */ | ||
|| resolvedRowTag === 100 | ||
/* "d" */ | ||
|| resolvedRowTag === 78 | ||
/* "N" */ | ||
|| resolvedRowTag === 109 | ||
/* "m" */ | ||
|| resolvedRowTag === 86) | ||
/* "V" */ | ||
) { | ||
rowTag = resolvedRowTag; | ||
rowState = ROW_LENGTH; | ||
i++; | ||
} else if (resolvedRowTag > 64 && resolvedRowTag < 91 | ||
/* "A"-"Z" */ | ||
) { | ||
rowTag = resolvedRowTag; | ||
rowState = ROW_CHUNK_BY_NEWLINE; | ||
i++; | ||
} else { | ||
rowTag = 0; | ||
rowState = ROW_CHUNK_BY_NEWLINE; // This was an unknown tag so it was probably part of the data. | ||
} | ||
continue; | ||
} | ||
case ROW_LENGTH: | ||
{ | ||
const byte = chunk[i++]; | ||
if (byte === 44 | ||
/* "," */ | ||
) { | ||
// Finished the rowLength, next we'll buffer up to that length. | ||
rowState = ROW_CHUNK_BY_LENGTH; | ||
} else { | ||
rowLength = rowLength << 4 | (byte > 96 ? byte - 87 : byte - 48); | ||
} | ||
continue; | ||
} | ||
case ROW_CHUNK_BY_NEWLINE: | ||
{ | ||
// We're looking for a newline | ||
lastIdx = chunk.indexOf(10 | ||
/* "\n" */ | ||
, i); | ||
function processFullStringRow(response, id, tag, row) { | ||
switch (tag) { | ||
case 73: | ||
resolveModule(response, id, row); | ||
break; | ||
case 72: | ||
id = row[0]; | ||
row = row.slice(1); | ||
response = JSON.parse(row, response._fromJSON); | ||
row = ReactDOMSharedInternals.d; | ||
switch (id) { | ||
case "D": | ||
row.D(response); | ||
break; | ||
} | ||
case ROW_CHUNK_BY_LENGTH: | ||
{ | ||
// We're looking for the remaining byte length | ||
lastIdx = i + rowLength; | ||
if (lastIdx > chunk.length) { | ||
lastIdx = -1; | ||
} | ||
case "C": | ||
"string" === typeof response | ||
? row.C(response) | ||
: row.C(response[0], response[1]); | ||
break; | ||
} | ||
} | ||
const offset = chunk.byteOffset + i; | ||
if (lastIdx > -1) { | ||
// We found the last chunk of the row | ||
const length = lastIdx - i; | ||
const lastChunk = new Uint8Array(chunk.buffer, offset, length); | ||
processFullRow(response, rowID, rowTag, buffer, lastChunk); // Reset state machine for a new row | ||
i = lastIdx; | ||
if (rowState === ROW_CHUNK_BY_NEWLINE) { | ||
// If we're trailing by a newline we need to skip it. | ||
i++; | ||
case "L": | ||
id = response[0]; | ||
tag = response[1]; | ||
3 === response.length ? row.L(id, tag, response[2]) : row.L(id, tag); | ||
break; | ||
case "m": | ||
"string" === typeof response | ||
? row.m(response) | ||
: row.m(response[0], response[1]); | ||
break; | ||
case "X": | ||
"string" === typeof response | ||
? row.X(response) | ||
: row.X(response[0], response[1]); | ||
break; | ||
case "S": | ||
"string" === typeof response | ||
? row.S(response) | ||
: row.S( | ||
response[0], | ||
0 === response[1] ? void 0 : response[1], | ||
3 === response.length ? response[2] : void 0 | ||
); | ||
break; | ||
case "M": | ||
"string" === typeof response | ||
? row.M(response) | ||
: row.M(response[0], response[1]); | ||
} | ||
rowState = ROW_ID; | ||
rowTag = 0; | ||
rowID = 0; | ||
rowLength = 0; | ||
buffer.length = 0; | ||
} else { | ||
// The rest of this row is in a future chunk. We stash the rest of the | ||
// current chunk until we can process the full row. | ||
const length = chunk.byteLength - i; | ||
const remainingSlice = new Uint8Array(chunk.buffer, offset, length); | ||
buffer.push(remainingSlice); // Update how many bytes we're still waiting for. If we're looking for | ||
// a newline, this doesn't hurt since we'll just ignore it. | ||
rowLength -= remainingSlice.byteLength; | ||
break; | ||
} | ||
case 69: | ||
tag = JSON.parse(row); | ||
row = resolveErrorProd(); | ||
row.digest = tag.digest; | ||
tag = response._chunks; | ||
var chunk = tag.get(id); | ||
chunk | ||
? triggerErrorOnChunk(chunk, row) | ||
: tag.set(id, new ReactPromise("rejected", null, row, response)); | ||
break; | ||
case 84: | ||
tag = response._chunks; | ||
(chunk = tag.get(id)) && "pending" !== chunk.status | ||
? chunk.reason.enqueueValue(row) | ||
: tag.set(id, new ReactPromise("fulfilled", row, null, response)); | ||
break; | ||
case 78: | ||
case 68: | ||
case 87: | ||
throw Error( | ||
"Failed to read a RSC payload created by a development version of React on the server while using a production version on the client. Always use matching versions on the server and the client." | ||
); | ||
case 82: | ||
startReadableStream(response, id, void 0); | ||
break; | ||
case 114: | ||
startReadableStream(response, id, "bytes"); | ||
break; | ||
case 88: | ||
startAsyncIterable(response, id, !1); | ||
break; | ||
case 120: | ||
startAsyncIterable(response, id, !0); | ||
break; | ||
case 67: | ||
(response = response._chunks.get(id)) && | ||
"fulfilled" === response.status && | ||
response.reason.close("" === row ? '"$undefined"' : row); | ||
break; | ||
case 80: | ||
row = Error( | ||
"A Server Component was postponed. The reason is omitted in production builds to avoid leaking sensitive details." | ||
); | ||
row.$$typeof = REACT_POSTPONE_TYPE; | ||
row.stack = "Error: " + row.message; | ||
tag = response._chunks; | ||
(chunk = tag.get(id)) | ||
? triggerErrorOnChunk(chunk, row) | ||
: tag.set(id, new ReactPromise("rejected", null, row, response)); | ||
break; | ||
default: | ||
(tag = response._chunks), | ||
(chunk = tag.get(id)) | ||
? resolveModelChunk(chunk, row) | ||
: tag.set( | ||
id, | ||
new ReactPromise("resolved_model", row, null, response) | ||
); | ||
} | ||
response._rowState = rowState; | ||
response._rowID = rowID; | ||
response._rowTag = rowTag; | ||
response._rowLength = rowLength; | ||
} | ||
function parseModel(response, json) { | ||
return JSON.parse(json, response._fromJSON); | ||
} | ||
function createFromJSONCallback(response) { | ||
// $FlowFixMe[missing-this-annot] | ||
return function (key, value) { | ||
if (typeof value === 'string') { | ||
// We can't use .bind here because we need the "this" value. | ||
if ("string" === typeof value) | ||
return parseModelString(response, this, key, value); | ||
if ("object" === typeof value && null !== value) { | ||
if (value[0] === REACT_ELEMENT_TYPE) { | ||
if ( | ||
((key = { | ||
$$typeof: REACT_ELEMENT_TYPE, | ||
type: value[1], | ||
key: value[2], | ||
ref: null, | ||
props: value[3] | ||
}), | ||
null !== initializingHandler) | ||
) | ||
if ( | ||
((value = initializingHandler), | ||
(initializingHandler = value.parent), | ||
value.errored) | ||
) | ||
(key = new ReactPromise("rejected", null, value.value, response)), | ||
(key = createLazyChunkWrapper(key)); | ||
else if (0 < value.deps) { | ||
var blockedChunk = new ReactPromise( | ||
"blocked", | ||
null, | ||
null, | ||
response | ||
); | ||
value.value = key; | ||
value.chunk = blockedChunk; | ||
key = createLazyChunkWrapper(blockedChunk); | ||
} | ||
} else key = value; | ||
return key; | ||
} | ||
if (typeof value === 'object' && value !== null) { | ||
return parseModelTuple(response, value); | ||
} | ||
return value; | ||
}; | ||
} | ||
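createFromJSONCallback returns a reviver for JSON.parse, so string markers (such as the "$"-prefixed references seen elsewhere in this file) and element tuples can be swapped for live values while the row is being parsed. A generic sketch of the reviver pattern, with illustrative names and marker syntax:

// JSON.parse(text, reviver) calls the reviver for every key/value pair,
// innermost values first, so the parser can substitute live objects.
function parseWithRefs(text, lookup) {
  return JSON.parse(text, function (key, value) {
    if (typeof value === "string" && value.startsWith("$")) {
      // Treat "$<id>" as a reference to a previously resolved value.
      return lookup(value.slice(1));
    }
    return value;
  });
}

// Usage:
// parseWithRefs('{"user":"$42"}', (id) => ({ id: Number(id) }));
// -> { user: { id: 42 } }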
function close(response) { | ||
// In case there are any remaining unresolved chunks, they won't | ||
// be resolved now. So we need to issue an error to those. | ||
// Ideally we should be able to early bail out if we kept a | ||
// ref count of pending chunks. | ||
reportGlobalError(response, new Error('Connection closed.')); | ||
} | ||
function noServerCall() { | ||
throw new Error('Server Functions cannot be called during initial render. ' + 'This would create a fetch waterfall. Try to use a Server Component ' + 'to pass data to Client Components instead.'); | ||
throw Error( | ||
"Server Functions cannot be called during initial render. This would create a fetch waterfall. Try to use a Server Component to pass data to Client Components instead." | ||
); | ||
} | ||
exports.createFromNodeStream = function ( | ||
stream, | ||
serverConsumerManifest, | ||
options | ||
) { | ||
var response = new ResponseInstance( | ||
serverConsumerManifest.moduleMap, | ||
serverConsumerManifest.serverModuleMap, | ||
serverConsumerManifest.moduleLoading, | ||
noServerCall, | ||
options ? options.encodeFormAction : void 0, | ||
options && "string" === typeof options.nonce ? options.nonce : void 0, | ||
void 0 | ||
); | ||
stream.on("data", function (chunk) { | ||
if ("string" === typeof chunk) { | ||
for ( | ||
var i = 0, | ||
rowState = response._rowState, | ||
rowID = response._rowID, | ||
rowTag = response._rowTag, | ||
rowLength = response._rowLength, | ||
buffer = response._buffer, | ||
chunkLength = chunk.length; | ||
i < chunkLength; | ||
function createServerReference(id, callServer) { | ||
return createServerReference$1(id, noServerCall); | ||
} | ||
function createFromNodeStream(stream, ssrManifest, options) { | ||
const response = createResponse(ssrManifest.moduleMap, ssrManifest.moduleLoading, noServerCall, options && typeof options.nonce === 'string' ? options.nonce : undefined); | ||
stream.on('data', chunk => { | ||
processBinaryChunk(response, chunk); | ||
) { | ||
var lastIdx = -1; | ||
switch (rowState) { | ||
case 0: | ||
lastIdx = chunk.charCodeAt(i++); | ||
58 === lastIdx | ||
? (rowState = 1) | ||
: (rowID = | ||
(rowID << 4) | (96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 1: | ||
rowState = chunk.charCodeAt(i); | ||
84 === rowState || | ||
65 === rowState || | ||
79 === rowState || | ||
111 === rowState || | ||
85 === rowState || | ||
83 === rowState || | ||
115 === rowState || | ||
76 === rowState || | ||
108 === rowState || | ||
71 === rowState || | ||
103 === rowState || | ||
77 === rowState || | ||
109 === rowState || | ||
86 === rowState | ||
? ((rowTag = rowState), (rowState = 2), i++) | ||
: (64 < rowState && 91 > rowState) || | ||
114 === rowState || | ||
120 === rowState | ||
? ((rowTag = rowState), (rowState = 3), i++) | ||
: ((rowTag = 0), (rowState = 3)); | ||
continue; | ||
case 2: | ||
lastIdx = chunk.charCodeAt(i++); | ||
44 === lastIdx | ||
? (rowState = 4) | ||
: (rowLength = | ||
(rowLength << 4) | | ||
(96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 3: | ||
lastIdx = chunk.indexOf("\n", i); | ||
break; | ||
case 4: | ||
if (84 !== rowTag) | ||
throw Error( | ||
"Binary RSC chunks cannot be encoded as strings. This is a bug in the wiring of the React streams." | ||
); | ||
if (rowLength < chunk.length || chunk.length > 3 * rowLength) | ||
throw Error( | ||
"String chunks need to be passed in their original shape. Not split into smaller string chunks. This is a bug in the wiring of the React streams." | ||
); | ||
lastIdx = chunk.length; | ||
} | ||
if (-1 < lastIdx) { | ||
if (0 < buffer.length) | ||
throw Error( | ||
"String chunks need to be passed in their original shape. Not split into smaller string chunks. This is a bug in the wiring of the React streams." | ||
); | ||
i = chunk.slice(i, lastIdx); | ||
processFullStringRow(response, rowID, rowTag, i); | ||
i = lastIdx; | ||
3 === rowState && i++; | ||
rowLength = rowID = rowTag = rowState = 0; | ||
buffer.length = 0; | ||
} else if (chunk.length !== i) | ||
throw Error( | ||
"String chunks need to be passed in their original shape. Not split into smaller string chunks. This is a bug in the wiring of the React streams." | ||
); | ||
} | ||
response._rowState = rowState; | ||
response._rowID = rowID; | ||
response._rowTag = rowTag; | ||
response._rowLength = rowLength; | ||
} else { | ||
rowLength = 0; | ||
chunkLength = response._rowState; | ||
rowID = response._rowID; | ||
i = response._rowTag; | ||
rowState = response._rowLength; | ||
buffer = response._buffer; | ||
for (rowTag = chunk.length; rowLength < rowTag; ) { | ||
lastIdx = -1; | ||
switch (chunkLength) { | ||
case 0: | ||
lastIdx = chunk[rowLength++]; | ||
58 === lastIdx | ||
? (chunkLength = 1) | ||
: (rowID = | ||
(rowID << 4) | (96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 1: | ||
chunkLength = chunk[rowLength]; | ||
84 === chunkLength || | ||
65 === chunkLength || | ||
79 === chunkLength || | ||
111 === chunkLength || | ||
85 === chunkLength || | ||
83 === chunkLength || | ||
115 === chunkLength || | ||
76 === chunkLength || | ||
108 === chunkLength || | ||
71 === chunkLength || | ||
103 === chunkLength || | ||
77 === chunkLength || | ||
109 === chunkLength || | ||
86 === chunkLength | ||
? ((i = chunkLength), (chunkLength = 2), rowLength++) | ||
: (64 < chunkLength && 91 > chunkLength) || | ||
35 === chunkLength || | ||
114 === chunkLength || | ||
120 === chunkLength | ||
? ((i = chunkLength), (chunkLength = 3), rowLength++) | ||
: ((i = 0), (chunkLength = 3)); | ||
continue; | ||
case 2: | ||
lastIdx = chunk[rowLength++]; | ||
44 === lastIdx | ||
? (chunkLength = 4) | ||
: (rowState = | ||
(rowState << 4) | | ||
(96 < lastIdx ? lastIdx - 87 : lastIdx - 48)); | ||
continue; | ||
case 3: | ||
lastIdx = chunk.indexOf(10, rowLength); | ||
break; | ||
case 4: | ||
(lastIdx = rowLength + rowState), | ||
lastIdx > chunk.length && (lastIdx = -1); | ||
} | ||
var offset = chunk.byteOffset + rowLength; | ||
if (-1 < lastIdx) | ||
(rowState = new Uint8Array( | ||
chunk.buffer, | ||
offset, | ||
lastIdx - rowLength | ||
)), | ||
processFullBinaryRow(response, rowID, i, buffer, rowState), | ||
(rowLength = lastIdx), | ||
3 === chunkLength && rowLength++, | ||
(rowState = rowID = i = chunkLength = 0), | ||
(buffer.length = 0); | ||
else { | ||
chunk = new Uint8Array( | ||
chunk.buffer, | ||
offset, | ||
chunk.byteLength - rowLength | ||
); | ||
buffer.push(chunk); | ||
rowState -= chunk.byteLength; | ||
break; | ||
} | ||
} | ||
response._rowState = chunkLength; | ||
response._rowID = rowID; | ||
response._rowTag = i; | ||
response._rowLength = rowState; | ||
} | ||
}); | ||
stream.on('error', error => { | ||
stream.on("error", function (error) { | ||
reportGlobalError(response, error); | ||
}); | ||
stream.on('end', () => close(response)); | ||
return getRoot(response); | ||
} | ||
exports.createFromNodeStream = createFromNodeStream; | ||
exports.createServerReference = createServerReference; | ||
stream.on("end", function () { | ||
reportGlobalError(response, Error("Connection closed.")); | ||
}); | ||
return getChunk(response, 0); | ||
}; | ||
exports.createServerReference = function (id) { | ||
return createServerReference$1(id, noServerCall); | ||
}; |
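For context, a hedged usage sketch of the exports above: createFromNodeStream takes a Node Readable carrying the RSC payload plus a server consumer manifest whose moduleMap / serverModuleMap / moduleLoading fields mirror what ResponseInstance reads in this file. The require specifier and file names are illustrative.

"use strict";
const { createFromNodeStream } = require("react-server-dom-webpack/client.node");

function renderFromFlightStream(flightStream, ssrManifest) {
  // flightStream: a Node Readable producing the RSC payload
  // ssrManifest: { moduleMap, serverModuleMap, moduleLoading } from the bundler
  const root = createFromNodeStream(flightStream, ssrManifest, {
    nonce: undefined // optionally a CSP nonce string
  });
  // `root` is a thenable for the React tree; hand it to a client-side
  // renderer in the SSR pass.
  return root;
}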
@@ -1,14 +0,69 @@ | ||
/* | ||
React | ||
react-server-dom-webpack-node-register.js | ||
/** | ||
* @license React | ||
* react-server-dom-webpack-node-register.js | ||
* | ||
* Copyright (c) Meta Platforms, Inc. and affiliates. | ||
* | ||
* This source code is licensed under the MIT license found in the | ||
* LICENSE file in the root directory of this source tree. | ||
*/ | ||
Copyright (c) Meta Platforms, Inc. and affiliates. | ||
This source code is licensed under the MIT license found in the | ||
LICENSE file in the root directory of this source tree. | ||
*/ | ||
'use strict';const h=require("acorn-loose"),l=require("url"),q=require("module"); | ||
module.exports=function(){const m=require("react-server-dom-webpack/server"),n=m.registerServerReference,r=m.createClientModuleProxy,f=q.prototype._compile;q.prototype._compile=function(k,p){if(-1===k.indexOf("use client")&&-1===k.indexOf("use server"))return f.apply(this,arguments);try{var a=h.parse(k,{ecmaVersion:"2024",sourceType:"source"}).body}catch(g){return console.error("Error parsing %s %s",l,g.message),f.apply(this,arguments)}var b=!1,d=!1;for(var c=0;c<a.length;c++){var e=a[c];if("ExpressionStatement"!== | ||
e.type||!e.directive)break;"use client"===e.directive&&(b=!0);"use server"===e.directive&&(d=!0)}if(!b&&!d)return f.apply(this,arguments);if(b&&d)throw Error('Cannot have both "use client" and "use server" directives in the same file.');b&&(a=l.pathToFileURL(p).href,this.exports=r(a));if(d)if(f.apply(this,arguments),d=l.pathToFileURL(p).href,a=this.exports,"function"===typeof a)n(a,d,null);else for(b=Object.keys(a),c=0;c<b.length;c++){e=b[c];const g=a[b[c]];"function"===typeof g&&n(g,d,e)}}}; | ||
//# sourceMappingURL=react-server-dom-webpack-node-register.js.map | ||
"use strict"; | ||
const acorn = require("acorn-loose"), | ||
url = require("url"), | ||
Module = require("module"); | ||
module.exports = function () { | ||
const Server = require("react-server-dom-webpack/server"), | ||
registerServerReference = Server.registerServerReference, | ||
createClientModuleProxy = Server.createClientModuleProxy, | ||
originalCompile = Module.prototype._compile; | ||
Module.prototype._compile = function (content, filename) { | ||
if ( | ||
-1 === content.indexOf("use client") && | ||
-1 === content.indexOf("use server") | ||
) | ||
return originalCompile.apply(this, arguments); | ||
try { | ||
var body = acorn.parse(content, { | ||
ecmaVersion: "2024", | ||
sourceType: "source" | ||
}).body; | ||
} catch (x) { | ||
return ( | ||
console.error("Error parsing %s %s", url, x.message), | ||
originalCompile.apply(this, arguments) | ||
); | ||
} | ||
var useClient = !1, | ||
useServer = !1; | ||
for (var i = 0; i < body.length; i++) { | ||
var node = body[i]; | ||
if ("ExpressionStatement" !== node.type || !node.directive) break; | ||
"use client" === node.directive && (useClient = !0); | ||
"use server" === node.directive && (useServer = !0); | ||
} | ||
if (!useClient && !useServer) return originalCompile.apply(this, arguments); | ||
if (useClient && useServer) | ||
throw Error( | ||
'Cannot have both "use client" and "use server" directives in the same file.' | ||
); | ||
useClient && | ||
((body = url.pathToFileURL(filename).href), | ||
(this.exports = createClientModuleProxy(body))); | ||
if (useServer) | ||
if ( | ||
(originalCompile.apply(this, arguments), | ||
(useServer = url.pathToFileURL(filename).href), | ||
(body = this.exports), | ||
"function" === typeof body) | ||
) | ||
registerServerReference(body, useServer, null); | ||
else | ||
for (useClient = Object.keys(body), i = 0; i < useClient.length; i++) { | ||
node = useClient[i]; | ||
const value = body[useClient[i]]; | ||
"function" === typeof value && | ||
registerServerReference(value, useServer, node); | ||
} | ||
}; | ||
}; |
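A hedged usage sketch for the register hook above: calling the exported function patches Module.prototype._compile, so it must run before any server code is required. Files beginning with a "use client" directive are replaced by a client module proxy, and functions exported from a "use server" file are registered as server references. The application path below is illustrative.

"use strict";
require("react-server-dom-webpack/node-register")();

// Any CommonJS module required after this point is inspected for the
// "use client" / "use server" directives by the patched _compile above.
const App = require("./App"); // illustrative path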
@@ -1,24 +0,399 @@ | ||
/* | ||
React | ||
react-server-dom-webpack-plugin.js | ||
/** | ||
* @license React | ||
* react-server-dom-webpack-plugin.js | ||
* | ||
* Copyright (c) Meta Platforms, Inc. and affiliates. | ||
* | ||
* This source code is licensed under the MIT license found in the | ||
* LICENSE file in the root directory of this source tree. | ||
*/ | ||
Copyright (c) Meta Platforms, Inc. and affiliates. | ||
This source code is licensed under the MIT license found in the | ||
LICENSE file in the root directory of this source tree. | ||
*/ | ||
'use strict';var w=require("path"),x=require("url"),y=require("neo-async"),z=require("acorn-loose"),B=require("webpack/lib/dependencies/ModuleDependency"),C=require("webpack/lib/dependencies/NullDependency"),D=require("webpack/lib/Template"),E=require("webpack"); | ||
function F(a,g){if(a){if("string"===typeof a)return G(a,g);var c=Object.prototype.toString.call(a).slice(8,-1);"Object"===c&&a.constructor&&(c=a.constructor.name);if("Map"===c||"Set"===c)return Array.from(a);if("Arguments"===c||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(c))return G(a,g)}}function G(a,g){if(null==g||g>a.length)g=a.length;for(var c=0,n=Array(g);c<g;c++)n[c]=a[c];return n} | ||
function H(a,g){var c;if("undefined"===typeof Symbol||null==a[Symbol.iterator]){if(Array.isArray(a)||(c=F(a))||g&&a&&"number"===typeof a.length){c&&(a=c);var n=0;g=function(){};return{s:g,n:function(){return n>=a.length?{done:!0}:{done:!1,value:a[n++]}},e:function(b){throw b;},f:g}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");}var d=!0,e=!1,p;return{s:function(){c=a[Symbol.iterator]()}, | ||
n:function(){var b=c.next();d=b.done;return b},e:function(b){e=!0;p=b},f:function(){try{d||null==c.return||c.return()}finally{if(e)throw p;}}}}const I=Array.isArray;class J extends B{constructor(a){super(a)}get type(){return"client-reference"}}const K=require.resolve("../client.browser.js"); | ||
class L{constructor(a){this.ssrManifestFilename=this.clientManifestFilename=this.chunkName=this.clientReferences=void 0;if(!a||"boolean"!==typeof a.isServer)throw Error("React Server Plugin: You must specify the isServer option as a boolean.");if(a.isServer)throw Error("TODO: Implement the server compiler.");a.clientReferences?"string"!==typeof a.clientReferences&&I(a.clientReferences)?this.clientReferences=a.clientReferences:this.clientReferences=[a.clientReferences]:this.clientReferences=[{directory:".", | ||
recursive:!0,include:/\.(js|ts|jsx|tsx)$/}];"string"===typeof a.chunkName?(this.chunkName=a.chunkName,/\[(index|request)\]/.test(this.chunkName)||(this.chunkName+="[index]")):this.chunkName="client[index]";this.clientManifestFilename=a.clientManifestFilename||"react-client-manifest.json";this.ssrManifestFilename=a.ssrManifestFilename||"react-ssr-manifest.json"}apply(a){const g=this;let c,n=!1;a.hooks.beforeCompile.tapAsync("React Server Plugin",(d,e)=>{d=d.contextModuleFactory;const p=a.resolverFactory.get("context", | ||
{}),b=a.resolverFactory.get("normal");g.resolveAllClientFiles(a.context,p,b,a.inputFileSystem,d,function(f,h){f?e(f):(c=h,e())})});a.hooks.thisCompilation.tap("React Server Plugin",(d,e)=>{e=e.normalModuleFactory;d.dependencyFactories.set(J,e);d.dependencyTemplates.set(J,new C.Template);d=p=>{p.hooks.program.tap("React Server Plugin",()=>{const b=p.state.module;if(b.resource===K&&(n=!0,c))for(let h=0;h<c.length;h++){const t=c[h];var f=g.chunkName.replace(/\[index\]/g,""+h).replace(/\[request\]/g, | ||
D.toPath(t.userRequest));f=new E.AsyncDependenciesBlock({name:f},null,t.request);f.addDependency(t);b.addBlock(f)}})};e.hooks.parser.for("javascript/auto").tap("HarmonyModulesPlugin",d);e.hooks.parser.for("javascript/esm").tap("HarmonyModulesPlugin",d);e.hooks.parser.for("javascript/dynamic").tap("HarmonyModulesPlugin",d)});a.hooks.make.tap("React Server Plugin",d=>{d.hooks.processAssets.tap({name:"React Server Plugin",stage:E.Compilation.PROCESS_ASSETS_STAGE_REPORT},function(){if(!1===n)d.warnings.push(new E.WebpackError("Client runtime at react-server-dom-webpack/client was not found. React Server Components module map file "+ | ||
g.clientManifestFilename+" was not created."));else{var e=d.outputOptions.crossOriginLoading;e="string"===typeof e?"use-credentials"===e?e:"anonymous":null;var p=new Set((c||[]).map(m=>m.request)),b={},f={};e={moduleLoading:{prefix:d.outputOptions.publicPath||"",crossOrigin:e},moduleMap:f};var h=new Set;d.entrypoints.forEach(m=>{(m=m.getRuntimeChunk())&&m.files.forEach(v=>{h.add(v)})});d.chunkGroups.forEach(function(m){function v(k,l){if(p.has(l.resource)&&(l=x.pathToFileURL(l.resource).href,void 0!== | ||
l)){const q={};b[l]={id:k,chunks:u,name:"*"};q["*"]={specifier:l,name:"*"};f[k]=q}}const u=[];m.chunks.forEach(function(k){var l=H(k.files),q;try{for(l.s();!(q=l.n()).done;){const r=q.value;if(!r.endsWith(".js"))break;if(r.endsWith(".hot-update.js"))break;u.push(k.id,r);break}}catch(r){l.e(r)}finally{l.f()}});m.chunks.forEach(function(k){k=d.chunkGraph.getChunkModulesIterable(k);Array.from(k).forEach(function(l){const q=d.chunkGraph.getModuleId(l);v(q,l);l.modules&&l.modules.forEach(r=>{v(q,r)})})})}); | ||
var t=JSON.stringify(b,null,2);d.emitAsset(g.clientManifestFilename,new E.sources.RawSource(t,!1));e=JSON.stringify(e,null,2);d.emitAsset(g.ssrManifestFilename,new E.sources.RawSource(e,!1))}})})}resolveAllClientFiles(a,g,c,n,d,e){function p(b){if(-1===b.indexOf("use client"))return!1;let f;try{f=z.parse(b,{ecmaVersion:"2024",sourceType:"module"}).body}catch(h){return!1}for(b=0;b<f.length;b++){const h=f[b];if("ExpressionStatement"!==h.type||!h.directive)break;if("use client"===h.directive)return!0}return!1} | ||
y.map(this.clientReferences,(b,f)=>{"string"===typeof b?f(null,[new J(b)]):g.resolve({},a,b.directory,{},(h,t)=>{if(h)return f(h);d.resolveDependencies(n,{resource:t,resourceQuery:"",recursive:void 0===b.recursive?!0:b.recursive,regExp:b.include,include:void 0,exclude:b.exclude},(m,v)=>{if(m)return f(m);m=v.map(u=>{var k=w.join(t,u.userRequest);k=new J(k);k.userRequest=u.userRequest;return k});y.filter(m,(u,k)=>{c.resolve({},a,u.request,{},(l,q)=>{if(l||"string"!==typeof q)return k(null,!1);n.readFile(q, | ||
"utf-8",(r,A)=>{if(r||"string"!==typeof A)return k(null,!1);r=p(A);k(null,r)})})},f)})})},(b,f)=>{if(b)return e(b);b=[];for(let h=0;h<f.length;h++)b.push.apply(b,f[h]);e(null,b)})}}module.exports=L; | ||
//# sourceMappingURL=react-server-dom-webpack-plugin.js.map | ||
"use strict"; | ||
var path = require("path"), | ||
url = require("url"), | ||
asyncLib = require("neo-async"), | ||
acorn = require("acorn-loose"), | ||
ModuleDependency = require("webpack/lib/dependencies/ModuleDependency"), | ||
NullDependency = require("webpack/lib/dependencies/NullDependency"), | ||
Template = require("webpack/lib/Template"), | ||
webpack = require("webpack"); | ||
function _unsupportedIterableToArray(o, minLen) { | ||
if (o) { | ||
if ("string" === typeof o) return _arrayLikeToArray(o, minLen); | ||
var n = Object.prototype.toString.call(o).slice(8, -1); | ||
"Object" === n && o.constructor && (n = o.constructor.name); | ||
if ("Map" === n || "Set" === n) return Array.from(o); | ||
if ("Arguments" === n || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) | ||
return _arrayLikeToArray(o, minLen); | ||
} | ||
} | ||
function _arrayLikeToArray(arr, len) { | ||
if (null == len || len > arr.length) len = arr.length; | ||
for (var i = 0, arr2 = Array(len); i < len; i++) arr2[i] = arr[i]; | ||
return arr2; | ||
} | ||
function _createForOfIteratorHelper(o, allowArrayLike) { | ||
var it; | ||
if ("undefined" === typeof Symbol || null == o[Symbol.iterator]) { | ||
if ( | ||
Array.isArray(o) || | ||
(it = _unsupportedIterableToArray(o)) || | ||
(allowArrayLike && o && "number" === typeof o.length) | ||
) { | ||
it && (o = it); | ||
var i = 0; | ||
allowArrayLike = function () {}; | ||
return { | ||
s: allowArrayLike, | ||
n: function () { | ||
return i >= o.length ? { done: !0 } : { done: !1, value: o[i++] }; | ||
}, | ||
e: function (e) { | ||
throw e; | ||
}, | ||
f: allowArrayLike | ||
}; | ||
} | ||
throw new TypeError( | ||
"Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method." | ||
); | ||
} | ||
var normalCompletion = !0, | ||
didErr = !1, | ||
err; | ||
return { | ||
s: function () { | ||
it = o[Symbol.iterator](); | ||
}, | ||
n: function () { | ||
var step = it.next(); | ||
normalCompletion = step.done; | ||
return step; | ||
}, | ||
e: function (e) { | ||
didErr = !0; | ||
err = e; | ||
}, | ||
f: function () { | ||
try { | ||
normalCompletion || null == it.return || it.return(); | ||
} finally { | ||
if (didErr) throw err; | ||
} | ||
} | ||
}; | ||
} | ||
const isArrayImpl = Array.isArray; | ||
class ClientReferenceDependency extends ModuleDependency { | ||
constructor(request) { | ||
super(request); | ||
} | ||
get type() { | ||
return "client-reference"; | ||
} | ||
} | ||
const clientFileName = require.resolve("../client.browser.js"); | ||
class ReactFlightWebpackPlugin { | ||
constructor(options) { | ||
this.serverConsumerManifestFilename = | ||
this.clientManifestFilename = | ||
this.chunkName = | ||
this.clientReferences = | ||
void 0; | ||
if (!options || "boolean" !== typeof options.isServer) | ||
throw Error( | ||
"React Server Plugin: You must specify the isServer option as a boolean." | ||
); | ||
if (options.isServer) throw Error("TODO: Implement the server compiler."); | ||
options.clientReferences | ||
? "string" !== typeof options.clientReferences && | ||
isArrayImpl(options.clientReferences) | ||
? (this.clientReferences = options.clientReferences) | ||
: (this.clientReferences = [options.clientReferences]) | ||
: (this.clientReferences = [ | ||
{ directory: ".", recursive: !0, include: /\.(js|ts|jsx|tsx)$/ } | ||
]); | ||
"string" === typeof options.chunkName | ||
? ((this.chunkName = options.chunkName), | ||
/\[(index|request)\]/.test(this.chunkName) || | ||
(this.chunkName += "[index]")) | ||
: (this.chunkName = "client[index]"); | ||
this.clientManifestFilename = | ||
options.clientManifestFilename || "react-client-manifest.json"; | ||
this.serverConsumerManifestFilename = | ||
options.serverConsumerManifestFilename || "react-ssr-manifest.json"; | ||
} | ||
apply(compiler) { | ||
const _this = this; | ||
let resolvedClientReferences, | ||
clientFileNameFound = !1; | ||
compiler.hooks.beforeCompile.tapAsync( | ||
"React Server Plugin", | ||
(_ref, callback) => { | ||
_ref = _ref.contextModuleFactory; | ||
const contextResolver = compiler.resolverFactory.get("context", {}), | ||
normalResolver = compiler.resolverFactory.get("normal"); | ||
_this.resolveAllClientFiles( | ||
compiler.context, | ||
contextResolver, | ||
normalResolver, | ||
compiler.inputFileSystem, | ||
_ref, | ||
function (err, resolvedClientRefs) { | ||
err | ||
? callback(err) | ||
: ((resolvedClientReferences = resolvedClientRefs), callback()); | ||
} | ||
); | ||
} | ||
); | ||
compiler.hooks.thisCompilation.tap( | ||
"React Server Plugin", | ||
(compilation, _ref2) => { | ||
_ref2 = _ref2.normalModuleFactory; | ||
compilation.dependencyFactories.set(ClientReferenceDependency, _ref2); | ||
compilation.dependencyTemplates.set( | ||
ClientReferenceDependency, | ||
new NullDependency.Template() | ||
); | ||
compilation = (parser) => { | ||
parser.hooks.program.tap("React Server Plugin", () => { | ||
const module = parser.state.module; | ||
if ( | ||
module.resource === clientFileName && | ||
((clientFileNameFound = !0), resolvedClientReferences) | ||
) | ||
for (let i = 0; i < resolvedClientReferences.length; i++) { | ||
const dep = resolvedClientReferences[i]; | ||
var chunkName = _this.chunkName | ||
.replace(/\[index\]/g, "" + i) | ||
.replace(/\[request\]/g, Template.toPath(dep.userRequest)); | ||
chunkName = new webpack.AsyncDependenciesBlock( | ||
{ name: chunkName }, | ||
null, | ||
dep.request | ||
); | ||
chunkName.addDependency(dep); | ||
module.addBlock(chunkName); | ||
} | ||
}); | ||
}; | ||
_ref2.hooks.parser | ||
.for("javascript/auto") | ||
.tap("HarmonyModulesPlugin", compilation); | ||
_ref2.hooks.parser | ||
.for("javascript/esm") | ||
.tap("HarmonyModulesPlugin", compilation); | ||
_ref2.hooks.parser | ||
.for("javascript/dynamic") | ||
.tap("HarmonyModulesPlugin", compilation); | ||
} | ||
); | ||
compiler.hooks.make.tap("React Server Plugin", (compilation) => { | ||
compilation.hooks.processAssets.tap( | ||
{ | ||
name: "React Server Plugin", | ||
stage: webpack.Compilation.PROCESS_ASSETS_STAGE_REPORT | ||
}, | ||
function () { | ||
if (!1 === clientFileNameFound) | ||
compilation.warnings.push( | ||
new webpack.WebpackError( | ||
"Client runtime at react-server-dom-webpack/client was not found. React Server Components module map file " + | ||
_this.clientManifestFilename + | ||
" was not created." | ||
) | ||
); | ||
else { | ||
var configuredCrossOriginLoading = | ||
compilation.outputOptions.crossOriginLoading; | ||
configuredCrossOriginLoading = | ||
"string" === typeof configuredCrossOriginLoading | ||
? "use-credentials" === configuredCrossOriginLoading | ||
? configuredCrossOriginLoading | ||
: "anonymous" | ||
: null; | ||
var resolvedClientFiles = new Set( | ||
(resolvedClientReferences || []).map((ref) => ref.request) | ||
), | ||
clientManifest = {}, | ||
moduleMap = {}; | ||
configuredCrossOriginLoading = { | ||
moduleLoading: { | ||
prefix: compilation.outputOptions.publicPath || "", | ||
crossOrigin: configuredCrossOriginLoading | ||
}, | ||
moduleMap | ||
}; | ||
var runtimeChunkFiles = new Set(); | ||
compilation.entrypoints.forEach((entrypoint) => { | ||
(entrypoint = entrypoint.getRuntimeChunk()) && | ||
entrypoint.files.forEach((runtimeFile) => { | ||
runtimeChunkFiles.add(runtimeFile); | ||
}); | ||
}); | ||
compilation.chunkGroups.forEach(function (chunkGroup) { | ||
function recordModule(id, module) { | ||
if ( | ||
resolvedClientFiles.has(module.resource) && | ||
((module = url.pathToFileURL(module.resource).href), | ||
void 0 !== module) | ||
) { | ||
const ssrExports = {}; | ||
clientManifest[module] = { id, chunks, name: "*" }; | ||
ssrExports["*"] = { specifier: module, name: "*" }; | ||
moduleMap[id] = ssrExports; | ||
} | ||
} | ||
const chunks = []; | ||
chunkGroup.chunks.forEach(function (c) { | ||
var _iterator = _createForOfIteratorHelper(c.files), | ||
_step; | ||
try { | ||
for (_iterator.s(); !(_step = _iterator.n()).done; ) { | ||
const file = _step.value; | ||
if (!file.endsWith(".js")) break; | ||
if (file.endsWith(".hot-update.js")) break; | ||
chunks.push(c.id, file); | ||
break; | ||
} | ||
} catch (err) { | ||
_iterator.e(err); | ||
} finally { | ||
_iterator.f(); | ||
} | ||
}); | ||
chunkGroup.chunks.forEach(function (chunk) { | ||
chunk = compilation.chunkGraph.getChunkModulesIterable(chunk); | ||
Array.from(chunk).forEach(function (module) { | ||
const moduleId = compilation.chunkGraph.getModuleId(module); | ||
recordModule(moduleId, module); | ||
module.modules && | ||
module.modules.forEach((concatenatedMod) => { | ||
recordModule(moduleId, concatenatedMod); | ||
}); | ||
}); | ||
}); | ||
}); | ||
var clientOutput = JSON.stringify(clientManifest, null, 2); | ||
compilation.emitAsset( | ||
_this.clientManifestFilename, | ||
new webpack.sources.RawSource(clientOutput, !1) | ||
); | ||
configuredCrossOriginLoading = JSON.stringify( | ||
configuredCrossOriginLoading, | ||
null, | ||
2 | ||
); | ||
compilation.emitAsset( | ||
_this.serverConsumerManifestFilename, | ||
new webpack.sources.RawSource(configuredCrossOriginLoading, !1) | ||
); | ||
} | ||
} | ||
); | ||
}); | ||
} | ||
resolveAllClientFiles( | ||
context, | ||
contextResolver, | ||
normalResolver, | ||
fs, | ||
contextModuleFactory, | ||
callback | ||
) { | ||
function hasUseClientDirective(source) { | ||
if (-1 === source.indexOf("use client")) return !1; | ||
let body; | ||
try { | ||
body = acorn.parse(source, { | ||
ecmaVersion: "2024", | ||
sourceType: "module" | ||
}).body; | ||
} catch (x) { | ||
return !1; | ||
} | ||
for (source = 0; source < body.length; source++) { | ||
const node = body[source]; | ||
if ("ExpressionStatement" !== node.type || !node.directive) break; | ||
if ("use client" === node.directive) return !0; | ||
} | ||
return !1; | ||
} | ||
asyncLib.map( | ||
this.clientReferences, | ||
(clientReferencePath, cb) => { | ||
"string" === typeof clientReferencePath | ||
? cb(null, [new ClientReferenceDependency(clientReferencePath)]) | ||
: contextResolver.resolve( | ||
{}, | ||
context, | ||
clientReferencePath.directory, | ||
{}, | ||
(err, resolvedDirectory) => { | ||
if (err) return cb(err); | ||
contextModuleFactory.resolveDependencies( | ||
fs, | ||
{ | ||
resource: resolvedDirectory, | ||
resourceQuery: "", | ||
recursive: | ||
void 0 === clientReferencePath.recursive | ||
? !0 | ||
: clientReferencePath.recursive, | ||
regExp: clientReferencePath.include, | ||
include: void 0, | ||
exclude: clientReferencePath.exclude | ||
}, | ||
(err2, deps) => { | ||
if (err2) return cb(err2); | ||
err2 = deps.map((dep) => { | ||
var request = path.join( | ||
resolvedDirectory, | ||
dep.userRequest | ||
); | ||
request = new ClientReferenceDependency(request); | ||
request.userRequest = dep.userRequest; | ||
return request; | ||
}); | ||
asyncLib.filter( | ||
err2, | ||
(clientRefDep, filterCb) => { | ||
normalResolver.resolve( | ||
{}, | ||
context, | ||
clientRefDep.request, | ||
{}, | ||
(err3, resolvedPath) => { | ||
if (err3 || "string" !== typeof resolvedPath) | ||
return filterCb(null, !1); | ||
fs.readFile( | ||
resolvedPath, | ||
"utf-8", | ||
(err4, content) => { | ||
if (err4 || "string" !== typeof content) | ||
return filterCb(null, !1); | ||
err4 = hasUseClientDirective(content); | ||
filterCb(null, err4); | ||
} | ||
); | ||
} | ||
); | ||
}, | ||
cb | ||
); | ||
} | ||
); | ||
} | ||
); | ||
}, | ||
(err, result) => { | ||
if (err) return callback(err); | ||
err = []; | ||
for (let i = 0; i < result.length; i++) err.push.apply(err, result[i]); | ||
callback(null, err); | ||
} | ||
); | ||
} | ||
} | ||
module.exports = ReactFlightWebpackPlugin; |
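A hedged usage sketch for the plugin above in a client-side webpack config. The option names match the constructor in this file (including the renamed serverConsumerManifestFilename); the require specifier, entry path, and directory are illustrative. With these options the build emits react-client-manifest.json and react-ssr-manifest.json alongside the client bundles.

"use strict";
const ReactFlightWebpackPlugin = require("react-server-dom-webpack/plugin");

module.exports = {
  entry: "./src/index.client.js",
  plugins: [
    new ReactFlightWebpackPlugin({
      isServer: false, // required; the server compiler is not implemented here
      clientReferences: [
        { directory: "./src", recursive: true, include: /\.(js|ts|jsx|tsx)$/ }
      ],
      clientManifestFilename: "react-client-manifest.json",
      serverConsumerManifestFilename: "react-ssr-manifest.json"
    })
  ]
};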
'use strict'; | ||
if (process.env.NODE_ENV === 'production') { | ||
module.exports = require('./cjs/react-server-dom-webpack-client.browser.production.min.js'); | ||
module.exports = require('./cjs/react-server-dom-webpack-client.browser.production.js'); | ||
} else { | ||
module.exports = require('./cjs/react-server-dom-webpack-client.browser.development.js'); | ||
} |
'use strict'; | ||
if (process.env.NODE_ENV === 'production') { | ||
module.exports = require('./cjs/react-server-dom-webpack-client.edge.production.min.js'); | ||
module.exports = require('./cjs/react-server-dom-webpack-client.edge.production.js'); | ||
} else { | ||
module.exports = require('./cjs/react-server-dom-webpack-client.edge.development.js'); | ||
} |
'use strict'; | ||
if (process.env.NODE_ENV === 'production') { | ||
module.exports = require('./cjs/react-server-dom-webpack-client.node.production.min.js'); | ||
module.exports = require('./cjs/react-server-dom-webpack-client.node.production.js'); | ||
} else { | ||
module.exports = require('./cjs/react-server-dom-webpack-client.node.development.js'); | ||
} |
'use strict'; | ||
if (process.env.NODE_ENV === 'production') { | ||
module.exports = require('./cjs/react-server-dom-webpack-client.node.unbundled.production.min.js'); | ||
module.exports = require('./cjs/react-server-dom-webpack-client.node.unbundled.production.js'); | ||
} else { | ||
module.exports = require('./cjs/react-server-dom-webpack-client.node.unbundled.development.js'); | ||
} |
{ | ||
"name": "react-server-dom-webpack", | ||
"description": "React Server Components bindings for DOM using Webpack. This is intended to be integrated into meta-frameworks. It is not intended to be imported directly.", | ||
"version": "0.0.0-experimental-d3def4793-20240208", | ||
"version": "0.0.0-experimental-d4287258-20241217", | ||
"keywords": [ | ||
"react" | ||
], | ||
"homepage": "https://reactjs.org/", | ||
"homepage": "https://react.dev/", | ||
"bugs": "https://github.com/facebook/react/issues", | ||
@@ -26,5 +26,9 @@ "license": "MIT", | ||
"server.node.unbundled.js", | ||
"static.js", | ||
"static.browser.js", | ||
"static.edge.js", | ||
"static.node.js", | ||
"static.node.unbundled.js", | ||
"node-register.js", | ||
"cjs/", | ||
"umd/", | ||
"esm/" | ||
@@ -68,3 +72,20 @@ ], | ||
"./server.node.unbundled": "./server.node.unbundled.js", | ||
"./node-loader": "./esm/react-server-dom-webpack-node-loader.production.min.js", | ||
"./static": { | ||
"react-server": { | ||
"workerd": "./static.edge.js", | ||
"deno": "./static.browser.js", | ||
"node": { | ||
"webpack": "./static.node.js", | ||
"default": "./static.node.unbundled.js" | ||
}, | ||
"edge-light": "./static.edge.js", | ||
"browser": "./static.browser.js" | ||
}, | ||
"default": "./static.js" | ||
}, | ||
"./static.browser": "./static.browser.js", | ||
"./static.edge": "./static.edge.js", | ||
"./static.node": "./static.node.js", | ||
"./static.node.unbundled": "./static.node.unbundled.js", | ||
"./node-loader": "./esm/react-server-dom-webpack-node-loader.production.js", | ||
"./node-register": "./node-register.js", | ||
@@ -83,4 +104,4 @@ "./package.json": "./package.json" | ||
"peerDependencies": { | ||
"react": "0.0.0-experimental-d3def4793-20240208", | ||
"react-dom": "0.0.0-experimental-d3def4793-20240208", | ||
"react": "0.0.0-experimental-d4287258-20241217", | ||
"react-dom": "0.0.0-experimental-d4287258-20241217", | ||
"webpack": "^5.59.0" | ||
@@ -91,9 +112,4 @@ }, | ||
"neo-async": "^2.6.1", | ||
"loose-envify": "^1.1.0" | ||
}, | ||
"browserify": { | ||
"transform": [ | ||
"loose-envify" | ||
] | ||
"webpack-sources": "^3.2.0" | ||
} | ||
} |
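The new "./static" entry relies on conditional exports: the resolver walks the condition keys in order and takes the first one that is active in the current environment. An illustrative summary of how the map above resolves, assuming standard Node/bundler condition handling:

// react-server + workerd           -> ./static.edge.js
// react-server + deno              -> ./static.browser.js
// react-server + node + webpack    -> ./static.node.js
// react-server + node (otherwise)  -> ./static.node.unbundled.js
// react-server + edge-light        -> ./static.edge.js
// react-server + browser           -> ./static.browser.js
// no react-server condition        -> ./static.js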
'use strict'; | ||
var s; | ||
if (process.env.NODE_ENV === 'production') { | ||
module.exports = require('./cjs/react-server-dom-webpack-server.browser.production.min.js'); | ||
s = require('./cjs/react-server-dom-webpack-server.browser.production.js'); | ||
} else { | ||
module.exports = require('./cjs/react-server-dom-webpack-server.browser.development.js'); | ||
s = require('./cjs/react-server-dom-webpack-server.browser.development.js'); | ||
} | ||
exports.renderToReadableStream = s.renderToReadableStream; | ||
exports.decodeReply = s.decodeReply; | ||
exports.decodeAction = s.decodeAction; | ||
exports.decodeFormState = s.decodeFormState; | ||
exports.registerServerReference = s.registerServerReference; | ||
exports.registerClientReference = s.registerClientReference; | ||
exports.createClientModuleProxy = s.createClientModuleProxy; | ||
exports.createTemporaryReferenceSet = s.createTemporaryReferenceSet; |
'use strict'; | ||
var s; | ||
if (process.env.NODE_ENV === 'production') { | ||
module.exports = require('./cjs/react-server-dom-webpack-server.edge.production.min.js'); | ||
s = require('./cjs/react-server-dom-webpack-server.edge.production.js'); | ||
} else { | ||
module.exports = require('./cjs/react-server-dom-webpack-server.edge.development.js'); | ||
s = require('./cjs/react-server-dom-webpack-server.edge.development.js'); | ||
} | ||
exports.renderToReadableStream = s.renderToReadableStream; | ||
exports.decodeReply = s.decodeReply; | ||
exports.decodeAction = s.decodeAction; | ||
exports.decodeFormState = s.decodeFormState; | ||
exports.registerServerReference = s.registerServerReference; | ||
exports.registerClientReference = s.registerClientReference; | ||
exports.createClientModuleProxy = s.createClientModuleProxy; | ||
exports.createTemporaryReferenceSet = s.createTemporaryReferenceSet; |
'use strict'; | ||
var s; | ||
if (process.env.NODE_ENV === 'production') { | ||
module.exports = require('./cjs/react-server-dom-webpack-server.node.production.min.js'); | ||
s = require('./cjs/react-server-dom-webpack-server.node.production.js'); | ||
} else { | ||
module.exports = require('./cjs/react-server-dom-webpack-server.node.development.js'); | ||
s = require('./cjs/react-server-dom-webpack-server.node.development.js'); | ||
} | ||
exports.renderToPipeableStream = s.renderToPipeableStream; | ||
exports.decodeReplyFromBusboy = s.decodeReplyFromBusboy; | ||
exports.decodeReply = s.decodeReply; | ||
exports.decodeAction = s.decodeAction; | ||
exports.decodeFormState = s.decodeFormState; | ||
exports.registerServerReference = s.registerServerReference; | ||
exports.registerClientReference = s.registerClientReference; | ||
exports.createClientModuleProxy = s.createClientModuleProxy; | ||
exports.createTemporaryReferenceSet = s.createTemporaryReferenceSet; |
'use strict'; | ||
var s; | ||
if (process.env.NODE_ENV === 'production') { | ||
module.exports = require('./cjs/react-server-dom-webpack-server.node.unbundled.production.min.js'); | ||
s = require('./cjs/react-server-dom-webpack-server.node.unbundled.production.js'); | ||
} else { | ||
module.exports = require('./cjs/react-server-dom-webpack-server.node.unbundled.development.js'); | ||
s = require('./cjs/react-server-dom-webpack-server.node.unbundled.development.js'); | ||
} | ||
exports.renderToPipeableStream = s.renderToPipeableStream; | ||
exports.decodeReplyFromBusboy = s.decodeReplyFromBusboy; | ||
exports.decodeReply = s.decodeReply; | ||
exports.decodeAction = s.decodeAction; | ||
exports.decodeFormState = s.decodeFormState; | ||
exports.registerServerReference = s.registerServerReference; | ||
exports.registerClientReference = s.registerClientReference; | ||
exports.createClientModuleProxy = s.createClientModuleProxy; | ||
exports.createTemporaryReferenceSet = s.createTemporaryReferenceSet; |
Sorry, the diffs of 12 further files are too big to display.
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Uses eval
Supply chain risk: Package uses dynamic code execution (e.g., eval()), which is a dangerous practice. This can prevent the code from running in certain environments and increases the risk that the code may contain exploits or malicious behavior.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include them.
Found 1 instance in 1 package
Debug access
Supply chain risk: Uses debug, reflection, and dynamic code execution features.
Found 1 instance in 1 package
Minified code
Quality: This package contains minified code. This may be harmless in some cases where minified code is included in packaged libraries; however, packages on npm should not minify code.
Found 2 instances in 1 package
+ Added webpack-sources@^3.2.0
+ Added react@0.0.0-experimental-d4287258-20241217 (transitive)
+ Added react-dom@0.0.0-experimental-d4287258-20241217 (transitive)
+ Added scheduler@0.0.0-experimental-d4287258-20241217 (transitive)
- Removed loose-envify@^1.1.0
- Removed js-tokens@4.0.0 (transitive)
- Removed loose-envify@1.4.0 (transitive)
- Removed react@0.0.0-experimental-d3def4793-20240208 (transitive)
- Removed react-dom@0.0.0-experimental-d3def4793-20240208 (transitive)
- Removed scheduler@0.0.0-experimental-d3def4793-20240208 (transitive)