@loaders.gl/loader-utils
Comparing version 2.2.5 to 2.3.0-alpha.1
@@ -1,2 +0,3 @@ | ||
export {LoaderObject, WriterObject} from './types/types'; | ||
export {LoaderObject, WriterObject, LoaderContext, DataType, SyncDataType, BatchableDataType, | ||
IFileSystem, IRandomAccessReadFileSystem} from './types'; | ||
@@ -61,4 +62,4 @@ export {default as createWorker} from './lib/create-worker'; | ||
} from './lib/iterator-utils/text-iterators'; | ||
export {forEach, concatenateChunksAsync} from './lib/iterator-utils/async-iteration'; | ||
// REQUEST UTILS | ||
@@ -65,0 +66,0 @@ export {default as RequestScheduler} from './lib/request-utils/request-scheduler'; |
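For consumers, the widened export list above means these symbols come straight from the package root. A minimal TypeScript sketch of picking one up; the assumption that concatenateChunksAsync folds ArrayBuffer chunks into a single ArrayBuffer is mine, not stated in this diff:

import {concatenateChunksAsync} from '@loaders.gl/loader-utils';

// Sketch: gather batched input into one buffer.
// Only the export itself is confirmed by the hunk above; the exact return
// type for ArrayBuffer chunks is assumed.
async function collectChunks(chunks: AsyncIterable<ArrayBuffer>): Promise<ArrayBuffer> {
  return await concatenateChunksAsync(chunks);
}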
@@ -208,2 +208,14 @@ "use strict"; | ||
}); | ||
Object.defineProperty(exports, "forEach", { | ||
enumerable: true, | ||
get: function get() { | ||
return _asyncIteration.forEach; | ||
} | ||
}); | ||
Object.defineProperty(exports, "concatenateChunksAsync", { | ||
enumerable: true, | ||
get: function get() { | ||
return _asyncIteration.concatenateChunksAsync; | ||
} | ||
}); | ||
Object.defineProperty(exports, "RequestScheduler", { | ||
@@ -261,2 +273,4 @@ enumerable: true, | ||
var _asyncIteration = require("./lib/iterator-utils/async-iteration"); | ||
var _requestScheduler = _interopRequireDefault(require("./lib/request-utils/request-scheduler")); | ||
@@ -263,0 +277,0 @@ |
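The Object.defineProperty getters in this CommonJS bundle are just the transpiled form of plain re-exports; the ES module source they mirror (kept verbatim in the ESM bundle further down) is:

export {forEach, concatenateChunksAsync} from './lib/iterator-utils/async-iteration';
export {default as RequestScheduler} from './lib/request-utils/request-scheduler';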
/** | ||
* | ||
* @param byteLength | ||
* | ||
* @param byteLength | ||
*/ | ||
@@ -9,5 +9,5 @@ export function padTo4Bytes(byteLength); | ||
* Copy a view of an ArrayBuffer into new ArrayBuffer with byteOffset = 0 | ||
* @param arrayBuffer | ||
* @param byteOffset | ||
* @param byteLength | ||
* @param arrayBuffer | ||
* @param byteOffset | ||
* @param byteLength | ||
*/ | ||
@@ -17,2 +17,13 @@ export function getZeroOffsetArrayBuffer(arrayBuffer, byteOffset, byteLength); | ||
/** | ||
* Concatenate two ArrayBuffers | ||
* @param source1 The first ArrayBuffer. | ||
* @param source2 The second ArrayBuffer. | ||
* @return A concatenated ArrayBuffer | ||
*/ | ||
export function concatenateArrayBuffers( | ||
source1: ArrayBuffer | Uint8Array, | ||
source2: ArrayBuffer | Uint8Array | ||
): ArrayBuffer; | ||
/** | ||
* Creates a new Uint8Array based on two different ArrayBuffers | ||
@@ -39,2 +50,2 @@ * @param targetBuffer The first buffer. | ||
*/ | ||
export function copyToArray(source: ArrayBuffer|any, target: any, targetOffset: number): number; | ||
export function copyToArray(source: ArrayBuffer | any, target: any, targetOffset: number): number; |
@@ -8,2 +8,3 @@ "use strict"; | ||
exports.getZeroOffsetArrayBuffer = getZeroOffsetArrayBuffer; | ||
exports.concatenateArrayBuffers = concatenateArrayBuffers; | ||
exports.copyArrayBuffer = copyArrayBuffer; | ||
@@ -22,2 +23,11 @@ exports.copyToArray = copyToArray; | ||
function concatenateArrayBuffers(source1, source2) { | ||
var sourceArray1 = source1 instanceof ArrayBuffer ? new Uint8Array(source1) : source1; | ||
var sourceArray2 = source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2; | ||
var temp = new Uint8Array(sourceArray1.byteLength + sourceArray2.byteLength); | ||
temp.set(sourceArray1, 0); | ||
temp.set(sourceArray2, sourceArray1.byteLength); | ||
return temp.buffer; | ||
} | ||
function copyArrayBuffer(targetBuffer, sourceBuffer, byteOffset) { | ||
@@ -24,0 +34,0 @@ var byteLength = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : sourceBuffer.byteLength; |
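A short usage sketch for the new helper. The root import is an assumption (this hunk shows the implementation, not the re-export site), and the byte values are illustrative:

import {concatenateArrayBuffers} from '@loaders.gl/loader-utils'; // assumed export path

const magic = new Uint8Array([0x67, 0x6c, 0x54, 0x46]); // 4 bytes
const payload = new Float32Array([1, 2, 3]).buffer;     // 12-byte ArrayBuffer

const combined = concatenateArrayBuffers(magic, payload);
console.log(combined.byteLength); // 16; both sources are copied into a fresh buffer, neither is mutated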
@@ -22,3 +22,3 @@ "use strict"; | ||
var LATEST = 'beta'; | ||
var VERSION = typeof "2.2.5" !== 'undefined' ? "2.2.5" : LATEST; | ||
var VERSION = typeof "2.3.0-alpha.1" !== 'undefined' ? "2.3.0-alpha.1" : LATEST; | ||
var loadLibraryPromises = {}; | ||
@@ -25,0 +25,0 @@ |
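The odd-looking typeof check against a string literal here (and in the next hunk) is what remains after a build-time replace step; the pre-build source presumably looks like the sketch below, where the __VERSION__ placeholder name is an assumption:

// Assumed pre-build form: a bundler define step substitutes __VERSION__ with
// the package version; unreplaced builds fall back to the 'beta' tag.
declare const __VERSION__: string | undefined;
const LATEST = 'beta';
const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : LATEST;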
@@ -12,3 +12,3 @@ "use strict"; | ||
var VERSION = typeof "2.2.5" !== 'undefined' ? "2.2.5" : ''; | ||
var VERSION = typeof "2.3.0-alpha.1" !== 'undefined' ? "2.3.0-alpha.1" : ''; | ||
@@ -15,0 +15,0 @@ function validateLoaderVersion(loader) { |
@@ -36,1 +36,24 @@ /** | ||
}; | ||
export type LoaderContext = { | ||
fetch?: any; | ||
loaders?: LoaderObject[]; | ||
url?: string; | ||
parse?: (data: ArrayBuffer, options?: object) => Promise<any>; | ||
parseSync?: (data: ArrayBuffer, options?: object) => any; | ||
parseInBatches?: (data: AsyncIterator<any>, options?: object) => AsyncIterator<any>; | ||
}; | ||
/** Types that can be synchronously parsed */ | ||
export type SyncDataType = string | ArrayBuffer; // TODO File | Blob can be read synchronously... | ||
/** Types that can be parsed async */ | ||
export type DataType = string | ArrayBuffer | File | Blob | Response | ReadableStream; | ||
/** Types that can be parsed in batches */ | ||
export type BatchableDataType = | ||
DataType | | ||
Iterable<ArrayBuffer> | | ||
AsyncIterable<ArrayBuffer> | | ||
Promise<AsyncIterable<ArrayBuffer>>; |
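The new type exports let application code describe batched inputs and the context handed to sub-loaders. A minimal TypeScript sketch; the function name and the delegation pattern are illustrative, not part of the package:

import {BatchableDataType, LoaderContext} from '@loaders.gl/loader-utils';

// Accepts anything parseable in batches: a DataType, an (async) iterable of
// ArrayBuffer chunks, or a promise resolving to such an iterable.
async function parseWithContext(data: BatchableDataType, context: LoaderContext): Promise<any> {
  // Assumption: sub-loaders commonly delegate to the context-provided parser.
  if (context.parse && data instanceof ArrayBuffer) {
    return context.parse(data, {});
  }
  throw new Error('input shape not handled in this sketch');
}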
@@ -1,2 +0,3 @@ | ||
export {LoaderObject, WriterObject} from './types/types'; | ||
export {LoaderObject, WriterObject, LoaderContext, DataType, SyncDataType, BatchableDataType, | ||
IFileSystem, IRandomAccessReadFileSystem} from './types'; | ||
@@ -61,4 +62,4 @@ export {default as createWorker} from './lib/create-worker'; | ||
} from './lib/iterator-utils/text-iterators'; | ||
export {forEach, concatenateChunksAsync} from './lib/iterator-utils/async-iteration'; | ||
// REQUEST UTILS | ||
@@ -65,0 +66,0 @@ export {default as RequestScheduler} from './lib/request-utils/request-scheduler'; |
@@ -17,4 +17,5 @@ export { default as createWorker } from './lib/create-worker'; | ||
export { makeTextEncoderIterator, makeTextDecoderIterator, makeLineIterator, makeNumberedLineIterator } from './lib/iterator-utils/text-iterators'; | ||
export { forEach, concatenateChunksAsync } from './lib/iterator-utils/async-iteration'; | ||
export { default as RequestScheduler } from './lib/request-utils/request-scheduler'; | ||
export { getMeshSize as _getMeshSize, getMeshBoundingBox } from './categories/mesh/mesh-utils'; | ||
//# sourceMappingURL=index.js.map |
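The mesh-category helpers re-exported at the end of this bundle operate on an attributes map. A rough usage sketch based on common loaders.gl mesh conventions; the attribute layout and the return shape are assumptions, not shown in this diff:

import {getMeshBoundingBox} from '@loaders.gl/loader-utils';

// Assumed attribute shape: {POSITION: {value: TypedArray, size: number}, ...}
const attributes = {
  POSITION: {value: new Float32Array([0, 0, 0, 1, 2, 3]), size: 3}
};
const boundingBox = getMeshBoundingBox(attributes);
// Assumed result shape: [[minX, minY, minZ], [maxX, maxY, maxZ]]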
/** | ||
* | ||
* @param byteLength | ||
* | ||
* @param byteLength | ||
*/ | ||
@@ -9,5 +9,5 @@ export function padTo4Bytes(byteLength); | ||
* Copy a view of an ArrayBuffer into new ArrayBuffer with byteOffset = 0 | ||
* @param arrayBuffer | ||
* @param byteOffset | ||
* @param byteLength | ||
* @param arrayBuffer | ||
* @param byteOffset | ||
* @param byteLength | ||
*/ | ||
@@ -17,2 +17,13 @@ export function getZeroOffsetArrayBuffer(arrayBuffer, byteOffset, byteLength); | ||
/** | ||
* Concatenate two ArrayBuffers | ||
* @param source1 The first ArrayBuffer. | ||
* @param source2 The second ArrayBuffer. | ||
* @return A concatenated ArrayBuffer | ||
*/ | ||
export function concatenateArrayBuffers( | ||
source1: ArrayBuffer | Uint8Array, | ||
source2: ArrayBuffer | Uint8Array | ||
): ArrayBuffer; | ||
/** | ||
* Creates a new Uint8Array based on two different ArrayBuffers | ||
@@ -39,2 +50,2 @@ * @param targetBuffer The first buffer. | ||
*/ | ||
export function copyToArray(source: ArrayBuffer|any, target: any, targetOffset: number): number; | ||
export function copyToArray(source: ArrayBuffer | any, target: any, targetOffset: number): number; |
@@ -9,2 +9,10 @@ export function padTo4Bytes(byteLength) { | ||
} | ||
export function concatenateArrayBuffers(source1, source2) { | ||
const sourceArray1 = source1 instanceof ArrayBuffer ? new Uint8Array(source1) : source1; | ||
const sourceArray2 = source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2; | ||
const temp = new Uint8Array(sourceArray1.byteLength + sourceArray2.byteLength); | ||
temp.set(sourceArray1, 0); | ||
temp.set(sourceArray2, sourceArray1.byteLength); | ||
return temp.buffer; | ||
} | ||
export function copyArrayBuffer(targetBuffer, sourceBuffer, byteOffset, byteLength = sourceBuffer.byteLength) { | ||
@@ -11,0 +19,0 @@ const targetArray = new Uint8Array(targetBuffer, byteOffset, byteLength); |
@@ -5,3 +5,3 @@ import { global, isBrowser, isWorker } from '../env-utils/globals'; | ||
const LATEST = 'beta'; | ||
const VERSION = typeof "2.2.5" !== 'undefined' ? "2.2.5" : LATEST; | ||
const VERSION = typeof "2.3.0-alpha.1" !== 'undefined' ? "2.3.0-alpha.1" : LATEST; | ||
const loadLibraryPromises = {}; | ||
@@ -8,0 +8,0 @@ export async function loadLibrary(libraryUrl, moduleName = null, options = {}) { |
import assert from './env-utils/assert'; | ||
const VERSION = typeof "2.2.5" !== 'undefined' ? "2.2.5" : ''; | ||
const VERSION = typeof "2.3.0-alpha.1" !== 'undefined' ? "2.3.0-alpha.1" : ''; | ||
export function validateLoaderVersion(loader, coreVersion = VERSION) { | ||
@@ -4,0 +4,0 @@ assert(loader, 'no loader provided'); |
@@ -36,1 +36,24 @@ /** | ||
}; | ||
export type LoaderContext = { | ||
fetch?: any; | ||
loaders?: LoaderObject[]; | ||
url?: string; | ||
parse?: (data: ArrayBuffer, options?: object) => Promise<any>; | ||
parseSync?: (data: ArrayBuffer, options?: object) => any; | ||
parseInBatches?: (data: AsyncIterator<any>, options?: object) => AsyncIterator<any>; | ||
}; | ||
/** Types that can be synchronously parsed */ | ||
export type SyncDataType = string | ArrayBuffer; // TODO File | Blob can be read synchronously... | ||
/** Types that can be parsed async */ | ||
export type DataType = string | ArrayBuffer | File | Blob | Response | ReadableStream; | ||
/** Types that can be parsed in batches */ | ||
export type BatchableDataType = | ||
DataType | | ||
Iterable<ArrayBuffer> | | ||
AsyncIterable<ArrayBuffer> | | ||
Promise<AsyncIterable<ArrayBuffer>>; |
@@ -1,2 +0,3 @@ | ||
export {LoaderObject, WriterObject} from './types/types'; | ||
export {LoaderObject, WriterObject, LoaderContext, DataType, SyncDataType, BatchableDataType, | ||
IFileSystem, IRandomAccessReadFileSystem} from './types'; | ||
@@ -61,4 +62,4 @@ export {default as createWorker} from './lib/create-worker'; | ||
} from './lib/iterator-utils/text-iterators'; | ||
export {forEach, concatenateChunksAsync} from './lib/iterator-utils/async-iteration'; | ||
// REQUEST UTILS | ||
@@ -65,0 +66,0 @@ export {default as RequestScheduler} from './lib/request-utils/request-scheduler'; |
@@ -17,4 +17,5 @@ export { default as createWorker } from './lib/create-worker'; | ||
export { makeTextEncoderIterator, makeTextDecoderIterator, makeLineIterator, makeNumberedLineIterator } from './lib/iterator-utils/text-iterators'; | ||
export { forEach, concatenateChunksAsync } from './lib/iterator-utils/async-iteration'; | ||
export { default as RequestScheduler } from './lib/request-utils/request-scheduler'; | ||
export { getMeshSize as _getMeshSize, getMeshBoundingBox } from './categories/mesh/mesh-utils'; | ||
//# sourceMappingURL=index.js.map |
/** | ||
* | ||
* @param byteLength | ||
* | ||
* @param byteLength | ||
*/ | ||
@@ -9,5 +9,5 @@ export function padTo4Bytes(byteLength); | ||
* Copy a view of an ArrayBuffer into new ArrayBuffer with byteOffset = 0 | ||
* @param arrayBuffer | ||
* @param byteOffset | ||
* @param byteLength | ||
* @param arrayBuffer | ||
* @param byteOffset | ||
* @param byteLength | ||
*/ | ||
@@ -17,2 +17,13 @@ export function getZeroOffsetArrayBuffer(arrayBuffer, byteOffset, byteLength); | ||
/** | ||
* Concatenate two ArrayBuffers | ||
* @param source1 The first ArrayBuffer. | ||
* @param source2 The second ArrayBuffer. | ||
* @return A concatenated ArrayBuffer | ||
*/ | ||
export function concatenateArrayBuffers( | ||
source1: ArrayBuffer | Uint8Array, | ||
source2: ArrayBuffer | Uint8Array | ||
): ArrayBuffer; | ||
/** | ||
* Creates a new Uint8Array based on two different ArrayBuffers | ||
@@ -39,2 +50,2 @@ * @param targetBuffer The first buffer. | ||
*/ | ||
export function copyToArray(source: ArrayBuffer|any, target: any, targetOffset: number): number; | ||
export function copyToArray(source: ArrayBuffer | any, target: any, targetOffset: number): number; |
@@ -9,2 +9,10 @@ export function padTo4Bytes(byteLength) { | ||
} | ||
export function concatenateArrayBuffers(source1, source2) { | ||
var sourceArray1 = source1 instanceof ArrayBuffer ? new Uint8Array(source1) : source1; | ||
var sourceArray2 = source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2; | ||
var temp = new Uint8Array(sourceArray1.byteLength + sourceArray2.byteLength); | ||
temp.set(sourceArray1, 0); | ||
temp.set(sourceArray2, sourceArray1.byteLength); | ||
return temp.buffer; | ||
} | ||
export function copyArrayBuffer(targetBuffer, sourceBuffer, byteOffset) { | ||
@@ -11,0 +19,0 @@ var byteLength = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : sourceBuffer.byteLength; |
@@ -7,3 +7,3 @@ import _regeneratorRuntime from "@babel/runtime/regenerator"; | ||
var LATEST = 'beta'; | ||
var VERSION = typeof "2.2.5" !== 'undefined' ? "2.2.5" : LATEST; | ||
var VERSION = typeof "2.3.0-alpha.1" !== 'undefined' ? "2.3.0-alpha.1" : LATEST; | ||
var loadLibraryPromises = {}; | ||
@@ -10,0 +10,0 @@ export function loadLibrary(_x) { |
import assert from './env-utils/assert'; | ||
var VERSION = typeof "2.2.5" !== 'undefined' ? "2.2.5" : ''; | ||
var VERSION = typeof "2.3.0-alpha.1" !== 'undefined' ? "2.3.0-alpha.1" : ''; | ||
export function validateLoaderVersion(loader) { | ||
@@ -4,0 +4,0 @@ var coreVersion = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : VERSION; |
@@ -36,1 +36,24 @@ /** | ||
}; | ||
export type LoaderContext = { | ||
fetch?: any; | ||
loaders?: LoaderObject[]; | ||
url?: string; | ||
parse?: (data: ArrayBuffer, options?: object) => Promise<any>; | ||
parseSync?: (data: ArrayBuffer, options?: object) => any; | ||
parseInBatches?: (data: AsyncIterator<any>, options?: object) => AsyncIterator<any>; | ||
}; | ||
/** Types that can be synchronously parsed */ | ||
export type SyncDataType = string | ArrayBuffer; // TODO File | Blob can be read synchronously... | ||
/** Types that can be parsed async */ | ||
export type DataType = string | ArrayBuffer | File | Blob | Response | ReadableStream; | ||
/** Types that can be parsed in batches */ | ||
export type BatchableDataType = | ||
DataType | | ||
Iterable<ArrayBuffer> | | ||
AsyncIterable<ArrayBuffer> | | ||
Promise<AsyncIterable<ArrayBuffer>>; |
{ | ||
"name": "@loaders.gl/loader-utils", | ||
"version": "2.2.5", | ||
"version": "2.3.0-alpha.1", | ||
"description": "Framework-independent loaders for 3D graphics formats", | ||
@@ -44,3 +44,3 @@ "license": "MIT", | ||
}, | ||
"gitHead": "e76bd4c22137bba36e1c82745e4ecbd9ebdc2095" | ||
"gitHead": "a1469c35dbb0520a4a92f6a2242e5290e910a0d4" | ||
} |
@@ -1,2 +0,3 @@ | ||
export {LoaderObject, WriterObject} from './types/types'; | ||
export {LoaderObject, WriterObject, LoaderContext, DataType, SyncDataType, BatchableDataType, | ||
IFileSystem, IRandomAccessReadFileSystem} from './types'; | ||
@@ -61,4 +62,4 @@ export {default as createWorker} from './lib/create-worker'; | ||
} from './lib/iterator-utils/text-iterators'; | ||
export {forEach, concatenateChunksAsync} from './lib/iterator-utils/async-iteration'; | ||
// REQUEST UTILS | ||
@@ -65,0 +66,0 @@ export {default as RequestScheduler} from './lib/request-utils/request-scheduler'; |
@@ -60,2 +60,3 @@ export {default as createWorker} from './lib/create-worker'; | ||
} from './lib/iterator-utils/text-iterators'; | ||
export {forEach, concatenateChunksAsync} from './lib/iterator-utils/async-iteration'; | ||
@@ -62,0 +63,0 @@ // REQUEST UTILS |
/** | ||
* | ||
* @param byteLength | ||
* | ||
* @param byteLength | ||
*/ | ||
@@ -9,5 +9,5 @@ export function padTo4Bytes(byteLength); | ||
* Copy a view of an ArrayBuffer into new ArrayBuffer with byteOffset = 0 | ||
* @param arrayBuffer | ||
* @param byteOffset | ||
* @param byteLength | ||
* @param arrayBuffer | ||
* @param byteOffset | ||
* @param byteLength | ||
*/ | ||
@@ -17,2 +17,13 @@ export function getZeroOffsetArrayBuffer(arrayBuffer, byteOffset, byteLength); | ||
/** | ||
* Concatenate two ArrayBuffers | ||
* @param source1 The first ArrayBuffer. | ||
* @param source2 The second ArrayBuffer. | ||
* @return A concatenated ArrayBuffer | ||
*/ | ||
export function concatenateArrayBuffers( | ||
source1: ArrayBuffer | Uint8Array, | ||
source2: ArrayBuffer | Uint8Array | ||
): ArrayBuffer; | ||
/** | ||
* Creates a new Uint8Array based on two different ArrayBuffers | ||
@@ -39,2 +50,2 @@ * @param targetBuffer The first buffer. | ||
*/ | ||
export function copyToArray(source: ArrayBuffer|any, target: any, targetOffset: number): number; | ||
export function copyToArray(source: ArrayBuffer | any, target: any, targetOffset: number): number; |
@@ -14,2 +14,12 @@ export function padTo4Bytes(byteLength) { | ||
// Concatenate two ArrayBuffers | ||
export function concatenateArrayBuffers(source1, source2) { | ||
const sourceArray1 = source1 instanceof ArrayBuffer ? new Uint8Array(source1) : source1; | ||
const sourceArray2 = source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2; | ||
const temp = new Uint8Array(sourceArray1.byteLength + sourceArray2.byteLength); | ||
temp.set(sourceArray1, 0); | ||
temp.set(sourceArray2, sourceArray1.byteLength); | ||
return temp.buffer; | ||
} | ||
/* Creates a new Uint8Array based on two different ArrayBuffers | ||
@@ -16,0 +26,0 @@ * @private |
@@ -36,1 +36,24 @@ /** | ||
}; | ||
export type LoaderContext = { | ||
fetch?: any; | ||
loaders?: LoaderObject[]; | ||
url?: string; | ||
parse?: (data: ArrayBuffer, options?: object) => Promise<any>; | ||
parseSync?: (data: ArrayBuffer, options?: object) => any; | ||
parseInBatches?: (data: AsyncIterator<any>, options?: object) => AsyncIterator<any>; | ||
}; | ||
/** Types that can be synchronously parsed */ | ||
export type SyncDataType = string | ArrayBuffer; // TODO File | Blob can be read synchronously... | ||
/** Types that can be parsed async */ | ||
export type DataType = string | ArrayBuffer | File | Blob | Response | ReadableStream; | ||
/** Types that can be parsed in batches */ | ||
export type BatchableDataType = | ||
DataType | | ||
Iterable<ArrayBuffer> | | ||
AsyncIterable<ArrayBuffer> | | ||
Promise<AsyncIterable<ArrayBuffer>>; |
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
No v1
Quality: Package is not semver >=1. This means it is not stable and does not support ^ ranges.
Found 1 instance in 1 package