@zenfs/core
Advanced tools
/// <reference types="node" resolution-mode="require"/>
import type { ParsedPath } from 'node:path';
/** Emulated current working directory — always the root. */
export declare const cwd = "/";
/** POSIX path separator (this module emulates the posix API only). */
export declare const sep = "/";
/** Collapses '.' and '..' segments; keeps unmatched '..' when `allowAboveRoot` is true. */
export declare function normalizeString(path: string, allowAboveRoot: boolean): string;
/** Ensures a non-empty extension starts with a dot; returns '' for a falsy extension. */
export declare function formatExt(ext: string): string;
/** Resolves a sequence of path segments into an absolute (or cwd-relative) path. */
export declare function resolve(...args: string[]): string;
/** Normalizes a path, preserving a single trailing separator if present. */
export declare function normalize(path: string): string;
/** True when `path` is non-empty and starts with '/'. */
export declare function isAbsolute(path: string): boolean;
/** Joins non-empty segments with '/' and normalizes the result. */
export declare function join(...args: string[]): string;
/** Computes the relative path from `from` to `to` (both resolved first). */
export declare function relative(from: string, to: string): string;
/** Returns the directory portion of `path` ('.' when there is none). */
export declare function dirname(path: string): string;
/** Returns the last path component, optionally with `suffix` stripped. */
export declare function basename(path: string, suffix?: string): string;
/** Returns the extension of the last path component, including the dot. */
export declare function extname(path: string): string;
/** Builds a path string from a ParsedPath-like object. */
export declare function format(pathObject: ParsedPath): string;
/** Splits a path into root/dir/base/name/ext components. */
export declare function parse(path: string): ParsedPath;
| /* | ||
| Copyright Joyent, Inc. and other Node contributors. | ||
| Permission is hereby granted, free of charge, to any person obtaining a | ||
| copy of this software and associated documentation files (the | ||
| "Software"), to deal in the Software without restriction, including | ||
| without limitation the rights to use, copy, modify, merge, publish, | ||
| distribute, sublicense, and/or sell copies of the Software, and to permit | ||
| persons to whom the Software is furnished to do so, subject to the | ||
| following conditions: | ||
| The above copyright notice and this permission notice shall be included | ||
| in all copies or substantial portions of the Software. | ||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | ||
| OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | ||
| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | ||
| NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | ||
| DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | ||
| OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | ||
| USE OR OTHER DEALINGS IN THE SOFTWARE. | ||
| */ | ||
// Emulated working directory: ZenFS always treats the root as the cwd
// (used by resolve() when no segment is absolute).
export const cwd = '/';
// POSIX path separator — this module emulates the posix path API only.
export const sep = '/';
/**
 * Asserts that `str` is a primitive string.
 * @param str value to check
 * @param name parameter name to include in the error message
 * @throws TypeError when `str` is not a string
 */
function validateString(str, name) {
    if (typeof str === 'string') {
        return;
    }
    throw new TypeError(`"${name}" is not a string`);
}
/**
 * Asserts that `str` has typeof 'object'.
 * Note: `null` passes this check since `typeof null === 'object'`.
 * @param str value to check
 * @param name parameter name to include in the error message
 * @throws TypeError when `str` is not an object
 */
function validateObject(str, name) {
    if (typeof str === 'object') {
        return;
    }
    throw new TypeError(`"${name}" is not an object`);
}
// Resolves . and .. elements in a path with directory names
/**
 * Normalizes the body of `path`: collapses repeated separators and resolves
 * '.' and '..' segments. The result carries no leading or trailing '/';
 * callers re-attach the root slash themselves.
 *
 * @param path the path to normalize
 * @param allowAboveRoot when true, unmatched '..' segments are kept in the
 *        output (relative paths may climb above their start); when false
 *        they are dropped (an absolute path cannot go above root).
 * @returns the normalized path body
 */
export function normalizeString(path, allowAboveRoot) {
    let res = '';
    let lastSegmentLength = 0;
    let lastSlash = -1;
    let dots = 0; // dot count of current segment; -1 once a non-dot is seen
    let char = '\x00';
    // Iterate one index past the end so the final segment gets flushed.
    for (let i = 0; i <= path.length; ++i) {
        if (i < path.length) {
            char = path[i];
        }
        else if (char == '/') {
            // Trailing separator: the last segment was already processed.
            break;
        }
        else {
            // Treat end-of-string as a virtual separator to flush the segment.
            char = '/';
        }
        if (char == '/') {
            if (lastSlash === i - 1 || dots === 1) {
                // NOOP — empty segment ('//') or a lone '.': skip entirely.
            }
            else if (dots === 2) {
                // '..' segment: pop the previous segment unless res already
                // ends in an unmatched '..'.
                if (res.length < 2 || lastSegmentLength !== 2 || res.at(-1) !== '.' || res.at(-2) !== '.') {
                    if (res.length > 2) {
                        const lastSlashIndex = res.lastIndexOf('/');
                        if (lastSlashIndex === -1) {
                            // Only one segment accumulated — drop it entirely.
                            res = '';
                            lastSegmentLength = 0;
                        }
                        else {
                            // Remove the last segment and recompute its length.
                            res = res.slice(0, lastSlashIndex);
                            lastSegmentLength = res.length - 1 - res.lastIndexOf('/');
                        }
                        lastSlash = i;
                        dots = 0;
                        continue;
                    }
                    else if (res.length !== 0) {
                        // res holds a single 1–2 char segment — drop it.
                        res = '';
                        lastSegmentLength = 0;
                        lastSlash = i;
                        dots = 0;
                        continue;
                    }
                }
                // Nothing to pop: keep '..' only when climbing is allowed.
                if (allowAboveRoot) {
                    res += res.length > 0 ? '/..' : '..';
                    lastSegmentLength = 2;
                }
            }
            else {
                // Ordinary segment: append it to the result.
                if (res.length > 0)
                    res += '/' + path.slice(lastSlash + 1, i);
                else
                    res = path.slice(lastSlash + 1, i);
                lastSegmentLength = i - lastSlash - 1;
            }
            lastSlash = i;
            dots = 0;
        }
        else if (char === '.' && dots !== -1) {
            // Still looking like a dot segment ('.', '..') — count the dots.
            ++dots;
        }
        else {
            // Non-dot character: this segment is a regular name.
            dots = -1;
        }
    }
    return res;
}
/**
 * Ensures a non-empty extension begins with a dot.
 * Returns the empty string for a falsy extension (e.g. undefined or '').
 */
export function formatExt(ext) {
    if (!ext) {
        return '';
    }
    const prefix = ext[0] === '.' ? '' : '.';
    return `${prefix}${ext}`;
}
/**
 * Resolves a sequence of path segments into an absolute path.
 * Segments are consumed right-to-left until one is absolute; if none is,
 * the emulated cwd ('/') is prepended. The result is normalized.
 * @throws TypeError when any provided segment is not a string
 */
export function resolve(...args) {
    let combined = '';
    let foundAbsolute = false;
    // Walk from the last segment backwards; index -1 injects the cwd.
    for (let i = args.length - 1; i >= -1; i--) {
        if (foundAbsolute) {
            break;
        }
        const segment = i < 0 ? cwd : args[i];
        validateString(segment, `paths[${i}]`);
        // Empty segments contribute nothing.
        if (!segment.length) {
            continue;
        }
        combined = `${segment}/${combined}`;
        foundAbsolute = segment[0] === '/';
    }
    // Collapse '.'/'..'; allow climbing only for (unexpected) relative results.
    const normalized = normalizeString(combined, !foundAbsolute);
    if (foundAbsolute) {
        return `/${normalized}`;
    }
    return normalized.length > 0 ? normalized : '.';
}
/**
 * Normalizes `path`: collapses '.'/'..' and duplicate separators while
 * preserving whether the path was absolute and whether it had a trailing '/'.
 * @throws TypeError when `path` is not a string
 */
export function normalize(path) {
    validateString(path, 'path');
    if (!path.length) {
        return '.';
    }
    const rooted = path[0] === '/';
    const hadTrailingSlash = path.endsWith('/');
    const body = normalizeString(path, !rooted);
    // Everything collapsed away (e.g. '.', './', '/..').
    if (!body.length) {
        if (rooted) {
            return '/';
        }
        return hadTrailingSlash ? './' : '.';
    }
    const tail = hadTrailingSlash ? '/' : '';
    return rooted ? `/${body}${tail}` : `${body}${tail}`;
}
/**
 * Returns whether `path` is an absolute POSIX path (non-empty and starting
 * with '/').
 * @throws TypeError when `path` is not a string
 */
export function isAbsolute(path) {
    validateString(path, 'path');
    return path.startsWith('/');
}
/**
 * Joins all non-empty segments with '/' and normalizes the result.
 * Returns '.' when no segments (or only empty ones) are given.
 * @throws TypeError when any segment is not a string
 */
export function join(...args) {
    if (!args.length) {
        return '.';
    }
    const segments = [];
    for (const arg of args) {
        // Validate every argument, even empty ones, before skipping.
        validateString(arg, 'path');
        if (arg.length > 0) {
            segments.push(arg);
        }
    }
    if (!segments.length) {
        return '.';
    }
    return normalize(segments.join('/'));
}
/**
 * Computes the relative path from `from` to `to`.
 * Both inputs are resolved to absolute paths first, so the comparison below
 * can skip the shared leading '/' (hence fromStart/toStart of 1).
 * Returns '' when both resolve to the same path.
 * @throws TypeError when either argument is not a string
 */
export function relative(from, to) {
    validateString(from, 'from');
    validateString(to, 'to');
    if (from === to)
        return '';
    // Trim leading forward slashes.
    from = resolve(from);
    to = resolve(to);
    if (from === to)
        return '';
    // Both paths are now absolute; index 0 is '/', so compare from index 1.
    const fromStart = 1;
    const fromEnd = from.length;
    const fromLen = fromEnd - fromStart;
    const toStart = 1;
    const toLen = to.length - toStart;
    // Compare paths to find the longest common path from root
    const length = fromLen < toLen ? fromLen : toLen;
    let lastCommonSep = -1; // index (relative to start) of last shared '/'
    let i = 0;
    for (; i < length; i++) {
        const fromCode = from[fromStart + i];
        if (fromCode !== to[toStart + i])
            break;
        else if (fromCode === '/')
            lastCommonSep = i;
    }
    // If the shorter path was fully consumed, check prefix relationships.
    if (i === length) {
        if (toLen > length) {
            if (to[toStart + i] === '/') {
                // We get here if `from` is the exact base path for `to`.
                // For example: from='/foo/bar'; to='/foo/bar/baz'
                return to.slice(toStart + i + 1);
            }
            if (i === 0) {
                // We get here if `from` is the root
                // For example: from='/'; to='/foo'
                return to.slice(toStart + i);
            }
        }
        else if (fromLen > length) {
            if (from[fromStart + i] === '/') {
                // We get here if `to` is the exact base path for `from`.
                // For example: from='/foo/bar/baz'; to='/foo/bar'
                lastCommonSep = i;
            }
            else if (i === 0) {
                // We get here if `to` is the root.
                // For example: from='/foo/bar'; to='/'
                lastCommonSep = 0;
            }
        }
    }
    let out = '';
    // Generate the relative path based on the path difference between `to`
    // and `from`: one '..' for each segment of `from` past the common prefix.
    for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) {
        if (i === fromEnd || from[i] === '/') {
            out += out.length === 0 ? '..' : '/..';
        }
    }
    // Lastly, append the rest of the destination (`to`) path that comes after
    // the common path parts.
    return `${out}${to.slice(toStart + lastCommonSep)}`;
}
/**
 * Returns the directory portion of `path`, mirroring Node's posix dirname:
 * trailing slashes are ignored, '.' is returned when there is no directory,
 * and the root ('/' or the special '//') is preserved.
 * @throws TypeError when `path` is not a string
 */
export function dirname(path) {
    validateString(path, 'path');
    if (!path.length) {
        return '.';
    }
    const rooted = path[0] === '/';
    let cut = -1; // index of the separator before the last component
    let seenNonSep = false;
    let i = path.length - 1;
    // Scan backwards, skipping trailing separators, until the separator
    // preceding the last component is found. Index 0 is never examined so
    // a leading root slash is handled by `rooted` below.
    while (i >= 1) {
        if (path[i] !== '/') {
            seenNonSep = true;
        }
        else if (seenNonSep) {
            cut = i;
            break;
        }
        --i;
    }
    if (cut === -1) {
        return rooted ? '/' : '.';
    }
    // Matches Node: dirname('//a') is '//'.
    if (rooted && cut === 1) {
        return '//';
    }
    return path.slice(0, cut);
}
/**
 * Returns the last component of `path`, ignoring trailing slashes.
 * When `suffix` is given and the component ends with it (but is not equal
 * to it in its entirety after the last slash), the suffix is stripped.
 * @throws TypeError when `path` (or a provided `suffix`) is not a string
 */
export function basename(path, suffix) {
    if (suffix !== undefined)
        validateString(suffix, 'ext');
    validateString(path, 'path');
    let start = 0;
    let end = -1;
    let matchedSlash = true;
    // Fast path with suffix stripping: scan backwards once, matching the
    // suffix characters while locating the component boundaries.
    if (suffix !== undefined && suffix.length > 0 && suffix.length <= path.length) {
        if (suffix === path)
            return '';
        let extIdx = suffix.length - 1;
        let firstNonSlashEnd = -1;
        for (let i = path.length - 1; i >= 0; --i) {
            if (path[i] === '/') {
                // If we reached a path separator that was not part of a set of path
                // separators at the end of the string, stop now
                if (!matchedSlash) {
                    start = i + 1;
                    break;
                }
            }
            else {
                if (firstNonSlashEnd === -1) {
                    // We saw the first non-path separator, remember this index in case
                    // we need it if the extension ends up not matching
                    matchedSlash = false;
                    firstNonSlashEnd = i + 1;
                }
                if (extIdx >= 0) {
                    // Try to match the explicit extension
                    if (path[i] === suffix[extIdx]) {
                        if (--extIdx === -1) {
                            // We matched the extension, so mark this as the end of our path
                            // component
                            end = i;
                        }
                    }
                    else {
                        // Extension does not match, so our result is the entire path
                        // component
                        extIdx = -1;
                        end = firstNonSlashEnd;
                    }
                }
            }
        }
        // If the whole component equals the suffix, keep the full component.
        if (start === end)
            end = firstNonSlashEnd;
        else if (end === -1)
            end = path.length;
        return path.slice(start, end);
    }
    // No suffix: just find the last component, skipping trailing slashes.
    for (let i = path.length - 1; i >= 0; --i) {
        if (path[i] === '/') {
            // If we reached a path separator that was not part of a set of path
            // separators at the end of the string, stop now
            if (!matchedSlash) {
                start = i + 1;
                break;
            }
        }
        else if (end === -1) {
            // We saw the first non-path separator, mark this as the end of our
            // path component
            matchedSlash = false;
            end = i + 1;
        }
    }
    if (end === -1)
        return '';
    return path.slice(start, end);
}
/**
 * Returns the extension of the last path component, including the leading
 * dot — or '' when there is none. Matches Node semantics: dotfiles
 * ('.bashrc') and '..' have no extension; 'a.b.c' yields '.c'.
 * @throws TypeError when `path` is not a string
 */
export function extname(path) {
    validateString(path, 'path');
    let startDot = -1;
    let startPart = 0;
    let end = -1;
    let matchedSlash = true;
    // Track the state of characters (if any) we see before our first dot and
    // after any path separator we find
    // 0 = only dots/nothing so far, 1 = multiple leading dots, -1 = saw a
    // non-dot character before the (right-most) dot.
    let preDotState = 0;
    for (let i = path.length - 1; i >= 0; --i) {
        if (path[i] === '/') {
            // If we reached a path separator that was not part of a set of path
            // separators at the end of the string, stop now
            if (!matchedSlash) {
                startPart = i + 1;
                break;
            }
            continue;
        }
        if (end === -1) {
            // We saw the first non-path separator, mark this as the end of our
            // extension
            matchedSlash = false;
            end = i + 1;
        }
        if (path[i] === '.') {
            // If this is our first dot, mark it as the start of our extension
            if (startDot === -1)
                startDot = i;
            else if (preDotState !== 1)
                preDotState = 1;
        }
        else if (startDot !== -1) {
            // We saw a non-dot and non-path separator before our dot, so we should
            // have a good chance at having a non-empty extension
            preDotState = -1;
        }
    }
    if (startDot === -1 ||
        end === -1 ||
        // We saw a non-dot character immediately before the dot
        preDotState === 0 ||
        // The (right-most) trimmed path component is exactly '..'
        (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1)) {
        return '';
    }
    return path.slice(startDot, end);
}
/**
 * Builds a path string from a ParsedPath-like object.
 * `base` wins over `name` + `ext`; `dir` wins over `root`. When the
 * directory is exactly the root, no extra separator is inserted.
 * @throws TypeError when `pathObject` is not an object
 */
export function format(pathObject) {
    validateObject(pathObject, 'pathObject');
    const directory = pathObject.dir || pathObject.root;
    const name = pathObject.name || '';
    const fileName = pathObject.base || `${name}${formatExt(pathObject.ext)}`;
    if (!directory) {
        return fileName;
    }
    if (directory === pathObject.root) {
        // The root already ends in a separator ('/'), so concatenate directly.
        return `${directory}${fileName}`;
    }
    return `${directory}/${fileName}`;
}
/**
 * Splits `path` into { root, dir, base, ext, name }, matching Node's posix
 * parse. Uses the same backwards scan and preDotState machine as extname():
 * dotfiles and '..' get an empty ext.
 * @throws TypeError when `path` is not a string
 */
export function parse(path) {
    validateString(path, 'path');
    // Note: path[0] on '' is undefined, so isAbsolute is false and root stays ''.
    const isAbsolute = path[0] === '/';
    const ret = { root: isAbsolute ? '/' : '', dir: '', base: '', ext: '', name: '' };
    if (path.length === 0)
        return ret;
    const start = isAbsolute ? 1 : 0;
    let startDot = -1;
    let startPart = 0;
    let end = -1;
    let matchedSlash = true;
    let i = path.length - 1;
    // Track the state of characters (if any) we see before our first dot and
    // after any path separator we find
    // 0 = only dots/nothing so far, 1 = multiple leading dots, -1 = saw a
    // non-dot character before the (right-most) dot.
    let preDotState = 0;
    // Get non-dir info
    for (; i >= start; --i) {
        if (path[i] === '/') {
            // If we reached a path separator that was not part of a set of path
            // separators at the end of the string, stop now
            if (!matchedSlash) {
                startPart = i + 1;
                break;
            }
            continue;
        }
        if (end === -1) {
            // We saw the first non-path separator, mark this as the end of our
            // extension
            matchedSlash = false;
            end = i + 1;
        }
        if (path[i] === '.') {
            // If this is our first dot, mark it as the start of our extension
            if (startDot === -1)
                startDot = i;
            else if (preDotState !== 1)
                preDotState = 1;
        }
        else if (startDot !== -1) {
            // We saw a non-dot and non-path separator before our dot, so we should
            // have a good chance at having a non-empty extension
            preDotState = -1;
        }
    }
    if (end !== -1) {
        // Skip the leading root slash when the component starts at the root.
        const start = startPart === 0 && isAbsolute ? 1 : startPart;
        if (startDot === -1 ||
            // We saw a non-dot character immediately before the dot
            preDotState === 0 ||
            // The (right-most) trimmed path component is exactly '..'
            (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1)) {
            // No extension: base and name are the whole component.
            ret.base = ret.name = path.slice(start, end);
        }
        else {
            ret.name = path.slice(start, startDot);
            ret.base = path.slice(start, end);
            ret.ext = path.slice(startDot, end);
        }
    }
    if (startPart > 0)
        ret.dir = path.slice(0, startPart - 1);
    else if (isAbsolute)
        ret.dir = '/';
    return ret;
}
+297
| # ZenFS | ||
| ZenFS is an in-browser file system that emulates the [Node JS file system API](http://nodejs.org/api/fs.html) and supports storing and retrieving files from various backends. ZenFS also integrates nicely into the Emscripten file system. | ||
| ## Backends | ||
| ZenFS is highly extensible, and includes many builtin filesystem backends: | ||
| - `InMemory`: Stores files in-memory. It is a temporary file store that clears when the user navigates away. | ||
| - `OverlayFS`: Mount a read-only file system as read-write by overlaying a writable file system on top of it. Like Docker's overlayfs, it will only write changed files to the writable file system. | ||
| - `AsyncMirror`: Use an asynchronous backend synchronously. Invaluable for Emscripten; let your Emscripten applications write to larger file stores with no additional effort! | ||
- `AsyncMirror` loads the entire contents of the async file system into a synchronous backend during construction. It performs operations on the synchronous file system and then queues them to be mirrored onto the asynchronous backend.
| - `FolderAdapter`: Wraps a file system, and scopes all interactions to a subfolder of that file system. | ||
More backends can be defined by separate libraries, so long as they implement `ZenFS.FileSystem`. Multiple backends can be active at once at different locations in the directory hierarchy.
| ZenFS supports a number of other backends (as `@zenfs/fs-[name]`). | ||
| For more information, see the [API documentation for ZenFS](https://zen-fs.github.io/core). | ||
| ## Installing | ||
| ```sh | ||
| npm install @zenfs/core | ||
| ``` | ||
| ## Building | ||
| - Make sure you have Node and NPM installed. You must have Node v18 or newer. | ||
| - Install dependencies with `npm install` | ||
| - Build using `npm run build` | ||
| - You can find the built code in `dist`. | ||
| ## Usage | ||
| > 🛈 The examples are written in ESM. If you are using CJS, you can `require` the package. If running in a browser you can add a script tag to your HTML pointing to the `browser.min.js` and use ZenFS via the global `ZenFS` object. | ||
| ```js | ||
| import fs from '@zenfs/core'; | ||
| fs.writeFileSync('/test.txt', 'Cool, I can do this in the browser!'); | ||
| const contents = fs.readFileSync('/test.txt', 'utf-8'); | ||
| console.log(contents); | ||
| ``` | ||
| #### Using different backends | ||
An `InMemory` backend is created by default. If you would like to use a different one, you must configure ZenFS. It is recommended to do so using the `configure` function. Here is an example using the `Storage` backend from `@zenfs/fs-dom`:
| ```js | ||
| import { configure, fs, registerBackend } from '@zenfs/core'; | ||
| import { StorageFileSystem } from '@zenfs/fs-dom'; | ||
| registerBackend(StorageFileSystem); | ||
| // you can also add a callback as the last parameter instead of using promises | ||
| await configure({ fs: 'Storage' }); | ||
| if (!fs.existsSync('/test.txt')) { | ||
| fs.writeFileSync('/test.txt', 'This will persist across reloads!'); | ||
| } | ||
| const contents = fs.readFileSync('/test.txt', 'utf-8'); | ||
| console.log(contents); | ||
| ``` | ||
| #### Using multiple backends | ||
| You can use multiple backends by passing an object to `configure` which maps paths to file systems. The following example mounts a zip file to `/zip`, in-memory storage to `/tmp`, and IndexedDB storage to `/home` (note that `/` has the default in-memory backend): | ||
| ```js | ||
| import { configure, registerBackend } from '@zenfs/core'; | ||
| import { IndexedDBFileSystem } from '@zenfs/fs-dom'; | ||
| import { ZipFS } from '@zenfs/fs-zip'; | ||
| import Buffer from 'buffer'; | ||
| registerBackend(IndexedDBFileSystem, ZipFS); | ||
| const zipData = await (await fetch('mydata.zip')).arrayBuffer(); | ||
| await configure({ | ||
| '/mnt/zip': { | ||
| fs: 'ZipFS', | ||
| options: { | ||
| zipData: Buffer.from(zipData) | ||
| } | ||
| }, | ||
| '/tmp': 'InMemory', | ||
| '/home': 'IndexedDB', | ||
});
| ``` | ||
| #### FS Promises API | ||
| The FS promises API is exposed as `promises`. | ||
| ```js | ||
| import { configure, promises, registerBackend } from '@zenfs/core'; | ||
| import { IndexedDBFileSystem } from '@zenfs/fs-dom'; | ||
| registerBackend(IndexedDBFileSystem); | ||
| await configure({ '/': 'IndexedDB' }); | ||
| const exists = await promises.exists('/myfile.txt'); | ||
| if (!exists) { | ||
	await promises.write('/myfile.txt', 'Lots of persistent data');
| } | ||
| ``` | ||
ZenFS does _not_ provide a separate method for importing promises in its built form. If you are using TypeScript, you can import the promises API from the source code (perhaps to reduce your bundle size). Doing so is not recommended, as the files may be moved without notice.
| #### Using asynchronous backends synchronously | ||
You may have noticed that attempting to use a synchronous method on an asynchronous backend (e.g. IndexedDB) results in a "not supported" error (`ENOTSUP`). If you wish to use an asynchronous backend synchronously you need to wrap it in an `AsyncMirror`:
| ```js | ||
| import { configure, fs } from '@zenfs/core'; | ||
| import { IndexedDBFileSystem } from '@zenfs/fs-dom'; | ||
| registerBackend(IndexedDBFileSystem); | ||
| await configure({ | ||
| '/': { fs: 'AsyncMirror', options: { sync: { fs: 'InMemory' }, async: { fs: 'IndexedDB' } } } | ||
| }); | ||
fs.writeFileSync('/persistent.txt', 'My persistent data'); // This fails if you configure the FS as IndexedDB
| ``` | ||
| ### Advanced usage | ||
| #### Creating backends | ||
| If you would like to create backends without configure, you may do so by importing the backend's class and calling its `Create` method. You can import the backend directly or with `backends`: | ||
| ```js | ||
| import { configure, backends, InMemory } from '@zenfs/core'; | ||
| console.log(backends.InMemory === InMemory) // they are the same | ||
| const inMemoryFS = await InMemory.Create(); | ||
| ``` | ||
| > ⚠ Instances of backends follow the ***internal*** ZenFS API. You should never use a backend's method unless you are extending a backend. | ||
| Coming soon: | ||
| ```js | ||
| import { configure, InMemory } from '@zenfs/core'; | ||
| const inMemoryFS = new InMemory(); | ||
| await inMemoryFS.whenReady(); | ||
| ``` | ||
| #### Mounting | ||
| If you would like to mount and unmount backends, you can do so using the `mount` and `umount` functions: | ||
| ```js | ||
| import { fs, InMemory } from '@zenfs/core'; | ||
| const inMemoryFS = await InMemory.Create(); // create an FS instance | ||
| fs.mount('/tmp', inMemoryFS); // mount | ||
| fs.umount('/tmp'); // unmount /tmp | ||
| ``` | ||
| This could be used in the "multiple backends" example like so: | ||
| ```js | ||
| import { IndexedDBFileSystem } from '@zenfs/fs-dom'; | ||
| import { ZipFS } from '@zenfs/fs-zip'; | ||
| import Buffer from 'buffer'; | ||
| registerBackend(IndexedDBFileSystem); | ||
| await configure({ | ||
| '/tmp': 'InMemory', | ||
| '/home': 'IndexedDB', | ||
});
| fs.mkdirSync('/mnt'); | ||
| const res = await fetch('mydata.zip'); | ||
| const zipData = Buffer.from(await res.arrayBuffer()); | ||
| const zipFs = await ZipFS.Create({ zipData }); | ||
| fs.mount('/mnt/zip', zipFs); | ||
| // do stuff with the mounted zip | ||
| fs.umount('/mnt/zip'); // finished using the zip | ||
| ``` | ||
| ## Using with bundlers | ||
| ZenFS exports a drop-in for Node's `fs` module (up to the version of `@types/node` in package.json), so you can use it for your bundler of preference using the default export. | ||
| #### ESBuild | ||
| tsconfig.json | ||
| ```json | ||
| { | ||
| ... | ||
| "paths": { | ||
| "fs": ["node_modules/zenfs/dist/index.js"] | ||
| } | ||
| ... | ||
| } | ||
| ``` | ||
| [Why tsconfig.json?](https://stackoverflow.com/a/71935037/17637456) | ||
| Webpack: | ||
| ```js | ||
| module.exports = { | ||
| // ... | ||
| resolve: { | ||
| alias: { | ||
| fs: require.resolve('zenfs'), | ||
| }, | ||
| }, | ||
| // ... | ||
| }; | ||
| ``` | ||
| Rollup: | ||
| ```js | ||
| import alias from '@rollup/plugin-alias'; | ||
| export default { | ||
| // ... | ||
| plugins: [ | ||
| alias({ | ||
| entries: [{ find: 'fs', replacement: 'zenfs' }], | ||
| }), | ||
| ], | ||
| // ... | ||
| }; | ||
| ``` | ||
| ## Using with Emscripten | ||
| You can use any _synchronous_ ZenFS file systems with Emscripten. | ||
| ```js | ||
| import { EmscriptenFSPlugin } from '@zenfs/fs-emscripten'; | ||
| const BFS = new EmscriptenFSPlugin(); // Create a ZenFS Emscripten FS plugin. | ||
| FS.createFolder(FS.root, 'data', true, true); // Create the folder to turn into a mount point. | ||
| FS.mount(BFS, { root: '/' }, '/data'); // Mount BFS's root folder into /data. | ||
| ``` | ||
| If you want to use an asynchronous backend, you must wrap it in an `AsyncMirror`. | ||
| ### Testing | ||
| Run unit tests with `npm test`. | ||
| ### Citing | ||
| ZenFS is a component of the [Doppio](http://doppiojvm.org/) and [Browsix](https://browsix.org/) research projects from the PLASMA lab at the University of Massachusetts Amherst. If you decide to use ZenFS in a project that leads to a publication, please cite the academic papers on [Doppio](https://dl.acm.org/citation.cfm?doid=2594291.2594293) and [Browsix](https://dl.acm.org/citation.cfm?id=3037727): | ||
| > John Vilk and Emery D. Berger. Doppio: Breaking the Browser Language Barrier. In | ||
| > _Proceedings of the 35th ACM SIGPLAN Conference on Programming Language Design and Implementation_ | ||
| > (2014), pp. 508–518. | ||
| ```bibtex | ||
| @inproceedings{VilkDoppio, | ||
| author = {John Vilk and | ||
| Emery D. Berger}, | ||
| title = {{Doppio: Breaking the Browser Language Barrier}}, | ||
| booktitle = {Proceedings of the 35th {ACM} {SIGPLAN} Conference on Programming Language Design and Implementation}, | ||
| pages = {508--518}, | ||
| year = {2014}, | ||
| url = {http://doi.acm.org/10.1145/2594291.2594293}, | ||
| doi = {10.1145/2594291.2594293} | ||
| } | ||
| ``` | ||
| > Bobby Powers, John Vilk, and Emery D. Berger. Browsix: Bridging the Gap Between Unix and the Browser. In _Proceedings of the Twenty-Second International Conference on Architectural Support for Programming Languages and Operating Systems_ (2017), pp. 253–266. | ||
| ```bibtex | ||
| @inproceedings{PowersBrowsix, | ||
| author = {Bobby Powers and | ||
| John Vilk and | ||
| Emery D. Berger}, | ||
| title = {{Browsix: Bridging the Gap Between Unix and the Browser}}, | ||
| booktitle = {Proceedings of the Twenty-Second International Conference on Architectural | ||
| Support for Programming Languages and Operating Systems}, | ||
| pages = {253--266}, | ||
| year = {2017}, | ||
| url = {http://doi.acm.org/10.1145/3037697.3037727}, | ||
| doi = {10.1145/3037697.3037727} | ||
| } | ||
| ``` | ||
| ### License | ||
| ZenFS is licensed under the MIT License. See `LICENSE` for details. |
@@ -14,3 +14,3 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
| import { FileFlag, PreloadFile } from '../file.js'; | ||
| import * as path from 'path'; | ||
| import { join } from '../emulation/path.js'; | ||
| import { Cred } from '../cred.js'; | ||
@@ -189,3 +189,3 @@ import { CreateBackend } from './backend.js'; | ||
| for (const file of files) { | ||
| yield copyItem(path.join(p, file)); | ||
| yield copyItem(join(p, file)); | ||
| } | ||
@@ -192,0 +192,0 @@ }), copyFile = (p, mode) => __awaiter(this, void 0, void 0, function* () { |
@@ -10,3 +10,3 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
| }; | ||
| import * as path from 'path'; | ||
| import { dirname, basename, join, resolve } from '../emulation/path.js'; | ||
| import { ApiError, ErrorCode } from '../ApiError.js'; | ||
@@ -208,3 +208,3 @@ import { W_OK, R_OK } from '../emulation/constants.js'; | ||
| try { | ||
| const tx = this.store.beginTransaction('readwrite'), oldParent = path.dirname(oldPath), oldName = path.basename(oldPath), newParent = path.dirname(newPath), newName = path.basename(newPath), | ||
| const tx = this.store.beginTransaction('readwrite'), oldParent = dirname(oldPath), oldName = basename(oldPath), newParent = dirname(newPath), newName = basename(newPath), | ||
| // Remove oldPath from parent's directory listing. | ||
@@ -355,3 +355,3 @@ oldDirNode = yield this.findINode(tx, oldParent), oldDirList = yield this.getDirListing(tx, oldParent, oldDirNode); | ||
| // We use the _findInode helper because we actually need the INode id. | ||
| fileInodeId = yield this._findINode(tx, path.dirname(p), path.basename(p)), fileInode = yield this.getINode(tx, p, fileInodeId), inodeChanged = fileInode.update(stats); | ||
| fileInodeId = yield this._findINode(tx, dirname(p), basename(p)), fileInode = yield this.getINode(tx, p, fileInodeId), inodeChanged = fileInode.update(stats); | ||
| try { | ||
@@ -399,3 +399,3 @@ // Sync data. | ||
| return __awaiter(this, void 0, void 0, function* () { | ||
| const currentPath = path.posix.join(parent, filename); | ||
| const currentPath = join(parent, filename); | ||
| if (visited.has(currentPath)) { | ||
@@ -431,3 +431,3 @@ throw new ApiError(ErrorCode.EIO, 'Infinite loop detected while finding inode', currentPath); | ||
| else { | ||
| throw ApiError.ENOENT(path.resolve(parent, filename)); | ||
| throw ApiError.ENOENT(resolve(parent, filename)); | ||
| } | ||
@@ -449,3 +449,3 @@ } | ||
| else { | ||
| throw ApiError.ENOENT(path.resolve(parent, filename)); | ||
| throw ApiError.ENOENT(resolve(parent, filename)); | ||
| } | ||
@@ -462,3 +462,3 @@ } | ||
| return __awaiter(this, void 0, void 0, function* () { | ||
| const id = yield this._findINode(tx, path.dirname(p), path.basename(p), visited); | ||
| const id = yield this._findINode(tx, dirname(p), basename(p), visited); | ||
| return this.getINode(tx, p, id); | ||
@@ -542,3 +542,3 @@ }); | ||
| return __awaiter(this, void 0, void 0, function* () { | ||
| const parentDir = path.dirname(p), fname = path.basename(p), parentNode = yield this.findINode(tx, parentDir), dirListing = yield this.getDirListing(tx, parentDir, parentNode), currTime = new Date().getTime(); | ||
| const parentDir = dirname(p), fname = basename(p), parentNode = yield this.findINode(tx, parentDir), dirListing = yield this.getDirListing(tx, parentDir, parentNode), currTime = new Date().getTime(); | ||
| //Check that the creater has correct access | ||
@@ -594,3 +594,3 @@ if (!parentNode.toStats().hasAccess(W_OK, cred)) { | ||
| } | ||
| const tx = this.store.beginTransaction('readwrite'), parent = path.dirname(p), parentNode = yield this.findINode(tx, parent), parentListing = yield this.getDirListing(tx, parent, parentNode), fileName = path.basename(p); | ||
| const tx = this.store.beginTransaction('readwrite'), parent = dirname(p), parentNode = yield this.findINode(tx, parent), parentListing = yield this.getDirListing(tx, parent, parentNode), fileName = basename(p); | ||
| if (!parentListing[fileName]) { | ||
@@ -597,0 +597,0 @@ throw ApiError.ENOENT(p); |
@@ -12,3 +12,3 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
| import { BaseFileSystem } from '../filesystem.js'; | ||
| import * as path from 'path'; | ||
| import { relative, join } from '../emulation/path.js'; | ||
| import { ApiError } from '../ApiError.js'; | ||
@@ -84,3 +84,3 @@ import { Cred } from '../cred.js'; | ||
| if (p) { | ||
| p = '/' + path.relative(folder, p); | ||
| p = '/' + relative(folder, p); | ||
| err.message = err.message.replace(err.path, p); | ||
@@ -117,6 +117,6 @@ err.path = p; | ||
| if (wrapFirst) { | ||
| arguments[0] = path.join(this._folder, arguments[0]); | ||
| arguments[0] = join(this._folder, arguments[0]); | ||
| } | ||
| if (wrapSecond) { | ||
| arguments[1] = path.join(this._folder, arguments[1]); | ||
| arguments[1] = join(this._folder, arguments[1]); | ||
| } | ||
@@ -133,6 +133,6 @@ arguments[arguments.length - 1] = wrapCallback(this._folder, arguments[arguments.length - 1]); | ||
| if (wrapFirst) { | ||
| arguments[0] = path.join(this._folder, arguments[0]); | ||
| arguments[0] = join(this._folder, arguments[0]); | ||
| } | ||
| if (wrapSecond) { | ||
| arguments[1] = path.join(this._folder, arguments[1]); | ||
| arguments[1] = join(this._folder, arguments[1]); | ||
| } | ||
@@ -139,0 +139,0 @@ return this._wrapped[name].apply(this._wrapped, arguments); |
@@ -16,3 +16,3 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
| import LockedFS from './Locked.js'; | ||
| import * as path from 'path'; | ||
| import { resolve, dirname } from '../emulation/path.js'; | ||
| import { Cred } from '../cred.js'; | ||
@@ -191,3 +191,3 @@ import { CreateBackend } from './backend.js'; | ||
| // Recursion! Should work for any nested files / folders. | ||
| yield this.rename(path.resolve(oldPath, name), path.resolve(newPath, name), cred); | ||
| yield this.rename(resolve(oldPath, name), resolve(newPath, name), cred); | ||
| } | ||
@@ -247,3 +247,3 @@ } | ||
| // Recursion! Should work for any nested files / folders. | ||
| this.renameSync(path.resolve(oldPath, name), path.resolve(newPath, name), cred); | ||
| this.renameSync(resolve(oldPath, name), resolve(newPath, name), cred); | ||
| }); | ||
@@ -628,6 +628,6 @@ } | ||
| createParentDirectories(p, cred) { | ||
| let parent = path.dirname(p), toCreate = []; | ||
| let parent = dirname(p), toCreate = []; | ||
| while (!this._writable.existsSync(parent, cred)) { | ||
| toCreate.push(parent); | ||
| parent = path.dirname(parent); | ||
| parent = dirname(parent); | ||
| } | ||
@@ -641,6 +641,6 @@ toCreate = toCreate.reverse(); | ||
| return __awaiter(this, void 0, void 0, function* () { | ||
| let parent = path.dirname(p), toCreate = []; | ||
| let parent = dirname(p), toCreate = []; | ||
| while (!(yield this._writable.exists(parent, cred))) { | ||
| toCreate.push(parent); | ||
| parent = path.dirname(parent); | ||
| parent = dirname(parent); | ||
| } | ||
@@ -647,0 +647,0 @@ toCreate = toCreate.reverse(); |
| /// <reference types="node" resolution-mode="require"/> | ||
| import { Cred } from '../cred.js'; | ||
| import { File, FileFlag, PreloadFile } from '../file.js'; | ||
| import { FileFlag, PreloadFile } from '../file.js'; | ||
| import { SynchronousFileSystem } from '../filesystem.js'; | ||
@@ -129,3 +129,3 @@ import { Stats } from '../stats.js'; | ||
| } | ||
| export declare class SyncKeyValueFile extends PreloadFile<SyncKeyValueFileSystem> implements File { | ||
| export declare class SyncKeyValueFile extends PreloadFile<SyncKeyValueFileSystem> { | ||
| constructor(_fs: SyncKeyValueFileSystem, _path: string, _flag: FileFlag, _stat: Stats, contents?: Buffer); | ||
@@ -160,4 +160,4 @@ syncSync(): void; | ||
| statSync(p: string, cred: Cred): Stats; | ||
| createFileSync(p: string, flag: FileFlag, mode: number, cred: Cred): File; | ||
| openFileSync(p: string, flag: FileFlag, cred: Cred): File; | ||
| createFileSync(p: string, flag: FileFlag, mode: number, cred: Cred): SyncKeyValueFile; | ||
| openFileSync(p: string, flag: FileFlag, cred: Cred): SyncKeyValueFile; | ||
| unlinkSync(p: string, cred: Cred): void; | ||
@@ -164,0 +164,0 @@ rmdirSync(p: string, cred: Cred): void; |
@@ -1,2 +0,2 @@ | ||
| import * as path from 'path'; | ||
| import { dirname, basename, join, resolve, sep } from '../emulation/path.js'; | ||
| import { ApiError, ErrorCode } from '../ApiError.js'; | ||
@@ -146,3 +146,3 @@ import { W_OK, R_OK } from '../emulation/constants.js'; | ||
| renameSync(oldPath, newPath, cred) { | ||
| const tx = this.store.beginTransaction('readwrite'), oldParent = path.dirname(oldPath), oldName = path.basename(oldPath), newParent = path.dirname(newPath), newName = path.basename(newPath), | ||
| const tx = this.store.beginTransaction('readwrite'), oldParent = dirname(oldPath), oldName = basename(oldPath), newParent = dirname(newPath), newName = basename(newPath), | ||
| // Remove oldPath from parent's directory listing. | ||
@@ -267,3 +267,3 @@ oldDirNode = this.findINode(tx, oldParent), oldDirList = this.getDirListing(tx, oldParent, oldDirNode); | ||
| // We use the _findInode helper because we actually need the INode id. | ||
| fileInodeId = this._findINode(tx, path.dirname(p), path.basename(p)), fileInode = this.getINode(tx, p, fileInodeId), inodeChanged = fileInode.update(stats); | ||
| fileInodeId = this._findINode(tx, dirname(p), basename(p)), fileInode = this.getINode(tx, p, fileInodeId), inodeChanged = fileInode.update(stats); | ||
| try { | ||
@@ -308,3 +308,3 @@ // Sync data. | ||
| _findINode(tx, parent, filename, visited = new Set()) { | ||
| const currentPath = path.posix.join(parent, filename); | ||
| const currentPath = join(parent, filename); | ||
| if (visited.has(currentPath)) { | ||
@@ -322,3 +322,3 @@ throw new ApiError(ErrorCode.EIO, 'Infinite loop detected while finding inode', currentPath); | ||
| else { | ||
| throw ApiError.ENOENT(path.resolve(parent, filename)); | ||
| throw ApiError.ENOENT(resolve(parent, filename)); | ||
| } | ||
@@ -337,3 +337,3 @@ }; | ||
| else { | ||
| return readDirectory(this.getINode(tx, parent + path.sep + filename, this._findINode(tx, path.dirname(parent), path.basename(parent), visited))); | ||
| return readDirectory(this.getINode(tx, parent + sep + filename, this._findINode(tx, dirname(parent), basename(parent), visited))); | ||
| } | ||
@@ -348,3 +348,3 @@ } | ||
| findINode(tx, p) { | ||
| return this.getINode(tx, p, this._findINode(tx, path.dirname(p), path.basename(p))); | ||
| return this.getINode(tx, p, this._findINode(tx, dirname(p), basename(p))); | ||
| } | ||
@@ -409,3 +409,3 @@ /** | ||
| commitNewFile(tx, p, type, mode, cred, data) { | ||
| const parentDir = path.dirname(p), fname = path.basename(p), parentNode = this.findINode(tx, parentDir), dirListing = this.getDirListing(tx, parentDir, parentNode), currTime = new Date().getTime(); | ||
| const parentDir = dirname(p), fname = basename(p), parentNode = this.findINode(tx, parentDir), dirListing = this.getDirListing(tx, parentDir, parentNode), currTime = new Date().getTime(); | ||
| //Check that the creater has correct access | ||
@@ -450,3 +450,3 @@ if (!parentNode.toStats().hasAccess(0b0100 /* Write */, cred)) { | ||
| removeEntry(p, isDir, cred) { | ||
| const tx = this.store.beginTransaction('readwrite'), parent = path.dirname(p), parentNode = this.findINode(tx, parent), parentListing = this.getDirListing(tx, parent, parentNode), fileName = path.basename(p); | ||
| const tx = this.store.beginTransaction('readwrite'), parent = dirname(p), parentNode = this.findINode(tx, parent), parentListing = this.getDirListing(tx, parent, parentNode), fileName = basename(p); | ||
| if (!parentListing[fileName]) { | ||
@@ -453,0 +453,0 @@ throw ApiError.ENOENT(p); |
| // Utilities and shared data | ||
| import { posix as path } from 'path'; | ||
| import { resolve } from './path.js'; | ||
| import { ApiError, ErrorCode } from '../ApiError.js'; | ||
@@ -55,3 +55,3 @@ import { Cred } from '../cred.js'; | ||
| p = p.replaceAll(/\/+/g, '/'); | ||
| return path.resolve(p); | ||
| return resolve(p); | ||
| } | ||
@@ -133,3 +133,3 @@ export function normalizeOptions(options, defEnc, defFlag, defMode) { | ||
| } | ||
| mountPoint = path.resolve(mountPoint); | ||
| mountPoint = resolve(mountPoint); | ||
| if (mounts.has(mountPoint)) { | ||
@@ -147,3 +147,3 @@ throw new ApiError(ErrorCode.EINVAL, 'Mount point ' + mountPoint + ' is already in use.'); | ||
| } | ||
| mountPoint = path.resolve(mountPoint); | ||
| mountPoint = resolve(mountPoint); | ||
| if (!mounts.has(mountPoint)) { | ||
@@ -150,0 +150,0 @@ throw new ApiError(ErrorCode.EINVAL, 'Mount point ' + mountPoint + ' is already unmounted.'); |
| import { Stats, FileType } from './stats.js'; | ||
| import * as path from 'path'; | ||
| import * as path from './emulation/path.js'; | ||
| /** | ||
@@ -222,3 +222,3 @@ * A simple class for storing a filesystem index. Assumes that all paths passed | ||
| const dirpath = path.dirname(p); | ||
| const itemname = p.substr(dirpath.length + (dirpath === '/' ? 0 : 1)); | ||
| const itemname = p.slice(dirpath.length + (dirpath === '/' ? 0 : 1)); | ||
| return [dirpath, itemname]; | ||
@@ -225,0 +225,0 @@ } |
@@ -15,3 +15,3 @@ /* eslint-disable @typescript-eslint/no-unused-vars */ | ||
| import { FileFlag, ActionType } from './file.js'; | ||
| import * as path from 'path'; | ||
| import * as path from './emulation/path.js'; | ||
| import { Buffer } from 'buffer'; | ||
@@ -18,0 +18,0 @@ /** |
+1
-1
@@ -11,3 +11,3 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
| import { ErrorCode, ApiError } from './ApiError.js'; | ||
| import * as path from 'path'; | ||
| import * as path from './emulation/path.js'; | ||
| import { Buffer } from 'buffer'; | ||
@@ -14,0 +14,0 @@ /** |
+1
-3
| { | ||
| "name": "@zenfs/core", | ||
| "version": "0.0.7", | ||
| "version": "0.0.8", | ||
| "description": "A filesystem in your browser", | ||
@@ -56,6 +56,4 @@ "main": "dist/index.js", | ||
| "esbuild": "^0.17.18", | ||
| "esbuild-plugin-polyfill-node": "^0.3.0", | ||
| "eslint": "^8.36.0", | ||
| "jest": "^29.5.0", | ||
| "path": "^0.12.7", | ||
| "prettier": "^2.8.7", | ||
@@ -62,0 +60,0 @@ "ts-jest": "^29.1.0", |
-293
| # ZenFS | ||
| ZenFS is an in-browser file system that emulates the [Node JS file system API](http://nodejs.org/api/fs.html) and supports storing and retrieving files from various backends. ZenFS also integrates nicely into the Emscripten file system. | ||
| ## Backends | ||
| ZenFS is highly extensible, and includes many builtin filesystem backends: | ||
| - `InMemory`: Stores files in-memory. It is a temporary file store that clears when the user navigates away. | ||
| - `OverlayFS`: Mount a read-only file system as read-write by overlaying a writable file system on top of it. Like Docker's overlayfs, it will only write changed files to the writable file system. | ||
| - `AsyncMirror`: Use an asynchronous backend synchronously. Invaluable for Emscripten; let your Emscripten applications write to larger file stores with no additional effort! | ||
| - `AsyncMirror` loads the entire contents of the async file system into a synchronous backend during construction. It performs operations on the synchronous file system and then queues them to be mirrored onto the asynchronous backend. | ||
| - `FolderAdapter`: Wraps a file system, and scopes all interactions to a subfolder of that file system. | ||
| More backends can be defined by separate libraries, so long as they implement `ZenFS.FileSystem`. Multiple backends can be active at once at different locations in the directory hierarchy. | ||
| ZenFS supports a number of other backends (as `@zenfs/fs-[name]`). | ||
| For more information, see the [API documentation for ZenFS](https://zen-fs.github.io/core). | ||
| ## Installing | ||
| ```sh | ||
| npm install @zenfs/core | ||
| ``` | ||
| ## Building | ||
| - Make sure you have Node and NPM installed. You must have Node v18 or newer. | ||
| - Install dependencies with `npm install` | ||
| - Build using `npm run build` | ||
| - You can find the built code in `dist`. | ||
| ## Usage | ||
| > 🛈 The examples are written in ESM. If you are using CJS, you can `require` the package. If running in a browser, you can add a script tag to your HTML pointing to the `browser.min.js` and use ZenFS via the global `ZenFS` object. | ||
| ```js | ||
| import { fs } from '@zenfs/core'; | ||
| fs.writeFileSync('/test.txt', 'Cool, I can do this in the browser!'); | ||
| const contents = fs.readFileSync('/test.txt', 'utf-8'); | ||
| console.log(contents); | ||
| ``` | ||
| #### Using different backends | ||
| A `InMemory` backend is created by default. If you would like to use a different one, you must configure ZenFS. It is recommended to do so using the `configure` function. Here is an example using the `LocalStorage` backend from `@zenfs/fs-dom`: | ||
| ```js | ||
| import { configure, fs } from '@zenfs/core'; | ||
| import '@zenfs/fs-dom'; // side effects are needed | ||
| // you can also add a callback as the last parameter instead of using promises | ||
| await configure({ fs: 'LocalStorage' }); | ||
| if (!fs.existsSync('/test.txt')) { | ||
| fs.writeFileSync('/test.txt', 'This will persist across reloads!'); | ||
| } | ||
| const contents = fs.readFileSync('/test.txt', 'utf-8'); | ||
| console.log(contents); | ||
| ``` | ||
| #### Using multiple backends | ||
| You can use multiple backends by passing an object to `configure` which maps paths to file systems. The following example mounts a zip file to `/zip`, in-memory storage to `/tmp`, and IndexedDB browser-local storage to `/home` (note that `/` has the default in-memory backend): | ||
| ```js | ||
| import { configure } from '@zenfs/core'; | ||
| import '@zenfs/fs-dom'; | ||
| import '@zenfs/fs-zip'; | ||
| import Buffer from 'buffer'; | ||
| const zipData = await (await fetch('mydata.zip')).arrayBuffer(); | ||
| await configure({ | ||
| '/mnt/zip': { | ||
| fs: 'ZipFS', | ||
| options: { | ||
| zipData: Buffer.from(zipData) | ||
| } | ||
| }, | ||
| '/tmp': 'InMemory', | ||
| '/home': 'IndexedDB', | ||
| }); | ||
| ``` | ||
| #### FS Promises API | ||
| The FS promises API is exposed as `promises`. | ||
| ```js | ||
| import { configure, promises } from '@zenfs/core'; | ||
| import '@zenfs/fs-dom'; | ||
| await configure({ '/': 'IndexedDB' }); | ||
| const exists = await promises.exists('/myfile.txt'); | ||
| if (!exists) { | ||
| await promises.write('/myfile.txt', 'Lots of persistent data'); | ||
| } | ||
| ``` | ||
| ZenFS does _not_ provide a separate method for importing promises in its built form. If you are using TypeScript, you can import the promises API from source code (perhaps to reduce your bundle size). Doing so is not recommended as the files may be moved without notice. | ||
| #### Using asynchronous backends synchronously | ||
| You may have noticed that attempting to use a synchronous method on an asynchronous backend (e.g. IndexedDB) results in a "not supported" error (`ENOTSUP`). If you wish to use an asynchronous backend synchronously you need to wrap it in an `AsyncMirror`: | ||
| ```js | ||
| import { configure, fs } from '@zenfs/core'; | ||
| import '@zenfs/fs-dom'; | ||
| await configure({ | ||
| '/': { fs: 'AsyncMirror', options: { sync: { fs: 'InMemory' }, async: { fs: 'IndexedDB' } } } | ||
| }); | ||
| fs.writeFileSync('/persistent.txt', 'My persistent data'); // This fails if you configure the FS as IndexedDB | ||
| ``` | ||
| ### Advanced usage | ||
| #### Creating backends | ||
| If you would like to create backends without configure, you may do so by importing the backend's class and calling its `Create` method. You can import the backend directly or with `backends`: | ||
| ```js | ||
| import { configure, backends, InMemory } from '@zenfs/core'; | ||
| console.log(backends.InMemory === InMemory) // they are the same | ||
| const inMemoryFS = await InMemory.Create(); | ||
| ``` | ||
| > ⚠ Instances of backends follow the ***internal*** ZenFS API. You should never use a backend's method unless you are extending a backend. | ||
| Coming soon: | ||
| ```js | ||
| import { configure, InMemory } from '@zenfs/core'; | ||
| const inMemoryFS = new InMemory(); | ||
| await inMemoryFS.whenReady(); | ||
| ``` | ||
| #### Mounting | ||
| If you would like to mount and unmount backends, you can do so using the `mount` and `umount` functions: | ||
| ```js | ||
| import { fs, InMemory } from '@zenfs/core'; | ||
| const inMemoryFS = await InMemory.Create(); // create an FS instance | ||
| fs.mount('/tmp', inMemoryFS); // mount | ||
| fs.umount('/tmp'); // unmount /tmp | ||
| ``` | ||
| This could be used in the "multiple backends" example like so: | ||
| ```js | ||
| import { configure, fs, ZipFS } from '@zenfs/core'; | ||
| import '@zenfs/fs-dom'; | ||
| import '@zenfs/fs-zip'; | ||
| import Buffer from 'buffer'; | ||
| await configure({ | ||
| '/tmp': 'InMemory', | ||
| '/home': 'IndexedDB', | ||
| }); | ||
| fs.mkdirSync('/mnt'); | ||
| const res = await fetch('mydata.zip'); | ||
| const zipData = Buffer.from(await res.arrayBuffer()); | ||
| const zipFs = await ZipFS.Create({ zipData }); | ||
| fs.mount('/mnt/zip', zipFs); | ||
| // do stuff with the mounted zip | ||
| fs.umount('/mnt/zip'); // finished using the zip | ||
| ``` | ||
| ## Using with bundlers | ||
| ZenFS exports a drop-in for Node's `fs` module (up to the version of `@types/node` in package.json), so you can use it for your bundler of preference using the default export. | ||
| #### ESBuild | ||
| tsconfig.json | ||
| ```json | ||
| { | ||
| ... | ||
| "paths": { | ||
| "fs": ["node_modules/zenfs/dist/index.js"] | ||
| } | ||
| ... | ||
| } | ||
| ``` | ||
| [Why tsconfig.json?](https://stackoverflow.com/a/71935037/17637456) | ||
| Webpack: | ||
| ```js | ||
| module.exports = { | ||
| // ... | ||
| resolve: { | ||
| alias: { | ||
| fs: require.resolve('zenfs'), | ||
| }, | ||
| }, | ||
| // ... | ||
| }; | ||
| ``` | ||
| Rollup: | ||
| ```js | ||
| import alias from '@rollup/plugin-alias'; | ||
| export default { | ||
| // ... | ||
| plugins: [ | ||
| alias({ | ||
| entries: [{ find: 'fs', replacement: 'zenfs' }], | ||
| }), | ||
| ], | ||
| // ... | ||
| }; | ||
| ``` | ||
| ## Using with Emscripten | ||
| You can use any _synchronous_ ZenFS file systems with Emscripten. | ||
| ```js | ||
| import { EmscriptenFS } from '@zenfs/fs-emscripten'; | ||
| const BFS = new EmscriptenFS(); // Create a ZenFS Emscripten FS plugin. | ||
| FS.createFolder(FS.root, 'data', true, true); // Create the folder that we'll turn into a mount point. | ||
| FS.mount(BFS, { root: '/' }, '/data'); // Mount BFS's root folder into the '/data' folder. | ||
| ``` | ||
| If you want to use an asynchronous backend, you must wrap it in an `AsyncMirror`. | ||
| ### Testing | ||
| Run unit tests with `npm test`. | ||
| ### Citing | ||
| ZenFS is a component of the [Doppio](http://doppiojvm.org/) and [Browsix](https://browsix.org/) research projects from the PLASMA lab at the University of Massachusetts Amherst. If you decide to use ZenFS in a project that leads to a publication, please cite the academic papers on [Doppio](https://dl.acm.org/citation.cfm?doid=2594291.2594293) and [Browsix](https://dl.acm.org/citation.cfm?id=3037727): | ||
| > John Vilk and Emery D. Berger. Doppio: Breaking the Browser Language Barrier. In | ||
| > _Proceedings of the 35th ACM SIGPLAN Conference on Programming Language Design and Implementation_ | ||
| > (2014), pp. 508–518. | ||
| ```bibtex | ||
| @inproceedings{VilkDoppio, | ||
| author = {John Vilk and | ||
| Emery D. Berger}, | ||
| title = {{Doppio: Breaking the Browser Language Barrier}}, | ||
| booktitle = {Proceedings of the 35th {ACM} {SIGPLAN} Conference on Programming Language Design and Implementation}, | ||
| pages = {508--518}, | ||
| year = {2014}, | ||
| url = {http://doi.acm.org/10.1145/2594291.2594293}, | ||
| doi = {10.1145/2594291.2594293} | ||
| } | ||
| ``` | ||
| > Bobby Powers, John Vilk, and Emery D. Berger. Browsix: Bridging the Gap Between Unix and the Browser. In _Proceedings of the Twenty-Second International Conference on Architectural Support for Programming Languages and Operating Systems_ (2017), pp. 253–266. | ||
| ```bibtex | ||
| @inproceedings{PowersBrowsix, | ||
| author = {Bobby Powers and | ||
| John Vilk and | ||
| Emery D. Berger}, | ||
| title = {{Browsix: Bridging the Gap Between Unix and the Browser}}, | ||
| booktitle = {Proceedings of the Twenty-Second International Conference on Architectural | ||
| Support for Programming Languages and Operating Systems}, | ||
| pages = {253--266}, | ||
| year = {2017}, | ||
| url = {http://doi.acm.org/10.1145/3037697.3037727}, | ||
| doi = {10.1145/3037697.3037727} | ||
| } | ||
| ``` | ||
| ### License | ||
| ZenFS is licensed under the MIT License. See `LICENSE` for details. |
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is too big to display
Network access
Supply chain riskThis module accesses the network.
Found 1 instance in 1 package
Uses eval
Supply chain riskPackage uses dynamic code execution (e.g., eval()), which is a dangerous practice. This can prevent the code from running in certain environments and increases the risk that the code may contain exploits or malicious behavior.
Found 1 instance in 1 package
Long strings
Supply chain riskContains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
Network access
Supply chain riskThis module accesses the network.
Found 1 instance in 1 package
Uses eval
Supply chain riskPackage uses dynamic code execution (e.g., eval()), which is a dangerous practice. This can prevent the code from running in certain environments and increases the risk that the code may contain exploits or malicious behavior.
Found 1 instance in 1 package
Long strings
Supply chain riskContains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
16
-11.11%59
3.51%10854
4.15%298
1.36%23
-4.17%974893
-3.46%