@automerge/automerge-repo - npm package version comparison

Comparing version 1.0.0-alpha.3 to 1.0.0-alpha.4

dist/helpers/cbor.d.ts

dist/DocCollection.d.ts

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3";
import { EventEmitter } from "eventemitter3";
import { DocHandle } from "./DocHandle.js";

@@ -40,2 +40,3 @@ import { DocumentId, AutomergeUrl } from "./types.js";

handle: DocHandle<any>;
isNew: boolean;
}

@@ -42,0 +43,0 @@ interface DeleteDocumentPayload {

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3";
import { EventEmitter } from "eventemitter3";
import { DocHandle } from "./DocHandle.js";

@@ -58,3 +58,3 @@ import { generateAutomergeUrl, isValidAutomergeUrl, parseAutomergeUrl, } from "./DocUrl.js";

const handle = this.#getHandle(documentId, true);
this.emit("document", { handle });
this.emit("document", { handle, isNew: true });
return handle;

@@ -86,3 +86,3 @@ }

const handle = this.#getHandle(documentId, false);
this.emit("document", { handle });
this.emit("document", { handle, isNew: false });
return handle;

@@ -89,0 +89,0 @@ }
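
The DocCollection changes above add an isNew flag to the "document" event payload, so listeners can tell a freshly created document apart from one that was looked up by ID. A minimal sketch of a listener consuming the new payload (repo is a placeholder for any DocCollection/Repo instance; the payload shape is taken from the emit calls above):

```ts
repo.on("document", ({ handle, isNew }) => {
  if (isNew) {
    // created locally via create(); alpha.4 saves it to storage immediately
    console.log("created", handle.documentId)
  } else {
    // looked up via find(); the repo tries storage first, then the network
    console.log("found", handle.documentId)
  }
})
```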

@@ -1,3 +0,3 @@

import * as A from "@automerge/automerge";
import EventEmitter from "eventemitter3";
import * as A from "@automerge/automerge/next";
import { EventEmitter } from "eventemitter3";
import { StateValue } from "xstate";

@@ -62,2 +62,4 @@ import type { DocumentId, PeerId, AutomergeUrl } from "./types.js";

request(): void;
awaitNetwork(): void;
networkReady(): void;
/** `delete` is called by the repo when the document is deleted */

@@ -115,2 +117,3 @@ delete(): void;

readonly LOADING: "loading";
readonly AWAITING_NETWORK: "awaitingNetwork";
readonly REQUESTING: "requesting";

@@ -128,2 +131,4 @@ readonly READY: "ready";

readonly REQUEST_COMPLETE: "REQUEST_COMPLETE";
readonly AWAIT_NETWORK: "AWAIT_NETWORK";
readonly NETWORK_READY: "NETWORK_READY";
readonly UPDATE: "UPDATE";

@@ -134,4 +139,4 @@ readonly TIMEOUT: "TIMEOUT";

};
export declare const IDLE: "idle", LOADING: "loading", REQUESTING: "requesting", READY: "ready", FAILED: "failed", DELETED: "deleted", UNAVAILABLE: "unavailable";
export declare const IDLE: "idle", LOADING: "loading", AWAITING_NETWORK: "awaitingNetwork", REQUESTING: "requesting", READY: "ready", FAILED: "failed", DELETED: "deleted", UNAVAILABLE: "unavailable";
export {};
//# sourceMappingURL=DocHandle.d.ts.map
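
The new AWAITING_NETWORK state and the awaitNetwork()/networkReady() methods declared above give a handle somewhere to wait when it misses in storage before any network adapter is up. A rough sketch of the intended transitions, using only the names declared in this file (the guards in the comments come from the implementation further down):

```ts
// loading --AWAIT_NETWORK--> awaitingNetwork --NETWORK_READY--> requesting
handle.awaitNetwork()   // sends AWAIT_NETWORK, but only while the handle is still in "loading"
handle.networkReady()   // sends NETWORK_READY, but only while the handle is in "awaitingNetwork"
```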

@@ -1,4 +0,4 @@

import * as A from "@automerge/automerge";
import * as A from "@automerge/automerge/next";
import debug from "debug";
import EventEmitter from "eventemitter3";
import { EventEmitter } from "eventemitter3";
import { assign, createMachine, interpret, } from "xstate";

@@ -10,3 +10,3 @@ import { waitFor } from "xstate/lib/waitFor.js";

import { stringifyAutomergeUrl } from "./DocUrl.js";
import { encode } from "cbor-x";
import { encode } from "./helpers/cbor.js";
/** DocHandle is a wrapper around a single Automerge document that lets us listen for changes. */

@@ -28,3 +28,7 @@ export class DocHandle//

// initial doc
const doc = A.init();
let doc = A.init();
// Make an empty change so that we have something to save to disk
if (isNew) {
doc = A.emptyChange(doc, {});
}
/**

@@ -64,2 +68,4 @@ * Internally we use a state machine to orchestrate document loading and/or syncing, in order to

REQUEST: { target: REQUESTING },
// AWAIT_NETWORK is called by the repo if the document is not found in storage but the network is not yet ready
AWAIT_NETWORK: { target: AWAITING_NETWORK },
DELETE: { actions: "onDelete", target: DELETED },

@@ -74,2 +80,7 @@ },

},
awaitingNetwork: {
on: {
NETWORK_READY: { target: REQUESTING },
}
},
requesting: {

@@ -285,2 +296,10 @@ on: {

}
awaitNetwork() {
if (this.#state === LOADING)
this.#machine.send(AWAIT_NETWORK);
}
networkReady() {
if (this.#state === AWAITING_NETWORK)
this.#machine.send(NETWORK_READY);
}
/** `delete` is called by the repo when the document is deleted */

@@ -308,2 +327,3 @@ delete() {

LOADING: "loading",
AWAITING_NETWORK: "awaitingNetwork",
REQUESTING: "requesting",

@@ -321,2 +341,4 @@ READY: "ready",

REQUEST_COMPLETE: "REQUEST_COMPLETE",
AWAIT_NETWORK: "AWAIT_NETWORK",
NETWORK_READY: "NETWORK_READY",
UPDATE: "UPDATE",

@@ -328,3 +350,3 @@ TIMEOUT: "TIMEOUT",

// CONSTANTS
export const { IDLE, LOADING, REQUESTING, READY, FAILED, DELETED, UNAVAILABLE, } = HandleState;
const { CREATE, FIND, REQUEST, UPDATE, TIMEOUT, DELETE, REQUEST_COMPLETE, MARK_UNAVAILABLE, } = Event;
export const { IDLE, LOADING, AWAITING_NETWORK, REQUESTING, READY, FAILED, DELETED, UNAVAILABLE, } = HandleState;
const { CREATE, FIND, REQUEST, UPDATE, TIMEOUT, DELETE, REQUEST_COMPLETE, MARK_UNAVAILABLE, AWAIT_NETWORK, NETWORK_READY } = Event;
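
Another behavioural change in this file: a handle created with isNew now records an empty change immediately, so a brand-new document has a head and can be written to storage before the user makes any edits. A standalone sketch of what that step does, using the @automerge/automerge/next API imported above (the document shape is hypothetical):

```ts
import * as A from "@automerge/automerge/next"

let doc = A.init<{ title?: string }>()   // hypothetical document shape
doc = A.emptyChange(doc, {})             // records a no-op change, giving the document a head
const bytes = A.save(doc)                // now there is something for the storage subsystem to persist
```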

@@ -1,2 +0,2 @@

import { type AutomergeUrl, type BinaryDocumentId, type DocumentId } from "./types";
import { type AutomergeUrl, type BinaryDocumentId, type DocumentId } from "./types.js";
export declare const urlPrefix = "automerge:";

@@ -3,0 +3,0 @@ /**

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3";
import { EventEmitter } from "eventemitter3";
/** Returns a promise that resolves when the given event is emitted on the given emitter. */

@@ -3,0 +3,0 @@ export declare const eventPromise: (emitter: EventEmitter, event: string) => Promise<any>;

@@ -1,3 +0,2 @@

import { Heads } from "@automerge/automerge";
export declare const headsAreSame: (a: Heads, b: Heads) => boolean;
//# sourceMappingURL=headsAreSame.d.ts.map

@@ -14,2 +14,3 @@ export { DocCollection } from "./DocCollection.js";

export * from "./types.js";
export * as cbor from "./helpers/cbor.js";
//# sourceMappingURL=index.d.ts.map

@@ -11,1 +11,2 @@ export { DocCollection } from "./DocCollection.js";

export * from "./types.js";
export * as cbor from "./helpers/cbor.js";
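
Several files now route CBOR encoding through a new ./helpers/cbor.js module (also re-exported above as cbor) instead of importing cbor-x directly. The helper itself is not included in this diff; the following is only a plausible stand-in inferred from how it is imported, with encode the only export actually visible here and decode an assumption:

```ts
// Hypothetical helpers/cbor.ts - a thin wrapper over cbor-x (the real file is not shown in this diff)
import { encode as cborXencode, decode as cborXdecode } from "cbor-x"

export const encode = (value: unknown): Uint8Array => cborXencode(value)
export const decode = <T = unknown>(bytes: Uint8Array): T => cborXdecode(bytes)
```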

@@ -1,3 +0,3 @@

import { SessionId } from "../EphemeralData";
import { DocumentId, PeerId } from "../types";
import { SessionId } from "../EphemeralData.js";
import { DocumentId, PeerId } from "../types.js";
export declare function isValidMessage(message: NetworkAdapterMessage): message is SyncMessage | EphemeralMessage | RequestMessage | DocumentUnavailableMessage;

@@ -4,0 +4,0 @@ export declare function isDocumentUnavailableMessage(message: NetworkAdapterMessage): message is DocumentUnavailableMessage;

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3";
import { EventEmitter } from "eventemitter3";
import { PeerId } from "../types.js";

@@ -12,3 +12,3 @@ import { Message } from "./messages.js";

export interface NetworkAdapterEvents {
open: (payload: OpenPayload) => void;
ready: (payload: OpenPayload) => void;
close: () => void;

@@ -15,0 +15,0 @@ "peer-candidate": (payload: PeerCandidatePayload) => void;
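
The NetworkAdapterEvents change above renames the adapter readiness event from "open" to "ready" without changing the payload type, so adapter implementations upgrading to alpha.4 need to switch the event name they emit. A minimal sketch (the class shape and payload mirror the DummyNetworkAdapter test helper further down in this diff):

```ts
import { NetworkAdapter } from "@automerge/automerge-repo"

class ExampleAdapter extends NetworkAdapter {
  send() {}
  join() {}
  leave() {}
  connect(_peerId: string) {
    // alpha.3 emitted "open" here; alpha.4 expects "ready" with the same payload
    this.emit("ready", { network: this })
  }
}
```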

@@ -1,4 +0,4 @@

import EventEmitter from "eventemitter3";
import { EventEmitter } from "eventemitter3";
export class NetworkAdapter extends EventEmitter {
peerId; // hmmm, maybe not
}

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3";
import { EventEmitter } from "eventemitter3";
import { PeerId } from "../types.js";

@@ -7,3 +7,2 @@ import { NetworkAdapter, PeerDisconnectedPayload } from "./NetworkAdapter.js";

#private;
private adapters;
peerId: PeerId;

@@ -15,2 +14,4 @@ constructor(adapters: NetworkAdapter[], peerId?: PeerId);

leave(): void;
isReady: () => boolean;
whenReady: () => Promise<void>;
}

@@ -21,2 +22,3 @@ export interface NetworkSubsystemEvents {

message: (payload: Message) => void;
"ready": () => void;
}

@@ -23,0 +25,0 @@ export interface PeerPayload {

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3";
import { EventEmitter } from "eventemitter3";
import { isEphemeralMessage, isValidMessage, } from "./messages.js";

@@ -6,3 +6,2 @@ import debug from "debug";

export class NetworkSubsystem extends EventEmitter {
adapters;
peerId;

@@ -14,11 +13,19 @@ #log;

#ephemeralSessionCounts = {};
#readyAdapterCount = 0;
#adapters = [];
constructor(adapters, peerId = randomPeerId()) {
super();
this.adapters = adapters;
this.peerId = peerId;
this.#log = debug(`automerge-repo:network:${this.peerId}`);
this.adapters.forEach(a => this.addNetworkAdapter(a));
adapters.forEach(a => this.addNetworkAdapter(a));
}
addNetworkAdapter(networkAdapter) {
networkAdapter.connect(this.peerId);
this.#adapters.push(networkAdapter);
networkAdapter.once("ready", () => {
this.#readyAdapterCount++;
this.#log("Adapters ready: ", this.#readyAdapterCount, "/", this.#adapters.length);
if (this.#readyAdapterCount === this.#adapters.length) {
this.emit("ready");
}
});
networkAdapter.on("peer-candidate", ({ peerId }) => {

@@ -63,2 +70,3 @@ this.#log(`peer candidate: ${peerId} `);

});
networkAdapter.connect(this.peerId);
networkAdapter.join();

@@ -93,8 +101,23 @@ }

this.#log(`Joining network`);
this.adapters.forEach(a => a.join());
this.#adapters.forEach(a => a.join());
}
leave() {
this.#log(`Leaving network`);
this.adapters.forEach(a => a.leave());
this.#adapters.forEach(a => a.leave());
}
isReady = () => {
return this.#readyAdapterCount === this.#adapters.length;
};
whenReady = async () => {
if (this.isReady()) {
return;
}
else {
return new Promise(resolve => {
this.once("ready", () => {
resolve();
});
});
}
};
}

@@ -101,0 +124,0 @@ function randomPeerId() {
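
The NetworkSubsystem now counts adapters as they report in and exposes isReady()/whenReady(), emitting a single "ready" event once every registered adapter has emitted its own. A small usage sketch (repo is a placeholder for any Repo instance; networkSubsystem, isReady and whenReady all appear in the hunks above):

```ts
// inside any async context
if (!repo.networkSubsystem.isReady()) {
  // resolves once every added network adapter has emitted its "ready" event
  await repo.networkSubsystem.whenReady()
}
```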

@@ -18,3 +18,3 @@ import debug from "debug";

// up a document by ID. We listen for it in order to wire up storage and network synchronization.
this.on("document", async ({ handle }) => {
this.on("document", async ({ handle, isNew }) => {
if (storageSubsystem) {

@@ -25,7 +25,13 @@ // Save when the document changes

});
// Try to load from disk
const loadedDoc = await storageSubsystem.loadDoc(handle.documentId);
if (loadedDoc) {
handle.update(() => loadedDoc);
if (isNew) {
// this is a new document, immediately save it
await storageSubsystem.saveDoc(handle.documentId, handle.docSync());
}
else {
// Try to load from disk
const loadedDoc = await storageSubsystem.loadDoc(handle.documentId);
if (loadedDoc) {
handle.update(() => loadedDoc);
}
}
}

@@ -38,3 +44,13 @@ handle.on("unavailable", () => {

});
handle.request();
if (this.networkSubsystem.isReady()) {
handle.request();
}
else {
handle.awaitNetwork();
this.networkSubsystem.whenReady().then(() => {
handle.networkReady();
}).catch(err => {
this.#log("error waiting for network", { err });
});
}
// Register the document with the synchronizer. This advertises our interest in the document.

@@ -47,3 +63,5 @@ synchronizer.addDocument(handle.documentId);

if (storageSubsystem) {
storageSubsystem.remove(documentId);
storageSubsystem.remove(documentId).catch(err => {
this.#log("error deleting document", { documentId, err });
});
}

@@ -50,0 +68,0 @@ });
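
Taken together, the Repo changes above mean a freshly created document is written to storage immediately (rather than only after its first change), and a document that misses in storage waits for the network instead of being requested against zero adapters. A sketch of the first effect, mirroring the new "saves the document when creating it" test near the end of this diff (TestDoc and storageAdapter are placeholders):

```ts
import { Repo } from "@automerge/automerge-repo"

type TestDoc = { foo?: string }       // placeholder document shape
declare const storageAdapter: any     // placeholder for a shared StorageAdapter instance

const repo = new Repo({ storage: storageAdapter, network: [] })
const handle = repo.create<TestDoc>() // alpha.4 persists the empty document right away

// a second repo sharing the same storage can find the document with no network at all
const repo2 = new Repo({ storage: storageAdapter, network: [] })
const found = repo2.find<TestDoc>(handle.url)
await found.whenReady()
```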

@@ -1,2 +0,2 @@

import * as A from "@automerge/automerge";
import * as A from "@automerge/automerge/next";
import { StorageAdapter } from "./StorageAdapter.js";

@@ -3,0 +3,0 @@ import { type DocumentId } from "../types.js";

@@ -1,2 +0,2 @@

import * as A from "@automerge/automerge";
import * as A from "@automerge/automerge/next";
import * as sha256 from "fast-sha256";

@@ -14,3 +14,3 @@ import { mergeArrays } from "../helpers/mergeArrays.js";

let encoder = new TextEncoder();
let headsbinary = mergeArrays(heads.map(h => encoder.encode(h)));
let headsbinary = mergeArrays(heads.map((h) => encoder.encode(h)));
return keyHash(headsbinary);

@@ -17,0 +17,0 @@ }

@@ -1,2 +0,2 @@

import * as A from "@automerge/automerge";
import * as A from "@automerge/automerge/next";
import { READY, REQUESTING, UNAVAILABLE, } from "../DocHandle.js";

@@ -127,3 +127,3 @@ import { Synchronizer } from "./Synchronizer.js";

const expanded = this.#opsLog.enabled
? decoded.changes.flatMap(change => A.decodeChange(change).ops.map(op => JSON.stringify(op)))
? decoded.changes.flatMap((change) => A.decodeChange(change).ops.map((op) => JSON.stringify(op)))
: null;

@@ -138,15 +138,14 @@ this.#opsLog(logText, expanded);

this.#log(`beginSync: ${peerIds.join(", ")}`);
// HACK: if we have a sync state already, we round-trip it through the encoding system to make
// sure state is preserved. This prevents an infinite loop caused by failed attempts to send
// messages during disconnection.
// TODO: cover that case with a test and remove this hack
peerIds.forEach(peerId => {
const syncStateRaw = this.#getSyncState(peerId);
const syncState = A.decodeSyncState(A.encodeSyncState(syncStateRaw));
this.#setSyncState(peerId, syncState);
});
// At this point if we don't have anything in our storage, we need to use an empty doc to sync
// with; but we don't want to surface that state to the front end
void this.handle.doc([READY, REQUESTING, UNAVAILABLE]).then(doc => {
// if we don't have any peers, then we can say the document is unavailable
// HACK: if we have a sync state already, we round-trip it through the encoding system to make
// sure state is preserved. This prevents an infinite loop caused by failed attempts to send
// messages during disconnection.
// TODO: cover that case with a test and remove this hack
peerIds.forEach(peerId => {
const syncStateRaw = this.#getSyncState(peerId);
const syncState = A.decodeSyncState(A.encodeSyncState(syncStateRaw));
this.#setSyncState(peerId, syncState);
});
// we register out peers first, then say that sync has started

@@ -153,0 +152,0 @@ this.#syncStarted = true;

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3";
import { EventEmitter } from "eventemitter3";
import { Message, MessageContents } from "../network/messages.js";

@@ -3,0 +3,0 @@ export declare abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {

@@ -1,3 +0,3 @@

import EventEmitter from "eventemitter3";
import { EventEmitter } from "eventemitter3";
export class Synchronizer extends EventEmitter {
}
import assert from "assert"
import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"
import * as Automerge from "@automerge/automerge"
import * as Automerge from "@automerge/automerge/next"

@@ -5,0 +5,0 @@ import { DocHandle, DocumentId, PeerId, SharePolicy } from "../src"

{
"name": "@automerge/automerge-repo",
"version": "1.0.0-alpha.3",
"version": "1.0.0-alpha.4",
"description": "A repository object to manage a collection of automerge documents",

@@ -34,3 +34,3 @@ "repository": "https://github.com/automerge/automerge-repo",

"peerDependencies": {
"@automerge/automerge": "^2.1.0-alpha.10"
"@automerge/automerge": "^2.1.0-alpha.12"
},

@@ -69,3 +69,3 @@ "dependencies": {

},
"gitHead": "0ed108273084319aeea64ceccb49c3d58709f107"
"gitHead": "fbf71f0c3aaa2786a4e279f336f01d665f53ce5b"
}

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3"
import { EventEmitter } from "eventemitter3"
import { DocHandle } from "./DocHandle.js"

@@ -77,3 +77,3 @@ import { DocumentId, type BinaryDocumentId, AutomergeUrl } from "./types.js"

const handle = this.#getHandle<T>(documentId, true) as DocHandle<T>
this.emit("document", { handle })
this.emit("document", { handle, isNew: true })
return handle

@@ -109,3 +109,3 @@ }

const handle = this.#getHandle<T>(documentId, false) as DocHandle<T>
this.emit("document", { handle })
this.emit("document", { handle, isNew: false })
return handle

@@ -141,2 +141,3 @@ }

handle: DocHandle<any>
isNew: boolean
}

@@ -143,0 +144,0 @@

@@ -1,4 +0,4 @@

import * as A from "@automerge/automerge"
import * as A from "@automerge/automerge/next"
import debug from "debug"
import EventEmitter from "eventemitter3"
import { EventEmitter } from "eventemitter3"
import {

@@ -22,3 +22,3 @@ assign,

import { stringifyAutomergeUrl } from "./DocUrl.js"
import { encode } from "cbor-x"
import { encode } from "./helpers/cbor.js"

@@ -47,4 +47,9 @@ /** DocHandle is a wrapper around a single Automerge document that lets us listen for changes. */

// initial doc
const doc = A.init<T>()
let doc = A.init<T>()
// Make an empty change so that we have something to save to disk
if (isNew) {
doc = A.emptyChange(doc, {})
}
/**

@@ -87,2 +92,4 @@ * Internally we use a state machine to orchestrate document loading and/or syncing, in order to

REQUEST: { target: REQUESTING },
// AWAIT_NETWORK is called by the repo if the document is not found in storage but the network is not yet ready
AWAIT_NETWORK: { target: AWAITING_NETWORK },
DELETE: { actions: "onDelete", target: DELETED },

@@ -97,2 +104,7 @@ },

},
awaitingNetwork: {
on: {
NETWORK_READY: { target: REQUESTING },
}
},
requesting: {

@@ -349,2 +361,10 @@ on: {

awaitNetwork() {
if (this.#state === LOADING) this.#machine.send(AWAIT_NETWORK)
}
networkReady() {
if (this.#state === AWAITING_NETWORK) this.#machine.send(NETWORK_READY)
}
/** `delete` is called by the repo when the document is deleted */

@@ -427,2 +447,3 @@ delete() {

LOADING: "loading",
AWAITING_NETWORK: "awaitingNetwork",
REQUESTING: "requesting",

@@ -457,2 +478,4 @@ READY: "ready",

REQUEST_COMPLETE: "REQUEST_COMPLETE",
AWAIT_NETWORK: "AWAIT_NETWORK",
NETWORK_READY: "NETWORK_READY",
UPDATE: "UPDATE",

@@ -476,2 +499,4 @@ TIMEOUT: "TIMEOUT",

type MarkUnavailableEvent = { type: typeof MARK_UNAVAILABLE }
type AwaitNetworkEvent = { type: typeof AWAIT_NETWORK }
type NetworkReadyEvent = { type: typeof NETWORK_READY }

@@ -487,2 +512,4 @@ type DocHandleEvent<T> =

| MarkUnavailableEvent
| AwaitNetworkEvent
| NetworkReadyEvent

@@ -509,2 +536,3 @@ type DocHandleXstateMachine<T> = Interpreter<

LOADING,
AWAITING_NETWORK,
REQUESTING,

@@ -525,2 +553,4 @@ READY,

MARK_UNAVAILABLE,
AWAIT_NETWORK,
NETWORK_READY
} = Event

@@ -5,3 +5,3 @@ import {

type DocumentId,
} from "./types"
} from "./types.js"
import { v4 as uuid } from "uuid"

@@ -8,0 +8,0 @@ import bs58check from "bs58check"

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3"
import { EventEmitter } from "eventemitter3"

@@ -3,0 +3,0 @@ /** Returns a promise that resolves when the given event is emitted on the given emitter. */

@@ -1,2 +0,2 @@

import {Heads} from "@automerge/automerge"
import {Heads} from "@automerge/automerge/next"
import { arraysAreEqual } from "./arraysAreEqual.js"

@@ -3,0 +3,0 @@

@@ -33,1 +33,3 @@ export { DocCollection } from "./DocCollection.js"

export * from "./types.js"
export * as cbor from "./helpers/cbor.js"
// utilities
import { SessionId } from "../EphemeralData"
import { DocumentId, PeerId } from "../types"
import { SessionId } from "../EphemeralData.js"
import { DocumentId, PeerId } from "../types.js"

@@ -5,0 +5,0 @@ export function isValidMessage(

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3"
import { EventEmitter } from "eventemitter3"
import { PeerId } from "../types.js"

@@ -20,3 +20,3 @@ import { Message } from "./messages.js"

export interface NetworkAdapterEvents {
open: (payload: OpenPayload) => void
ready: (payload: OpenPayload) => void
close: () => void

@@ -23,0 +23,0 @@ "peer-candidate": (payload: PeerCandidatePayload) => void

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3"
import { EventEmitter } from "eventemitter3"
import { PeerId } from "../types.js"

@@ -28,5 +28,7 @@ import { NetworkAdapter, PeerDisconnectedPayload } from "./NetworkAdapter.js"

#ephemeralSessionCounts: Record<EphemeralMessageSource, number> = {}
#readyAdapterCount = 0
#adapters: NetworkAdapter[] = []
constructor(
private adapters: NetworkAdapter[],
adapters: NetworkAdapter[],
public peerId = randomPeerId()

@@ -36,7 +38,14 @@ ) {

this.#log = debug(`automerge-repo:network:${this.peerId}`)
this.adapters.forEach(a => this.addNetworkAdapter(a))
adapters.forEach(a => this.addNetworkAdapter(a))
}
addNetworkAdapter(networkAdapter: NetworkAdapter) {
networkAdapter.connect(this.peerId)
this.#adapters.push(networkAdapter)
networkAdapter.once("ready", () => {
this.#readyAdapterCount++
this.#log("Adapters ready: ", this.#readyAdapterCount, "/", this.#adapters.length)
if (this.#readyAdapterCount === this.#adapters.length) {
this.emit("ready")
}
})

@@ -95,2 +104,3 @@ networkAdapter.on("peer-candidate", ({ peerId }) => {

networkAdapter.connect(this.peerId)
networkAdapter.join()

@@ -128,3 +138,3 @@ }

this.#log(`Joining network`)
this.adapters.forEach(a => a.join())
this.#adapters.forEach(a => a.join())
}

@@ -134,4 +144,20 @@

this.#log(`Leaving network`)
this.adapters.forEach(a => a.leave())
this.#adapters.forEach(a => a.leave())
}
isReady = () => {
return this.#readyAdapterCount === this.#adapters.length
}
whenReady = async () => {
if (this.isReady()) {
return
} else {
return new Promise<void>(resolve => {
this.once("ready", () => {
resolve()
})
})
}
}
}

@@ -149,2 +175,3 @@

message: (payload: Message) => void
"ready": () => void
}

@@ -151,0 +178,0 @@

@@ -26,3 +26,3 @@ import debug from "debug"

// up a document by ID. We listen for it in order to wire up storage and network synchronization.
this.on("document", async ({ handle }) => {
this.on("document", async ({ handle, isNew }) => {
if (storageSubsystem) {

@@ -34,6 +34,11 @@ // Save when the document changes

// Try to load from disk
const loadedDoc = await storageSubsystem.loadDoc(handle.documentId)
if (loadedDoc) {
handle.update(() => loadedDoc)
if (isNew) {
// this is a new document, immediately save it
await storageSubsystem.saveDoc(handle.documentId, handle.docSync()!)
} else {
// Try to load from disk
const loadedDoc = await storageSubsystem.loadDoc(handle.documentId)
if (loadedDoc) {
handle.update(() => loadedDoc)
}
}

@@ -49,3 +54,12 @@ }

handle.request()
if (this.networkSubsystem.isReady()) {
handle.request()
} else {
handle.awaitNetwork()
this.networkSubsystem.whenReady().then(() => {
handle.networkReady()
}).catch(err => {
this.#log("error waiting for network", { err })
})
}

@@ -61,3 +75,5 @@ // Register the document with the synchronizer. This advertises our interest in the document.

if (storageSubsystem) {
storageSubsystem.remove(documentId)
storageSubsystem.remove(documentId).catch(err => {
this.#log("error deleting document", { documentId, err })
})
}

@@ -64,0 +80,0 @@ })
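
The same logic in the TypeScript source also changes what find() does before the network is up: instead of requesting immediately (and racing toward "unavailable"), the handle parks in awaitingNetwork until every adapter has reported ready. A sketch of the observable behaviour, mirroring the new "doesn't mark a document as unavailable until network adapters are ready" test below (url is a placeholder for an Automerge URL no peer can supply):

```ts
// repo's network adapters are not yet ready
const handle = repo.find<TestDoc>(url)

handle.on("unavailable", () => {
  // not fired while the network subsystem is still waiting on its adapters...
})
// ...only after the last adapter emits "ready" is the document actually requested,
// and only then can the handle fall through to "unavailable".
```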

@@ -1,2 +0,2 @@

import * as A from "@automerge/automerge"
import * as A from "@automerge/automerge/next"
import { StorageAdapter, StorageKey } from "./StorageAdapter.js"

@@ -28,3 +28,3 @@ import * as sha256 from "fast-sha256"

let encoder = new TextEncoder()
let headsbinary = mergeArrays(heads.map(h => encoder.encode(h)))
let headsbinary = mergeArrays(heads.map((h: string) => encoder.encode(h)))
return keyHash(headsbinary)

@@ -31,0 +31,0 @@ }

@@ -1,3 +0,4 @@

import * as A from "@automerge/automerge"
import * as A from "@automerge/automerge/next"
import {
AWAITING_NETWORK,
DocHandle,

@@ -177,4 +178,4 @@ DocHandleOutboundEphemeralMessagePayload,

const expanded = this.#opsLog.enabled
? decoded.changes.flatMap(change =>
A.decodeChange(change).ops.map(op => JSON.stringify(op))
? decoded.changes.flatMap((change: A.Change) =>
A.decodeChange(change).ops.map((op: any) => JSON.stringify(op))
)

@@ -194,17 +195,16 @@ : null

// HACK: if we have a sync state already, we round-trip it through the encoding system to make
// sure state is preserved. This prevents an infinite loop caused by failed attempts to send
// messages during disconnection.
// TODO: cover that case with a test and remove this hack
peerIds.forEach(peerId => {
const syncStateRaw = this.#getSyncState(peerId)
const syncState = A.decodeSyncState(A.encodeSyncState(syncStateRaw))
this.#setSyncState(peerId, syncState)
})
// At this point if we don't have anything in our storage, we need to use an empty doc to sync
// with; but we don't want to surface that state to the front end
void this.handle.doc([READY, REQUESTING, UNAVAILABLE]).then(doc => {
// if we don't have any peers, then we can say the document is unavailable
// HACK: if we have a sync state already, we round-trip it through the encoding system to make
// sure state is preserved. This prevents an infinite loop caused by failed attempts to send
// messages during disconnection.
// TODO: cover that case with a test and remove this hack
peerIds.forEach(peerId => {
const syncStateRaw = this.#getSyncState(peerId)
const syncState = A.decodeSyncState(A.encodeSyncState(syncStateRaw))
this.#setSyncState(peerId, syncState)
})
// we register out peers first, then say that sync has started

@@ -211,0 +211,0 @@ this.#syncStarted = true

@@ -1,2 +0,2 @@

import EventEmitter from "eventemitter3"
import { EventEmitter } from "eventemitter3"
import { Message, MessageContents } from "../network/messages.js"

@@ -3,0 +3,0 @@

import assert from "assert"
import { beforeEach } from "mocha"
import { DocCollection, PeerId } from "../src"
import { DocCollection, PeerId } from "../src/index.js"
import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"

@@ -5,0 +5,0 @@

import assert from "assert"
import { DocCollection, BinaryDocumentId } from "../src"
import { DocCollection, BinaryDocumentId } from "../src/index.js"
import { TestDoc } from "./types.js"
import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl"
import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl.js"

@@ -6,0 +6,0 @@ const MISSING_DOCID = generateAutomergeUrl()

@@ -1,9 +0,9 @@

import * as A from "@automerge/automerge"
import * as A from "@automerge/automerge/next"
import assert from "assert"
import { it } from "mocha"
import { DocHandle, DocHandleChangePayload } from "../src"
import { pause } from "../src/helpers/pause"
import { DocHandle, DocHandleChangePayload } from "../src/index.js"
import { pause } from "../src/helpers/pause.js"
import { TestDoc } from "./types.js"
import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl"
import { eventPromise } from "../src/helpers/eventPromise"
import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
import { eventPromise } from "../src/helpers/eventPromise.js"
import { decode } from "cbor-x"

@@ -10,0 +10,0 @@

@@ -1,8 +0,17 @@

import { NetworkAdapter } from "../../src"
import { NetworkAdapter } from "../../src/index.js"
export class DummyNetworkAdapter extends NetworkAdapter {
#startReady = true
constructor(startReady: boolean) {
super()
this.#startReady = startReady
}
send() {}
connect(_: string) {}
connect(_: string) {
if (this.#startReady) {
this.emit("ready", { network: this })
}
}
join() {}
leave() {}
}
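
The DummyNetworkAdapter test helper now takes a startReady flag so tests can simulate adapters that come up late: when startReady is false, connect() does not emit "ready" and the test fires it by hand, as in the new Repo tests below:

```ts
const networkAdapter = new DummyNetworkAdapter(false)      // not ready at connect time
// ...exercise the repo while the network is still down...
networkAdapter.emit("ready", { network: networkAdapter })  // later, simulate the adapter coming up
```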

@@ -1,2 +0,2 @@

import { StorageAdapter, type StorageKey } from "../../src"
import { StorageAdapter, type StorageKey } from "../../src/index.js"

@@ -3,0 +3,0 @@ export class DummyStorageAdapter implements StorageAdapter {

@@ -11,3 +11,3 @@ import assert from "assert"

SharePolicy,
} from "../src"
} from "../src/index.js"
import { eventPromise } from "../src/helpers/eventPromise.js"

@@ -20,15 +20,16 @@ import { pause, rejectOnTimeout } from "../src/helpers/pause.js"

import { TestDoc } from "./types.js"
import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl"
import { READY } from "../src/DocHandle"
import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl.js"
import { READY, AWAITING_NETWORK } from "../src/DocHandle.js"
describe("Repo", () => {
describe("single repo", () => {
const setup = () => {
const setup = (networkReady = true) => {
const storageAdapter = new DummyStorageAdapter()
const networkAdapter = new DummyNetworkAdapter(networkReady)
const repo = new Repo({
storage: storageAdapter,
network: [new DummyNetworkAdapter()],
network: [networkAdapter],
})
return { repo, storageAdapter }
return { repo, storageAdapter, networkAdapter }
}

@@ -88,2 +89,19 @@

it("doesn't mark a document as unavailable until network adapters are ready", async () => {
const { repo, networkAdapter } = setup(false)
const url = generateAutomergeUrl()
const handle = repo.find<TestDoc>(url)
let wasUnavailable = false
handle.on("unavailable", () => {
wasUnavailable = true
})
await pause(50)
assert.equal(wasUnavailable, false)
networkAdapter.emit("ready", { network: networkAdapter })
await eventPromise(handle, "unavailable")
})
it("can find a created document", async () => {

@@ -106,2 +124,17 @@ const { repo } = setup()

it("saves the document when creating it", async () => {
const { repo, storageAdapter } = setup()
const handle = repo.create<TestDoc>()
const repo2 = new Repo({
storage: storageAdapter,
network: [],
})
const bobHandle = repo2.find<TestDoc>(handle.url)
await bobHandle.whenReady()
assert.equal(bobHandle.isReady(), true)
})
it("saves the document when changed and can find it again", async () => {

@@ -304,2 +337,11 @@ const { repo, storageAdapter } = setup()

function doConnectAlice() {
aliceRepo.networkSubsystem.addNetworkAdapter(new MessageChannelNetworkAdapter(aliceToBob))
//bobRepo.networkSubsystem.addNetworkAdapter(new MessageChannelNetworkAdapter(bobToAlice))
}
if (connectAlice) {
doConnectAlice()
}
return {

@@ -310,3 +352,3 @@ teardown,

charlieRepo,
aliceNetworkAdapter,
connectAliceToBob: doConnectAlice,
}

@@ -316,3 +358,3 @@ }

const setup = async (connectAlice = true) => {
const { teardown, aliceRepo, bobRepo, charlieRepo, aliceNetworkAdapter } =
const { teardown, aliceRepo, bobRepo, charlieRepo, connectAliceToBob } =
setupRepos(connectAlice)

@@ -355,3 +397,3 @@

teardown,
aliceNetworkAdapter,
connectAliceToBob,
}

@@ -454,3 +496,3 @@ }

teardown,
aliceNetworkAdapter,
connectAliceToBob,
} = await setup(false)

@@ -464,3 +506,3 @@

aliceRepo.networkSubsystem.addNetworkAdapter(aliceNetworkAdapter)
connectAliceToBob()

@@ -467,0 +509,0 @@ await eventPromise(aliceRepo.networkSubsystem, "peer")

@@ -7,3 +7,3 @@ import fs from "fs"

import A from "@automerge/automerge"
import * as A from "@automerge/automerge/next"

@@ -13,4 +13,3 @@ import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"

import { StorageSubsystem } from "../src"
import { TestDoc } from "./types.js"
import { StorageSubsystem } from "../src/index.js"
import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"

@@ -17,0 +16,0 @@

@@ -5,4 +5,4 @@ {

"jsx": "react",
"module": "ESNext",
"moduleResolution": "node",
"module": "NodeNext",
"moduleResolution": "Node16",
"declaration": true,

@@ -9,0 +9,0 @@ "declarationMap": true,
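
The tsconfig change from module: ESNext / moduleResolution: node to NodeNext / Node16 is what drives the many import-path edits above: under Node16-style resolution, relative ESM imports must include the runtime file extension, so specifiers like ./types gain a .js suffix even in .ts sources. For example:

```ts
// rejected under "module": "NodeNext" / "moduleResolution": "Node16":
//   import { DocHandle } from "./DocHandle"
// required form (resolves to DocHandle.ts at compile time and DocHandle.js at runtime):
import { DocHandle } from "./DocHandle.js"
```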
