New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@automerge/automerge-repo

Package Overview
Dependencies
Maintainers
4
Versions
75
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@automerge/automerge-repo - npm Package Compare versions

Comparing version 1.0.7 to 1.0.8

dist/helpers/debounce.d.ts

2

dist/DocHandle.d.ts
import * as A from "@automerge/automerge/next";
import { EventEmitter } from "eventemitter3";
import { StateValue } from "xstate";
import type { DocumentId, PeerId, AutomergeUrl } from "./types.js";
import type { AutomergeUrl, DocumentId, PeerId } from "./types.js";
/** DocHandle is a wrapper around a single Automerge document that lets us

@@ -6,0 +6,0 @@ * listen for changes and notify the network and storage of new changes.

@@ -6,6 +6,6 @@ import * as A from "@automerge/automerge/next";

import { waitFor } from "xstate/lib/waitFor.js";
import { stringifyAutomergeUrl } from "./DocUrl.js";
import { encode } from "./helpers/cbor.js";
import { headsAreSame } from "./helpers/headsAreSame.js";
import { withTimeout } from "./helpers/withTimeout.js";
import { stringifyAutomergeUrl } from "./DocUrl.js";
import { encode } from "./helpers/cbor.js";
/** DocHandle is a wrapper around a single Automerge document that lets us

@@ -12,0 +12,0 @@ * listen for changes and notify the network and storage of new changes.

@@ -28,8 +28,8 @@ /**

*/
export { DocHandle } from "./DocHandle.js";
export { isValidAutomergeUrl, parseAutomergeUrl, stringifyAutomergeUrl, } from "./DocUrl.js";
export { Repo } from "./Repo.js";
export { DocHandle } from "./DocHandle.js";
export { NetworkAdapter } from "./network/NetworkAdapter.js";
export { isValidRepoMessage } from "./network/messages.js";
export { StorageAdapter } from "./storage/StorageAdapter.js";
export { isValidAutomergeUrl, parseAutomergeUrl, stringifyAutomergeUrl, } from "./DocUrl.js";
export { isValidRepoMessage } from "./network/messages.js";
/** @hidden **/

@@ -36,0 +36,0 @@ export * as cbor from "./helpers/cbor.js";

@@ -28,10 +28,10 @@ /**

*/
export { DocHandle } from "./DocHandle.js";
export { isValidAutomergeUrl, parseAutomergeUrl, stringifyAutomergeUrl, } from "./DocUrl.js";
export { Repo } from "./Repo.js";
export { DocHandle } from "./DocHandle.js";
export { NetworkAdapter } from "./network/NetworkAdapter.js";
export { isValidRepoMessage } from "./network/messages.js";
export { StorageAdapter } from "./storage/StorageAdapter.js";
export { isValidAutomergeUrl, parseAutomergeUrl, stringifyAutomergeUrl, } from "./DocUrl.js";
export { isValidRepoMessage } from "./network/messages.js";
/** @hidden **/
export * as cbor from "./helpers/cbor.js";
export * from "./types.js";
import { EventEmitter } from "eventemitter3";
import { PeerId } from "../types.js";
import { NetworkAdapter, PeerDisconnectedPayload } from "./NetworkAdapter.js";
import { RepoMessage, MessageContents } from "./messages.js";
import { MessageContents, RepoMessage } from "./messages.js";
export declare class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {

@@ -6,0 +6,0 @@ #private;

@@ -0,1 +1,3 @@

import { EventEmitter } from "eventemitter3";
import { DocHandle } from "./DocHandle.js";
import { NetworkAdapter } from "./network/NetworkAdapter.js";

@@ -5,5 +7,3 @@ import { NetworkSubsystem } from "./network/NetworkSubsystem.js";

import { StorageSubsystem } from "./storage/StorageSubsystem.js";
import { type AutomergeUrl, DocumentId, PeerId } from "./types.js";
import { DocHandle } from "./DocHandle.js";
import { EventEmitter } from "eventemitter3";
import { DocumentId, PeerId, type AutomergeUrl } from "./types.js";
/** A Repo is a collection of documents with networking, syncing, and storage capabilities. */

@@ -23,2 +23,5 @@ /** The `Repo` is the main entry point of this library

storageSubsystem?: StorageSubsystem;
/** The debounce rate is adjustable on the repo. */
/** @hidden */
saveDebounceRate: number;
/** By default, we share generously with all peers. */

@@ -25,0 +28,0 @@ /** @hidden */

@@ -0,9 +1,10 @@

import { next as Automerge } from "@automerge/automerge";
import debug from "debug";
import { EventEmitter } from "eventemitter3";
import { DocHandle } from "./DocHandle.js";
import { generateAutomergeUrl, isValidAutomergeUrl, parseAutomergeUrl, parseLegacyUUID, } from "./DocUrl.js";
import { throttle } from "./helpers/throttle.js";
import { NetworkSubsystem } from "./network/NetworkSubsystem.js";
import { StorageSubsystem } from "./storage/StorageSubsystem.js";
import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
import { parseAutomergeUrl, generateAutomergeUrl, isValidAutomergeUrl, parseLegacyUUID, } from "./DocUrl.js";
import { DocHandle } from "./DocHandle.js";
import { EventEmitter } from "eventemitter3";
import { next as Automerge } from "@automerge/automerge";
/** A Repo is a collection of documents with networking, syncing, and storage capabilities. */

@@ -23,2 +24,5 @@ /** The `Repo` is the main entry point of this library

storageSubsystem;
/** The debounce rate is adjustable on the repo. */
/** @hidden */
saveDebounceRate = 100;
#handleCache = {};

@@ -37,6 +41,7 @@ /** By default, we share generously with all peers. */

if (storageSubsystem) {
// Save when the document changes
handle.on("heads-changed", async ({ handle, doc }) => {
await storageSubsystem.saveDoc(handle.documentId, doc);
});
// Save when the document changes, but no more often than saveDebounceRate.
const saveFn = ({ handle, doc, }) => {
void storageSubsystem.saveDoc(handle.documentId, doc);
};
const debouncedSaveFn = handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate));
if (isNew) {

@@ -43,0 +48,0 @@ // this is a new document, immediately save it

import * as A from "@automerge/automerge/next";
import { type DocumentId } from "../types.js";
import { StorageAdapter } from "./StorageAdapter.js";
import { type DocumentId } from "../types.js";
export type ChunkType = "snapshot" | "incremental";

@@ -5,0 +5,0 @@ export declare class StorageSubsystem {

import * as A from "@automerge/automerge/next";
import debug from "debug";
import * as sha256 from "fast-sha256";
import { headsAreSame } from "../helpers/headsAreSame.js";
import { mergeArrays } from "../helpers/mergeArrays.js";
import debug from "debug";
import { headsAreSame } from "../helpers/headsAreSame.js";
function keyHash(binary) {

@@ -7,0 +7,0 @@ const hash = sha256.hash(binary);

import { Repo } from "../Repo.js";
import { RepoMessage } from "../network/messages.js";
import { DocumentId, PeerId } from "../types.js";
import { Synchronizer } from "./Synchronizer.js";
import { RepoMessage } from "../network/messages.js";
/** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */

@@ -6,0 +6,0 @@ export declare class CollectionSynchronizer extends Synchronizer {

@@ -0,5 +1,5 @@

import debug from "debug";
import { stringifyAutomergeUrl } from "../DocUrl.js";
import { DocSynchronizer } from "./DocSynchronizer.js";
import { Synchronizer } from "./Synchronizer.js";
import debug from "debug";
const log = debug("automerge-repo:collectionsync");

@@ -6,0 +6,0 @@ /** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */

@@ -13,2 +13,3 @@ import { DocHandle } from "../DocHandle.js";

private handle;
syncDebounceRate: number;
constructor(handle: DocHandle<unknown>);

@@ -15,0 +16,0 @@ get peerStates(): Record<PeerId, PeerDocumentStatus>;

@@ -7,2 +7,3 @@ import * as A from "@automerge/automerge/next";

import { Synchronizer } from "./Synchronizer.js";
import { throttle } from "../helpers/throttle.js";
/**

@@ -15,4 +16,3 @@ * DocSynchronizer takes a handle to an Automerge document, and receives & dispatches sync messages

#log;
#conciseLog;
#opsLog;
syncDebounceRate = 100;
/** Active peers */

@@ -29,6 +29,4 @@ #peers = [];

const docId = handle.documentId.slice(0, 5);
this.#conciseLog = debug(`automerge-repo:concise:docsync:${docId}`); // Only logs one line per receive/send
this.#log = debug(`automerge-repo:docsync:${docId}`);
this.#opsLog = debug(`automerge-repo:ops:docsync:${docId}`); // Log list of ops of each message
handle.on("change", () => this.#syncWithPeers());
handle.on("change", throttle(() => this.#syncWithPeers(), this.syncDebounceRate));
handle.on("ephemeral-message-outbound", payload => this.#broadcastToPeers(payload));

@@ -35,0 +33,0 @@ // Process pending sync messages immediately after the handle becomes ready.

import { EventEmitter } from "eventemitter3";
import { RepoMessage, MessageContents } from "../network/messages.js";
import { MessageContents, RepoMessage } from "../network/messages.js";
export declare abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {

@@ -4,0 +4,0 @@ abstract receiveMessage(message: RepoMessage): void;

@@ -1,11 +0,13 @@

import assert from "assert"
import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"
import * as Automerge from "@automerge/automerge/next"
import { DocHandle, DocumentId, PeerId, SharePolicy } from "../src"
import assert from "assert"
import { eventPromise } from "../src/helpers/eventPromise.js"
import { pause } from "../src/helpers/pause.js"
import { Repo } from "../src/Repo.js"
import { DummyNetworkAdapter } from "../test/helpers/DummyNetworkAdapter.js"
import { DummyStorageAdapter } from "../test/helpers/DummyStorageAdapter.js"
import {
DocHandle,
DocumentId,
PeerId,
Repo,
SharePolicy,
} from "../src/index.js"
import { getRandomItem } from "../test/helpers/getRandomItem.js"

@@ -12,0 +14,0 @@

{
"name": "@automerge/automerge-repo",
"version": "1.0.7",
"version": "1.0.8",
"description": "A repository object to manage a collection of automerge documents",

@@ -60,3 +60,3 @@ "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",

},
"gitHead": "71060981f168e511a99ab85b155a54a13fd04bcc"
"gitHead": "d244a36aebab52a86f91b5427bbe8ab2800bf2f4"
}

@@ -17,8 +17,7 @@ import * as A from "@automerge/automerge/next"

import { waitFor } from "xstate/lib/waitFor.js"
import { headsAreSame } from "./helpers/headsAreSame.js"
import { pause } from "./helpers/pause.js"
import { TimeoutError, withTimeout } from "./helpers/withTimeout.js"
import type { DocumentId, PeerId, AutomergeUrl } from "./types.js"
import { stringifyAutomergeUrl } from "./DocUrl.js"
import { encode } from "./helpers/cbor.js"
import { headsAreSame } from "./helpers/headsAreSame.js"
import { withTimeout } from "./helpers/withTimeout.js"
import type { AutomergeUrl, DocumentId, PeerId } from "./types.js"

@@ -25,0 +24,0 @@ /** DocHandle is a wrapper around a single Automerge document that lets us

@@ -32,3 +32,5 @@ import {

documentId,
}: {documentId: DocumentId | BinaryDocumentId}): AutomergeUrl => {
}: {
documentId: DocumentId | BinaryDocumentId
}): AutomergeUrl => {
if (documentId instanceof Uint8Array)

@@ -35,0 +37,0 @@ return (urlPrefix +

@@ -1,2 +0,2 @@

import {Heads} from "@automerge/automerge/next"
import { Heads } from "@automerge/automerge/next"
import { arraysAreEqual } from "./arraysAreEqual.js"

@@ -3,0 +3,0 @@

@@ -29,6 +29,3 @@ /**

export { Repo } from "./Repo.js"
export { DocHandle } from "./DocHandle.js"
export { NetworkAdapter } from "./network/NetworkAdapter.js"
export { StorageAdapter } from "./storage/StorageAdapter.js"
export {

@@ -39,3 +36,6 @@ isValidAutomergeUrl,

} from "./DocUrl.js"
export { Repo } from "./Repo.js"
export { NetworkAdapter } from "./network/NetworkAdapter.js"
export { isValidRepoMessage } from "./network/messages.js"
export { StorageAdapter } from "./storage/StorageAdapter.js"

@@ -42,0 +42,0 @@ /** @hidden **/

@@ -7,4 +7,4 @@ import debug from "debug"

EphemeralMessage,
MessageContents,
RepoMessage,
MessageContents,
isEphemeralMessage,

@@ -11,0 +11,0 @@ isValidRepoMessage,

@@ -0,2 +1,12 @@

import { next as Automerge } from "@automerge/automerge"
import debug from "debug"
import { EventEmitter } from "eventemitter3"
import { DocHandle, DocHandleEncodedChangePayload } from "./DocHandle.js"
import {
generateAutomergeUrl,
isValidAutomergeUrl,
parseAutomergeUrl,
parseLegacyUUID,
} from "./DocUrl.js"
import { throttle } from "./helpers/throttle.js"
import { NetworkAdapter } from "./network/NetworkAdapter.js"

@@ -7,15 +17,4 @@ import { NetworkSubsystem } from "./network/NetworkSubsystem.js"

import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
import { type AutomergeUrl, DocumentId, PeerId } from "./types.js"
import { DocumentId, PeerId, type AutomergeUrl } from "./types.js"
import {
parseAutomergeUrl,
generateAutomergeUrl,
isValidAutomergeUrl,
parseLegacyUUID,
} from "./DocUrl.js"
import { DocHandle } from "./DocHandle.js"
import { EventEmitter } from "eventemitter3"
import { next as Automerge } from "@automerge/automerge"
/** A Repo is a collection of documents with networking, syncing, and storage capabilities. */

@@ -36,2 +35,7 @@ /** The `Repo` is the main entry point of this library

storageSubsystem?: StorageSubsystem
/** The debounce rate is adjustable on the repo. */
/** @hidden */
saveDebounceRate = 100
#handleCache: Record<DocumentId, DocHandle<any>> = {}

@@ -54,6 +58,13 @@

if (storageSubsystem) {
// Save when the document changes
handle.on("heads-changed", async ({ handle, doc }) => {
await storageSubsystem.saveDoc(handle.documentId, doc)
})
// Save when the document changes, but no more often than saveDebounceRate.
const saveFn = ({
handle,
doc,
}: DocHandleEncodedChangePayload<any>) => {
void storageSubsystem.saveDoc(handle.documentId, doc)
}
const debouncedSaveFn = handle.on(
"heads-changed",
throttle(saveFn, this.saveDebounceRate)
)

@@ -60,0 +71,0 @@ if (isNew) {

import * as A from "@automerge/automerge/next"
import { StorageAdapter, StorageKey } from "./StorageAdapter.js"
import debug from "debug"
import * as sha256 from "fast-sha256"
import { headsAreSame } from "../helpers/headsAreSame.js"
import { mergeArrays } from "../helpers/mergeArrays.js"
import { type DocumentId } from "../types.js"
import { mergeArrays } from "../helpers/mergeArrays.js"
import debug from "debug"
import { headsAreSame } from "../helpers/headsAreSame.js"
import { StorageAdapter, StorageKey } from "./StorageAdapter.js"

@@ -9,0 +9,0 @@ // Metadata about a chunk of data loaded from storage. This is stored on the

@@ -0,4 +1,6 @@

import debug from "debug"
import { DocHandle } from "../DocHandle.js"
import { stringifyAutomergeUrl } from "../DocUrl.js"
import { Repo } from "../Repo.js"
import { RepoMessage } from "../network/messages.js"
import { DocumentId, PeerId } from "../types.js"

@@ -8,4 +10,2 @@ import { DocSynchronizer } from "./DocSynchronizer.js"

import debug from "debug"
import { RepoMessage } from "../network/messages.js"
const log = debug("automerge-repo:collectionsync")

@@ -12,0 +12,0 @@

@@ -14,4 +14,4 @@ import * as A from "@automerge/automerge/next"

EphemeralMessage,
MessageContents,
RepoMessage,
MessageContents,
RequestMessage,

@@ -23,2 +23,3 @@ SyncMessage,

import { Synchronizer } from "./Synchronizer.js"
import { throttle } from "../helpers/throttle.js"

@@ -33,4 +34,3 @@ type PeerDocumentStatus = "unknown" | "has" | "unavailable" | "wants"

#log: debug.Debugger
#conciseLog: debug.Debugger
#opsLog: debug.Debugger
syncDebounceRate = 100

@@ -52,7 +52,8 @@ /** Active peers */

const docId = handle.documentId.slice(0, 5)
this.#conciseLog = debug(`automerge-repo:concise:docsync:${docId}`) // Only logs one line per receive/send
this.#log = debug(`automerge-repo:docsync:${docId}`)
this.#opsLog = debug(`automerge-repo:ops:docsync:${docId}`) // Log list of ops of each message
handle.on("change", () => this.#syncWithPeers())
handle.on(
"change",
throttle(() => this.#syncWithPeers(), this.syncDebounceRate)
)

@@ -175,3 +176,5 @@ handle.on("ephemeral-message-outbound", payload =>

beginSync(peerIds: PeerId[]) {
const newPeers = new Set(peerIds.filter(peerId => !this.#peers.includes(peerId)))
const newPeers = new Set(
peerIds.filter(peerId => !this.#peers.includes(peerId))
)
this.#log(`beginSync: ${peerIds.join(", ")}`)

@@ -178,0 +181,0 @@

import { EventEmitter } from "eventemitter3"
import { RepoMessage, MessageContents } from "../network/messages.js"
import { MessageContents, RepoMessage } from "../network/messages.js"

@@ -4,0 +4,0 @@ export abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {

import assert from "assert"
import { describe, it, beforeEach } from "vitest"
import { beforeEach, describe, it } from "vitest"
import { PeerId, Repo } from "../src/index.js"

@@ -4,0 +4,0 @@ import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"

import * as A from "@automerge/automerge/next"
import assert from "assert"
import { decode } from "cbor-x"
import assert from "assert"
import { describe, it } from "vitest"

@@ -5,0 +5,0 @@ import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"

@@ -5,3 +5,2 @@ import assert from "assert"

import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
import { eventPromise } from "../src/helpers/eventPromise.js"

@@ -15,5 +14,2 @@ import {

import { TestDoc } from "./types.js"
import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
import { PeerId } from "../src/types.js"
import { TestDoc } from "./types.js"

@@ -20,0 +16,0 @@ const alice = "alice" as PeerId

import { NetworkAdapter } from "../../src/index.js"
export class DummyNetworkAdapter extends NetworkAdapter {
#startReady = true
constructor(startReady: boolean) {
#startReady: boolean
constructor({ startReady = true }: Options = {}) {
super()
this.#startReady = startReady
}
send() { }
send() {}
connect(_: string) {

@@ -15,3 +16,7 @@ if (this.#startReady) {

}
disconnect() { }
disconnect() {}
}
type Options = {
startReady?: boolean
}

@@ -14,6 +14,8 @@ import { StorageAdapter, type StorageKey } from "../../src/index.js"

async loadRange(keyPrefix: StorageKey): Promise<{data: Uint8Array, key: StorageKey}[]> {
async loadRange(
keyPrefix: StorageKey
): Promise<{ data: Uint8Array; key: StorageKey }[]> {
const range = Object.entries(this.#data)
.filter(([key, _]) => key.startsWith(this.#keyToString(keyPrefix)))
.map(([key, data]) => ({key: this.#stringToKey(key), data}))
.map(([key, data]) => ({ key: this.#stringToKey(key), data }))
return Promise.resolve(range)

@@ -20,0 +22,0 @@ }

@@ -28,6 +28,6 @@ import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"

describe("Repo", () => {
describe("single repo", () => {
const setup = (networkReady = true) => {
describe("local only", () => {
const setup = ({ startReady = true } = {}) => {
const storageAdapter = new DummyStorageAdapter()
const networkAdapter = new DummyNetworkAdapter(networkReady)
const networkAdapter = new DummyNetworkAdapter({ startReady })

@@ -186,3 +186,3 @@ const repo = new Repo({

it("doesn't mark a document as unavailable until network adapters are ready", async () => {
const { repo, networkAdapter } = setup(false)
const { repo, networkAdapter } = setup({ startReady: false })
const url = generateAutomergeUrl()

@@ -243,3 +243,3 @@ const handle = repo.find<TestDoc>(url)

await pause()
await pause(150)

@@ -383,21 +383,24 @@ const repo2 = new Repo({

describe("sync", async () => {
const charlieExcludedDocuments: DocumentId[] = []
const bobExcludedDocuments: DocumentId[] = []
describe("with peers (linear network)", async () => {
const setup = async ({ connectAlice = true } = {}) => {
const charlieExcludedDocuments: DocumentId[] = []
const bobExcludedDocuments: DocumentId[] = []
const sharePolicy: SharePolicy = async (peerId, documentId) => {
if (documentId === undefined) return false
const sharePolicy: SharePolicy = async (peerId, documentId) => {
if (documentId === undefined) return false
// make sure that charlie never gets excluded documents
if (charlieExcludedDocuments.includes(documentId) && peerId === "charlie")
return false
// make sure that charlie never gets excluded documents
if (
charlieExcludedDocuments.includes(documentId) &&
peerId === "charlie"
)
return false
// make sure that bob never gets excluded documents
if (bobExcludedDocuments.includes(documentId) && peerId === "bob")
return false
// make sure that bob never gets excluded documents
if (bobExcludedDocuments.includes(documentId) && peerId === "bob")
return false
return true
}
return true
}
const setupLinearNetwork = (connectAlice = true) => {
// Set up three repos; connect Alice to Bob, and Bob to Charlie

@@ -438,77 +441,12 @@

function doConnectAlice() {
function connectAliceToBob() {
aliceRepo.networkSubsystem.addNetworkAdapter(
new MessageChannelNetworkAdapter(ab)
)
//bobRepo.networkSubsystem.addNetworkAdapter(new MessageChannelNetworkAdapter(bobToAlice))
}
if (connectAlice) {
doConnectAlice()
connectAliceToBob()
}
return {
teardown,
aliceRepo,
bobRepo,
charlieRepo,
connectAliceToBob: doConnectAlice,
}
}
const setupMeshNetwork = () => {
// Set up three repos; connect Alice to Bob, Bob to Charlie, and Alice to Charlie
const abChannel = new MessageChannel()
const bcChannel = new MessageChannel()
const acChannel = new MessageChannel()
const { port1: ab, port2: ba } = abChannel
const { port1: bc, port2: cb } = bcChannel
const { port1: ac, port2: ca } = acChannel
const aliceRepo = new Repo({
network: [
new MessageChannelNetworkAdapter(ab),
new MessageChannelNetworkAdapter(ac),
],
peerId: "alice" as PeerId,
sharePolicy,
})
const bobRepo = new Repo({
network: [
new MessageChannelNetworkAdapter(ba),
new MessageChannelNetworkAdapter(bc),
],
peerId: "bob" as PeerId,
sharePolicy,
})
const charlieRepo = new Repo({
network: [
new MessageChannelNetworkAdapter(ca),
new MessageChannelNetworkAdapter(cb),
],
peerId: "charlie" as PeerId,
})
const teardown = () => {
abChannel.port1.close()
bcChannel.port1.close()
acChannel.port1.close()
}
return {
teardown,
aliceRepo,
bobRepo,
charlieRepo,
}
}
const setup = async (connectAlice = true) => {
const { teardown, aliceRepo, bobRepo, charlieRepo, connectAliceToBob } =
setupLinearNetwork(connectAlice)
const aliceHandle = aliceRepo.create<TestDoc>()

@@ -615,3 +553,3 @@ aliceHandle.change(d => {

it("doesn't find a document which doesn't exist anywhere on the network", async () => {
const { charlieRepo } = await setup()
const { charlieRepo, teardown } = await setup()
const url = generateAutomergeUrl()

@@ -623,6 +561,8 @@ const handle = charlieRepo.find<TestDoc>(url)

assert.equal(doc, undefined)
teardown()
})
it("fires an 'unavailable' event when a document is not available on the network", async () => {
const { charlieRepo } = await setup()
const { charlieRepo, teardown } = await setup()
const url = generateAutomergeUrl()

@@ -641,2 +581,4 @@ const handle = charlieRepo.find<TestDoc>(url)

await eventPromise(handle2, "unavailable")
teardown()
})

@@ -651,3 +593,3 @@

connectAliceToBob,
} = await setup(false)
} = await setup({ connectAlice: false })

@@ -702,3 +644,3 @@ const url = stringifyAutomergeUrl({ documentId: notForCharlie })

peerId: "a" as PeerId,
sharePolicy: async () => true
sharePolicy: async () => true,
})

@@ -723,3 +665,2 @@

await handle.whenReady()
})

@@ -750,3 +691,3 @@

it("can emit an 'unavailable' event when it's not found on the network", async () => {
const { charlieRepo } = setupMeshNetwork()
const { charlieRepo, teardown } = await setup()

@@ -758,2 +699,3 @@ const url = generateAutomergeUrl()

await eventPromise(handle, "unavailable")
teardown()
})

@@ -788,3 +730,2 @@

}
await pause(500)

@@ -807,4 +748,2 @@ teardown()

await pause(50)
const charliePromise = new Promise<void>((resolve, reject) => {

@@ -831,8 +770,63 @@ charlieRepo.networkSubsystem.on("message", message => {

})
})
describe("with peers (mesh network)", () => {
const setup = async () => {
// Set up three repos; connect Alice to Bob, Bob to Charlie, and Alice to Charlie
const abChannel = new MessageChannel()
const bcChannel = new MessageChannel()
const acChannel = new MessageChannel()
const { port1: ab, port2: ba } = abChannel
const { port1: bc, port2: cb } = bcChannel
const { port1: ac, port2: ca } = acChannel
const aliceRepo = new Repo({
network: [
new MessageChannelNetworkAdapter(ab),
new MessageChannelNetworkAdapter(ac),
],
peerId: "alice" as PeerId,
})
const bobRepo = new Repo({
network: [
new MessageChannelNetworkAdapter(ba),
new MessageChannelNetworkAdapter(bc),
],
peerId: "bob" as PeerId,
})
const charlieRepo = new Repo({
network: [
new MessageChannelNetworkAdapter(ca),
new MessageChannelNetworkAdapter(cb),
],
peerId: "charlie" as PeerId,
})
const teardown = () => {
abChannel.port1.close()
bcChannel.port1.close()
acChannel.port1.close()
}
await Promise.all([
eventPromise(aliceRepo.networkSubsystem, "peer"),
eventPromise(bobRepo.networkSubsystem, "peer"),
eventPromise(charlieRepo.networkSubsystem, "peer"),
])
return {
teardown,
aliceRepo,
bobRepo,
charlieRepo,
}
}
it("can broadcast a message without entering into an infinite loop", async () => {
const { aliceRepo, bobRepo, charlieRepo } = setupMeshNetwork()
const { aliceRepo, bobRepo, charlieRepo, teardown } = await setup()
const message = { presence: "alex" }
const aliceHandle = aliceRepo.create<TestDoc>()

@@ -843,35 +837,29 @@

const aliceDoesntGetIt = new Promise<void>((resolve, reject) => {
setTimeout(() => {
resolve()
}, 100)
aliceHandle.on("ephemeral-message", () => {
reject(new Error("alice got the message"))
})
// Alice should not receive her own ephemeral message
aliceHandle.on("ephemeral-message", () => {
throw new Error("Alice should not receive her own ephemeral message")
})
// Bob and Charlie should receive Alice's ephemeral message
const bobGotIt = eventPromise(bobHandle, "ephemeral-message")
const charlieGotIt = eventPromise(charlieHandle, "ephemeral-message")
// let things get in sync and peers meet one another
// let peers meet and sync up
await pause(50)
// Alice sends an ephemeral message
const message = { foo: "bar" }
aliceHandle.broadcast(message)
const [bob, charlie] = await Promise.all([
bobGotIt,
charlieGotIt,
aliceDoesntGetIt,
])
const [bob, charlie] = await Promise.all([bobGotIt, charlieGotIt])
assert.deepStrictEqual(bob.message, message)
assert.deepStrictEqual(charlie.message, message)
teardown()
})
it("notifies peers when a document is cloned", async () => {
const { bobRepo, charlieRepo } = setupMeshNetwork()
const { bobRepo, charlieRepo, teardown } = await setup()
// pause to let the network set up
await pause(50)
const handle = bobRepo.create<TestDoc>()

@@ -889,10 +877,9 @@ handle.change(d => {

assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" })
teardown()
})
it("notifies peers when a document is merged", async () => {
const { bobRepo, charlieRepo } = setupMeshNetwork()
const { bobRepo, charlieRepo, teardown } = await setup()
// pause to let the network set up
await pause(50)
const handle = bobRepo.create<TestDoc>()

@@ -918,6 +905,8 @@ handle.change(d => {

// wait for the network to do it's thang
await pause(50)
await pause(350)
await charlieHandle.doc()
assert.deepStrictEqual(charlieHandle.docSync(), { foo: "baz" })
teardown()
})

@@ -927,9 +916,11 @@ })

const warn = console.warn
const NO_OP = () => {}
const disableConsoleWarn = () => {
console["_warn"] = console.warn
console.warn = () => {}
console.warn = NO_OP
}
const reenableConsoleWarn = () => {
console.warn = console["_warn"]
console.warn = warn
}

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc