@automerge/automerge-repo - npm Package Compare versions

Comparing version 1.1.0-alpha.6 to 1.1.0-alpha.7


dist/AutomergeUrl.js

@@ -24,3 +24,3 @@ import * as Uuid from "uuid";

export const stringifyAutomergeUrl = (arg) => {
- let documentId = arg instanceof Uint8Array || typeof arg === "string"
+ const documentId = arg instanceof Uint8Array || typeof arg === "string"
? arg

@@ -27,0 +27,0 @@ : "documentId" in arg

@@ -18,5 +18,5 @@ /** throttle( callback, rate )

timeout = setTimeout(() => {
- fn.apply(null, args);
+ fn(...args);
}, rate);
};
};

@@ -35,3 +35,3 @@ /** Throttle

timeout = setTimeout(() => {
- fn.apply(null, args);
+ fn(...args);
lastCall = Date.now();

@@ -38,0 +38,0 @@ }, wait);
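The two throttle hunks above make a purely mechanical change: calling `fn(...args)` instead of `fn.apply(null, args)`. For orientation, a trailing-edge throttle in the same spirit might look like the sketch below; the name, generics, and internals are illustrative assumptions rather than the package's exact implementation.

```ts
// Illustrative sketch of a trailing-edge throttle (not the package's exact code).
// At most one call to fn is scheduled per `rate` window; invocations that arrive
// while a call is pending only refresh the arguments that will be used.
export const throttle = <Args extends unknown[]>(
  fn: (...args: Args) => void,
  rate: number
) => {
  let timeout: ReturnType<typeof setTimeout> | undefined
  let lastArgs: Args | undefined
  return (...args: Args) => {
    lastArgs = args
    if (timeout === undefined) {
      timeout = setTimeout(() => {
        timeout = undefined
        if (lastArgs) fn(...lastArgs) // spread call, as in the alpha.7 change above
      }, rate)
    }
  }
}
```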

@@ -10,4 +10,4 @@ import { EventEmitter } from "eventemitter3";

export class NetworkAdapter extends EventEmitter {
- peerId; // hmmm, maybe not
+ peerId;
peerMetadata;
}

@@ -72,2 +72,4 @@ import debug from "debug";

networkAdapter.connect(this.peerId, peerMetadata);
+ }).catch(err => {
+   this.#log("error connecting to network", err);
});

@@ -74,0 +76,0 @@ }

@@ -251,3 +251,3 @@ import { EventEmitter } from "eventemitter3";

#isPeerSubscribedToDoc(peerId, documentId) {
- let subscribedDocs = this.#subscribedDocsByPeer.get(peerId);
+ const subscribedDocs = this.#subscribedDocsByPeer.get(peerId);
return subscribedDocs && subscribedDocs.has(documentId);

@@ -266,7 +266,7 @@ }

if (!remote) {
- remote = new Map([[storageId, { heads, timestamp }]]);
+ remote = new Map();
this.#knownHeads.set(documentId, remote);
}
const docRemote = remote.get(storageId);
- if (docRemote && docRemote.timestamp > timestamp) {
+ if (docRemote && docRemote.timestamp >= timestamp) {
continue;

@@ -273,0 +273,0 @@ }

@@ -70,2 +70,7 @@ import { EventEmitter } from "eventemitter3";

id: AnyDocumentId): void;
+ /**
+  * Imports document binary into the repo.
+  * @param binary - The binary to import
+  */
+ import<T>(binary: Uint8Array): DocHandle<T>;
subscribeToRemotes: (remotes: StorageId[]) => void;

@@ -72,0 +77,0 @@ storageId: () => Promise<StorageId | undefined>;

@@ -51,3 +51,3 @@ import { next as Automerge } from "@automerge/automerge";

};
- const debouncedSaveFn = handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate));
+ handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate));
if (isNew) {

@@ -114,3 +114,5 @@ // this is a new document, immediately save it

// The network subsystem deals with sending and receiving messages to and from peers.
- const myPeerMetadata = new Promise(async (resolve, reject) => resolve({
+ const myPeerMetadata = new Promise(
+   // eslint-disable-next-line no-async-promise-executor -- TODO: fix
+   async (resolve) => resolve({
storageId: await storageSubsystem?.id(),

@@ -223,3 +225,6 @@ isEphemeral,

handler = this.#throttledSaveSyncStateHandlers[storageId] = throttle(({ documentId, syncState }) => {
- this.storageSubsystem.saveSyncState(documentId, storageId, syncState);
+ this.storageSubsystem.saveSyncState(documentId, storageId, syncState)
+   .catch(err => {
+     this.#log("error saving sync state", { err });
+   });
}, this.saveDebounceRate);

@@ -346,2 +351,14 @@ }

}
+ /**
+  * Imports document binary into the repo.
+  * @param binary - The binary to import
+  */
+ import(binary) {
+   const doc = Automerge.load(binary);
+   const handle = this.create();
+   handle.update(() => {
+     return Automerge.clone(doc);
+   });
+   return handle;
+ }
subscribeToRemotes = (remotes) => {

@@ -348,0 +365,0 @@ this.#log("subscribeToRemotes", { remotes });

@@ -26,3 +26,3 @@ import * as A from "@automerge/automerge/next";

async id() {
- let storedId = await this.#storageAdapter.load(["storage-adapter-id"]);
+ const storedId = await this.#storageAdapter.load(["storage-adapter-id"]);
let id;

@@ -29,0 +29,0 @@ if (storedId) {

@@ -82,2 +82,4 @@ import * as A from "@automerge/automerge/next";

this.#initSyncState(peerId, syncState ?? A.initSyncState());
+ }).catch(err => {
+   this.#log(`Error loading sync state for ${peerId}: ${err}`);
});

@@ -182,2 +184,4 @@ pendingCallbacks = this.#pendingSyncStateCallbacks[peerId] = [];

}
+ }).catch(err => {
+   this.#log(`Error loading doc for ${peerId}: ${err}`);
});

@@ -184,0 +188,0 @@ });

{
"name": "@automerge/automerge-repo",
- "version": "1.1.0-alpha.6",
+ "version": "1.1.0-alpha.7",
"description": "A repository object to manage a collection of automerge documents",

@@ -12,2 +12,3 @@ "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",

"build": "tsc",
+ "lint": "eslint --ext .ts src",
"watch": "npm-watch build",

@@ -24,3 +25,3 @@ "test:coverage": "c8 --reporter=lcov --reporter=html --reporter=text yarn test",

"http-server": "^14.1.0",
- "vite": "^4.4.11"
+ "vite": "^5.0.8"
},

@@ -60,3 +61,3 @@ "dependencies": {

},
- "gitHead": "77ad0475a3b241a8efdf4e282fdffc8fed09b101"
+ "gitHead": "9a4711e39c93273d992c5686257246ddfaaafddd"
}

@@ -48,2 +48,4 @@ # Automerge Repo

Deletes the local copy of a document from the local cache and local storage. _This does not currently delete the document from any other peers_.
+ - `import(binary: Uint8Array)`
+   Imports a document binary (from `Automerge.save(doc)`) into the repo, returning a new handle
- `.on("document", ({handle: DocHandle}) => void)`
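As a rough usage sketch of the new `import` API described in the hunk above (the document shape, repo options, and logging are illustrative assumptions):

```ts
import * as A from "@automerge/automerge/next"
import { Repo } from "@automerge/automerge-repo"

type MyDoc = { foo: string }

// A document saved to bytes elsewhere, e.g. with Automerge.save(doc).
const saved: Uint8Array = A.save(A.from<MyDoc>({ foo: "bar" }))

const repo = new Repo({ network: [] })

// import() loads the binary into a fresh handle that is ready immediately.
const handle = repo.import<MyDoc>(saved)
console.log(handle.isReady()) // true
handle.doc().then(doc => console.log(doc?.foo)) // "bar"
```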

@@ -68,3 +70,3 @@ Registers a callback to be fired each time a new document is loaded or created.

- `change({handle: DocHandle, patches: Patch[], patchInfo: PatchInfo})`
- `change({handle: DocHandle, patches: Patch[], patchInfo: PatchInfo})`
Called whenever the document changes, the handle's .doc

@@ -90,3 +92,3 @@ - `delete`

storage: new IndexedDBStorageAdapter(),
- sharePolicy: async (peerId: PeerId, documentId: DocumentId) => true // this is the default
+ sharePolicy: async (peerId: PeerId, documentId: DocumentId) => true, // this is the default
})

@@ -96,2 +98,3 @@ ```

### Share Policy
The share policy is used to determine which document in your repo should be _automatically_ shared with other peers. **The default setting is to share all documents with all peers.**
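For illustration, a custom share policy could restrict automatic sharing to a single known peer; the peer id, storage adapter choice, and empty network list below are assumptions, not part of this diff:

```ts
import { Repo, type PeerId } from "@automerge/automerge-repo"
import { IndexedDBStorageAdapter } from "@automerge/automerge-repo-storage-indexeddb"

// Hypothetical peer id of a sync server we trust with every document.
const SYNC_SERVER = "sync-server" as PeerId

const repo = new Repo({
  network: [],
  storage: new IndexedDBStorageAdapter(),
  // Share automatically only with the sync server; every other peer must be
  // handed a document URL explicitly.
  sharePolicy: async (peerId) => peerId === SYNC_SERVER,
})
```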

@@ -106,3 +109,2 @@

```ts
## Starting the demo app

@@ -280,5 +282,6 @@

With gratitude for contributions by:
- Herb Caudill
- Jeremy Rose
- Alex Currie-Clark
- Dylan Mackenzie
- Herb Caudill
- Jeremy Rose
- Alex Currie-Clark
- Dylan Mackenzie

@@ -36,3 +36,3 @@ import type {

) => {
- let documentId =
+ const documentId =
arg instanceof Uint8Array || typeof arg === "string"

@@ -39,0 +39,0 @@ ? arg

@@ -22,5 +22,5 @@ /** throttle( callback, rate )

timeout = setTimeout(() => {
- fn.apply(null, args)
+ fn(...args)
}, rate)
}
}

@@ -39,3 +39,3 @@ /** Throttle

timeout = setTimeout(() => {
- fn.apply(null, args)
+ fn(...args)
lastCall = Date.now()

@@ -42,0 +42,0 @@ }, wait)

@@ -25,3 +25,3 @@ import { EventEmitter } from "eventemitter3"

export abstract class NetworkAdapter extends EventEmitter<NetworkAdapterEvents> {
- peerId?: PeerId // hmmm, maybe not
+ peerId?: PeerId
peerMetadata?: PeerMetadata

@@ -28,0 +28,0 @@

@@ -16,3 +16,2 @@ import debug from "debug"

} from "./messages.js"
- import { StorageId } from "../storage/types.js"

@@ -112,2 +111,4 @@ type EphemeralMessageSource = `${PeerId}:${SessionId}`

networkAdapter.connect(this.peerId, peerMetadata)
+ }).catch(err => {
+   this.#log("error connecting to network", err)
})

@@ -114,0 +115,0 @@ }

@@ -329,3 +329,3 @@ import { next as A } from "@automerge/automerge"

#isPeerSubscribedToDoc(peerId: PeerId, documentId: DocumentId) {
- let subscribedDocs = this.#subscribedDocsByPeer.get(peerId)
+ const subscribedDocs = this.#subscribedDocsByPeer.get(peerId)
return subscribedDocs && subscribedDocs.has(documentId)

@@ -352,3 +352,3 @@ }

if (!remote) {
- remote = new Map([[storageId as StorageId, { heads, timestamp }]])
+ remote = new Map()
this.#knownHeads.set(documentId, remote)

@@ -358,3 +358,3 @@ }

const docRemote = remote.get(storageId as StorageId)
- if (docRemote && docRemote.timestamp > timestamp) {
+ if (docRemote && docRemote.timestamp >= timestamp) {
continue

@@ -361,0 +361,0 @@ } else {

@@ -80,3 +80,3 @@ import { next as Automerge } from "@automerge/automerge"

}
- const debouncedSaveFn = handle.on(
+ handle.on(
"heads-changed",

@@ -157,3 +157,4 @@ throttle(saveFn, this.saveDebounceRate)

const myPeerMetadata: Promise<PeerMetadata> = new Promise(
- async (resolve, reject) =>
+   // eslint-disable-next-line no-async-promise-executor -- TODO: fix
+   async (resolve) =>
resolve({

@@ -305,2 +306,5 @@ storageId: await storageSubsystem?.id(),

this.storageSubsystem!.saveSyncState(documentId, storageId, syncState)
+   .catch(err => {
+     this.#log("error saving sync state", { err })
+   })
},

@@ -456,2 +460,18 @@ this.saveDebounceRate

+ /**
+  * Imports document binary into the repo.
+  * @param binary - The binary to import
+  */
+ import<T>(binary: Uint8Array) {
+   const doc = Automerge.load<T>(binary)
+   const handle = this.create<T>()
+   handle.update(() => {
+     return Automerge.clone(doc)
+   })
+   return handle
+ }
subscribeToRemotes = (remotes: StorageId[]) => {

@@ -458,0 +478,0 @@ this.#log("subscribeToRemotes", { remotes })

@@ -36,3 +36,3 @@ import * as A from "@automerge/automerge/next"

async id(): Promise<StorageId> {
- let storedId = await this.#storageAdapter.load(["storage-adapter-id"])
+ const storedId = await this.#storageAdapter.load(["storage-adapter-id"])

@@ -39,0 +39,0 @@ let id: StorageId

@@ -5,3 +5,3 @@ import debug from "debug"

import { Repo } from "../Repo.js"
- import { DocMessage, RepoMessage } from "../network/messages.js"
+ import { DocMessage } from "../network/messages.js"
import { DocumentId, PeerId } from "../types.js"

@@ -8,0 +8,0 @@ import { DocSynchronizer } from "./DocSynchronizer.js"

@@ -142,2 +142,4 @@ import * as A from "@automerge/automerge/next"

this.#initSyncState(peerId, syncState ?? A.initSyncState())
+ }).catch(err => {
+   this.#log(`Error loading sync state for ${peerId}: ${err}`)
})

@@ -268,2 +270,4 @@ pendingCallbacks = this.#pendingSyncStateCallbacks[peerId] = []

}
+ }).catch(err => {
+   this.#log(`Error loading doc for ${peerId}: ${err}`)
})

@@ -270,0 +274,0 @@ })

@@ -168,7 +168,5 @@ import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"

// we should only be notified of the head changes of doc A
- assert.strictEqual(remoteHeadsChangedMessages.length, 1)
- assert.strictEqual(
-   remoteHeadsChangedMessages[0].documentId,
-   leftTabDocA.documentId
- )
+ const docIds = remoteHeadsChangedMessages.map(d => d.documentId)
+ const uniqueDocIds = [...new Set(docIds)]
+ assert.deepStrictEqual(uniqueDocIds, [leftTabDocA.documentId])
})

@@ -175,0 +173,0 @@

@@ -290,3 +290,3 @@ import * as A from "@automerge/automerge"

- it("should ignore sync states with an older timestamp", async () => {
+ it("should only notify of sync states with a more recent timestamp", async () => {
const remoteHeadsSubscription = new RemoteHeadsSubscriptions()

@@ -302,2 +302,5 @@

// send same message
remoteHeadsSubscription.handleRemoteHeads(docBHeadsChangedForStorageB2)
// send message with old heads

@@ -304,0 +307,0 @@ remoteHeadsSubscription.handleRemoteHeads(docBHeadsChangedForStorageB)

@@ -5,3 +5,3 @@ import { next as A } from "@automerge/automerge"

import * as Uuid from "uuid"
- import { describe, it } from "vitest"
+ import { describe, expect, it } from "vitest"
import { READY } from "../src/DocHandle.js"

@@ -400,2 +400,26 @@ import { parseAutomergeUrl } from "../src/AutomergeUrl.js"

})
+ it("can import an existing document", async () => {
+   const { repo } = setup()
+   const doc = A.init<TestDoc>()
+   const updatedDoc = A.change(doc, d => {
+     d.foo = "bar"
+   })
+   const saved = A.save(updatedDoc)
+   const handle = repo.import<TestDoc>(saved)
+   assert.equal(handle.isReady(), true)
+   const v = await handle.doc()
+   assert.equal(v?.foo, "bar")
+   expect(A.getHistory(v)).toEqual(A.getHistory(updatedDoc))
+ })
+ it("throws an error if we try to import an invalid document", async () => {
+   const { repo } = setup()
+   expect(() => {
+     repo.import<TestDoc>(A.init<TestDoc> as unknown as Uint8Array)
+   }).toThrow()
+ })
})

@@ -402,0 +426,0 @@

Sorry, the diffs of four additional files are not supported yet.
