@blocksuite/store - npm Package Compare versions

Comparing version 0.4.0-20230201220903-262005c to 0.4.0-20230203001426-a8854f1

dist/awareness.d.ts (14 changed lines)

@@ -32,2 +32,9 @@ /// <reference types="@blocksuite/global" />

response?: Response[];
/**
* After a blob block is inserted with cloud sync enabled,
* uploading is triggered automatically;
* the blob id is added to this property
* and removed once the upload finishes.
*/
blobUploading?: string[];
};

@@ -39,2 +46,6 @@ interface AwarenessEvent<Flags extends Record<string, unknown> = BlockSuiteFlags> {

}
export declare enum BlobUploadState {
Uploading = 0,
Uploaded = 1
}
export declare class AwarenessStore<Flags extends Record<string, unknown> = BlockSuiteFlags> {

@@ -51,2 +62,5 @@ readonly awareness: YAwareness<RawAwarenessState<Flags>>;

isReadonly(space: Space): boolean;
setBlobState(blobId: string, state: BlobUploadState): void;
getBlobState(blobId: string): BlobUploadState;
isBlobUploading(blobId: string): boolean;
setRemoteFlag<Key extends keyof Flags>(clientId: number, field: Key, value: Flags[Key]): void;

@@ -53,0 +67,0 @@ setLocalCursor(space: Space, range: SelectionRange | null): void;

@@ -5,2 +5,7 @@ import * as Y from 'yjs';

import { uuidv4 } from './utils/id-generator.js';
export var BlobUploadState;
(function (BlobUploadState) {
BlobUploadState[BlobUploadState["Uploading"] = 0] = "Uploading";
BlobUploadState[BlobUploadState["Uploaded"] = 1] = "Uploaded";
})(BlobUploadState || (BlobUploadState = {}));
export class AwarenessStore {

@@ -82,2 +87,35 @@ constructor(store, awareness, defaultFlags) {

}
setBlobState(blobId, state) {
const uploading = [
...(this.awareness.getLocalState()?.blobUploading ?? []),
];
if (state === BlobUploadState.Uploading) {
if (!uploading.includes(blobId)) {
uploading.push(blobId);
}
}
else if (state === BlobUploadState.Uploaded) {
const position = uploading.findIndex(id => id === blobId);
if (position > -1) {
uploading.splice(position, 1);
}
}
this.awareness.setLocalStateField('blobUploading', uploading);
}
getBlobState(blobId) {
// FIXME: if clientA and clientB both upload the same image,
// neither client can show the image correctly.
const found = [...this.awareness.getStates().entries()].find(([clientId, state]) => {
// assume the local blob always exists, because we cache it in IndexedDB
if (clientId === this.awareness.clientID) {
return;
}
const uploading = state?.blobUploading ?? [];
return uploading.includes(blobId);
});
return found ? BlobUploadState.Uploading : BlobUploadState.Uploaded;
}
isBlobUploading(blobId) {
return this.getBlobState(blobId) === BlobUploadState.Uploading;
}
setRemoteFlag(clientId, field, value) {

@@ -84,0 +122,0 @@ if (!this.getFlag('enable_set_remote_flag')) {
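The awareness diff above adds a small blob-upload state API (setBlobState, getBlobState, isBlobUploading). A minimal usage sketch, assuming the store and the awaited upload come from surrounding application code; the helper name and the top-level re-exports of AwarenessStore and BlobUploadState are assumptions:

```ts
import { BlobUploadState, type AwarenessStore } from '@blocksuite/store';

// Hypothetical helper: flag a blob as uploading in local awareness state while
// cloud sync is in flight, then clear the flag once the upload settles.
export function trackBlobUpload(
  store: AwarenessStore,
  blobId: string,
  upload: Promise<void>
): void {
  store.setBlobState(blobId, BlobUploadState.Uploading);
  void upload.finally(() => store.setBlobState(blobId, BlobUploadState.Uploaded));
}

// UI code can then ask whether any remote peer is still uploading the blob:
//   if (store.isBlobUploading(blobId)) { /* render a placeholder */ }
```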

dist/persistence/blob/cloud-sync-manager.d.ts (30 changed lines)

@@ -1,24 +0,18 @@

import { Signal } from '@blocksuite/global/utils';
import type { BlobId, BlobProvider } from './types.js';
import type { BlobOptionsGetter } from './duplex-provider.js';
import type { BlobId, BlobURL, IDBInstance } from './types.js';
export declare class CloudSyncManager {
private readonly _abortController;
private readonly _db;
private readonly _fetcher;
private readonly _database;
private readonly _pending;
private readonly _pendingPipeline;
private readonly _workspace;
private readonly _onUploadFinished;
private _pipeline;
private initialized;
private _uploadingIds;
private _onUploadStateChanged;
constructor(workspace: string, blobOptionsGetter: BlobOptionsGetter, db: IDBInstance, onUploadStateChanged: Signal<boolean>, onUploadFinished: Signal<BlobId>);
private _handleTaskRetry;
private _taskRunner;
private _addUploadId;
private _removeUploadId;
get(id: BlobId): Promise<BlobURL | null>;
addTask(id: BlobId, type: 'add' | 'delete'): Promise<void>;
private _failed;
private _whenReady;
private _running;
private _workspace;
readonly signals: BlobProvider['signals'];
constructor(workspace: string, blobOptionsGetter: BlobOptionsGetter);
get running(): boolean;
private _runTasks;
addTask(id: BlobId, blob: Blob): Promise<void>;
get(id: BlobId): Promise<Blob | null>;
}
//# sourceMappingURL=cloud-sync-manager.d.ts.map

dist/persistence/blob/cloud-sync-manager.js (185 changed lines)

@@ -1,17 +0,16 @@

import { sleep } from '@blocksuite/global/utils';
import ky from 'ky';
import { Signal, sleep } from '@blocksuite/global/utils';
import { BlobSyncState } from './types.js';
import { getDatabase } from './utils.js';
const RETRY_TIMEOUT = 500;
export class CloudSyncManager {
constructor(workspace, blobOptionsGetter, db, onUploadStateChanged, onUploadFinished) {
this._abortController = new AbortController();
this._pendingPipeline = [];
constructor(workspace, blobOptionsGetter) {
this._pipeline = [];
this.initialized = false;
this._uploadingIds = new Set();
this._onUploadFinished = onUploadFinished;
this._onUploadStateChanged = onUploadStateChanged;
this._failed = [];
this._running = false;
this.signals = {
onBlobSyncStateChange: new Signal(),
};
this._workspace = workspace;
this._fetcher = ky.create({
prefixUrl: blobOptionsGetter('api'),
signal: this._abortController.signal,
throwHttpErrors: false,

@@ -29,93 +28,94 @@ hooks: {

});
this._database = db;
this._pending = getDatabase('pending', workspace);
this._workspace = workspace;
this._pending.keys().then(async (keys) => {
this._pipeline = (await Promise.all(keys.map(async (id) => {
const { retry = 0, type } = (await this._pending.get(id)) || {};
const blob = await db.get(id);
if ((blob || type === 'delete') && type !== 'upload') {
return { id, blob, retry, type };
}
if (blob && type === 'upload') {
console.log('try resume uploading blob:', id);
this.addTask(id, 'add');
}
return undefined;
}))).filter((v) => !!v);
this.initialized = true;
this._pendingPipeline.forEach(task => this._pipeline.push(task));
this._pendingPipeline.length = 0;
this._taskRunner();
this._db = getDatabase('pending', this._workspace);
this._whenReady = this._db.values().then(tasks => {
this._pipeline.push(...tasks.map(task => ({
...task,
retry: 0,
// force retry failed task
failed: false,
})));
this._runTasks();
});
}
async _handleTaskRetry(task, response) {
this._removeUploadId(task.id);
if (response?.status === 413 ||
response?.status === 200 ||
task.retry >= 10) {
// if the blob is too large, we might otherwise retry uploading it forever
if (response?.status === 413) {
console.log('blob too large:', task.id);
}
await this._pending.delete(task.id);
this._onUploadFinished.emit(task.id);
get running() {
return this._running;
}
async _runTasks() {
if (this._running) {
return;
}
else {
await this._pending.set(task.id, {
type: task.type,
retry: task.retry + 1,
});
this._pipeline.push({ ...task, retry: task.retry + 1 });
await sleep(Math.min(10, task.retry) * 100);
}
}
async _taskRunner() {
const signal = this._abortController.signal;
while (!signal.aborted) {
let task;
while (typeof (task = this._pipeline.shift()) !== 'undefined' &&
!signal.aborted) {
try {
const resp = await this._fetcher.head(`${this._workspace}/blob/${task.id}`);
if (resp.status === 404) {
await this._pending.set(task.id, {
type: 'upload',
retry: task.retry,
this._running = true;
while (this._pipeline.length) {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const task = this._pipeline.shift();
try {
const resp = await this._fetcher.head(`${this._workspace}/blob/${task.id}`);
if (resp.status === 404) {
this.signals.onBlobSyncStateChange.emit({
id: task.id,
state: BlobSyncState.Syncing,
});
const response = await this._fetcher.put(`${this._workspace}/blob`, {
body: task.blob,
});
if (response.status === 200) {
this.signals.onBlobSyncStateChange.emit({
id: task.id,
state: BlobSyncState.Success,
});
this._addUploadId(task.id);
const response = await this._fetcher.put(`${this._workspace}/blob`, {
body: task.blob,
retry: task.retry,
await this._db.delete(task.id);
continue;
}
if (response.status === 413 || task.retry >= 10) {
if (response.status === 413) {
console.log('blob too large:', task.id);
}
await this._db.set(task.id, { ...task, failed: true });
this._failed.push({ ...task, failed: true });
this.signals.onBlobSyncStateChange.emit({
id: task.id,
state: BlobSyncState.Failed,
});
await this._handleTaskRetry(task, response);
continue;
}
this._pipeline.push({
...task,
retry: task.retry + 1,
});
}
catch (e) {
console.warn('Error while syncing blob', e);
if (e)
await this._handleTaskRetry(task);
this._taskRunner();
}
}
await sleep(RETRY_TIMEOUT);
catch (e) {
console.warn('Error while syncing blob', e);
this.signals.onBlobSyncStateChange.emit({
id: task.id,
state: BlobSyncState.Failed,
});
}
// task interval
await sleep(500);
}
console.error('CloudSyncManager taskRunner exited');
this._running = false;
}
_addUploadId(id) {
this._uploadingIds.add(id);
this._onUploadStateChanged.emit(Boolean(this._uploadingIds.size));
async addTask(id, blob) {
await this._whenReady;
const buffer = await blob.arrayBuffer();
await this._db.set(id, { id, blob: buffer });
this._pipeline.push({
id,
blob: buffer,
retry: 0,
});
this._runTasks();
this.signals.onBlobSyncStateChange.emit({
id,
state: BlobSyncState.Waiting,
});
return;
}
_removeUploadId(id) {
this._uploadingIds.delete(id);
this._onUploadStateChanged.emit(Boolean(this._uploadingIds.size));
}
async get(id) {
const endpoint = `${this._workspace}/blob/${id}`;
try {
const blob = await this._fetcher
return await this._fetcher
.get(endpoint, { throwHttpErrors: true })
.blob();
await this._database.set(id, await blob.arrayBuffer());
return URL.createObjectURL(blob);
}

@@ -127,18 +127,3 @@ catch (e) {

}
async addTask(id, type) {
const blob = await this._database.get(id);
if (blob || type === 'delete') {
if (this.initialized) {
this._pipeline.push({ id, blob, type, retry: 0 });
}
else {
this._pendingPipeline.push({ id, blob, type, retry: 0 });
}
console.log(this._pipeline, this._pendingPipeline);
}
else {
console.error('Blob not found', id);
}
}
}
//# sourceMappingURL=cloud-sync-manager.js.map
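Putting the rewritten CloudSyncManager together: construction now takes only the workspace and an options getter, addTask takes the Blob itself, and progress is reported through signals.onBlobSyncStateChange. A hedged sketch of consuming it (the option values and logging are illustrative; only the constructor, addTask, and signal shapes come from the diff above):

```ts
import { CloudSyncManager } from './cloud-sync-manager.js';
import { BlobSyncState } from './types.js';

const options = { api: 'https://example.com/api', token: 'secret' } as const;
const manager = new CloudSyncManager('my-workspace', key => options[key]);

// Every task reports its lifecycle: Waiting -> Syncing -> Success | Failed.
manager.signals.onBlobSyncStateChange.on(({ id, state }) => {
  console.log('blob', id, 'is now', BlobSyncState[state]);
});

async function upload(id: string, blob: Blob) {
  // addTask persists the task into the 'pending' IndexedDB store, queues it,
  // starts the task runner if idle, and immediately emits a Waiting state.
  await manager.addTask(id, blob);
}
```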
import { Signal } from '@blocksuite/global/utils';
import type { BlobId, BlobProvider, BlobURL } from './types.js';
import type { BlobId, BlobProvider, BlobURL, BlobSyncStateChangeEvent } from './types.js';
export type BlobOptions = Record<'api' | 'token', string>;

@@ -12,14 +12,9 @@ export type BlobOptionsGetter = (key: keyof BlobOptions) => string | undefined;

private readonly _cloudManager?;
private _uploading;
readonly blobs: Set<string>;
readonly signals: {
blobAdded: Signal<string>;
blobDeleted: Signal<string>;
uploadStateChanged: Signal<boolean>;
uploadFinished: Signal<string>;
onBlobSyncStateChange: Signal<BlobSyncStateChangeEvent>;
};
static init(workspace: string, optionsGetter?: BlobOptionsGetter): Promise<DuplexBlobProvider>;
private _initBlobs;
private constructor();
get uploading(): boolean;
get blobs(): Promise<string[]>;
get(id: BlobId): Promise<BlobURL | null>;

@@ -26,0 +21,0 @@ set(blob: Blob): Promise<BlobId>;

@@ -21,21 +21,7 @@ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {

const provider = new DuplexBlobProvider_1(workspace, optionsGetter);
await provider._initBlobs();
return provider;
}
async _initBlobs() {
const entries = await this._localDB.keys();
for (const key of entries) {
const blobId = key;
this.signals.blobAdded.emit(blobId);
this.blobs.add(blobId);
}
}
constructor(workspace, optionsGetter) {
this._uploading = false;
this.blobs = new Set();
this.signals = {
blobAdded: new Signal(),
blobDeleted: new Signal(),
uploadStateChanged: new Signal(),
uploadFinished: new Signal(),
onBlobSyncStateChange: new Signal(),
};

@@ -46,19 +32,22 @@ this._localDB = getDatabase('blob', workspace);

assertExists(optionsGetter);
this.signals.uploadStateChanged.on(uploading => {
this._uploading = uploading;
this._cloudManager = new CloudSyncManager(workspace, optionsGetter);
this._cloudManager.signals.onBlobSyncStateChange.on(blobState => {
this.signals.onBlobSyncStateChange.emit(blobState);
});
this._cloudManager = new CloudSyncManager(workspace, optionsGetter, this._localDB, this.signals.uploadStateChanged, this.signals.uploadFinished);
}
}
get uploading() {
return this._uploading;
return Boolean(this._cloudManager?.running);
}
get blobs() {
return this._localDB.keys();
}
async get(id) {
const blob = await this._localDB.get(id);
if (!blob) {
const blob = this._cloudManager?.get(id);
const blob = await this._cloudManager?.get(id);
if (blob) {
this.signals.blobAdded.emit(id);
this.blobs.add(id);
return blob;
const buffer = await blob.arrayBuffer();
await this._localDB.set(id, buffer);
return URL.createObjectURL(blob);
}

@@ -75,6 +64,4 @@ return null;

await this._localDB.set(hash, buffer);
this.signals.blobAdded.emit(hash);
this.blobs.add(hash);
}
this._cloudManager?.addTask(hash, 'add');
this._cloudManager?.addTask(hash, blob);
return hash;

@@ -84,9 +71,7 @@ }

await this._localDB.delete(id);
this.signals.blobDeleted.emit(id);
this.blobs.delete(id);
this._cloudManager?.addTask(id, 'delete');
// NOTE: should we delete blob in cloud? When?
// this._cloudManager?.addTask(id, 'delete');
}
async clear() {
await this._localDB.clear();
this.blobs.clear();
}

@@ -93,0 +78,0 @@ };
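The DuplexBlobProvider changes above make get() local-first with a cloud fallback that also backfills the local cache. A simplified standalone sketch of that read path (the function and parameter shapes are illustrative; the actual provider uses its _localDB and _cloudManager fields):

```ts
async function getBlobUrl(
  localDB: {
    get(id: string): Promise<ArrayBuffer | undefined>;
    set(id: string, buf: ArrayBuffer): Promise<void>;
  },
  cloud: { get(id: string): Promise<Blob | null> } | undefined,
  id: string
): Promise<string | null> {
  // 1. Local hit: serve straight from IndexedDB.
  const cached = await localDB.get(id);
  if (cached) {
    return URL.createObjectURL(new Blob([cached]));
  }
  // 2. Miss: ask the cloud manager, cache the bytes, return an object URL.
  const blob = await cloud?.get(id);
  if (!blob) return null;
  await localDB.set(id, await blob.arrayBuffer());
  return URL.createObjectURL(blob);
}
```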

@@ -6,2 +6,3 @@ import type { BlobOptionsGetter, BlobOptions } from './duplex-provider.js';

export type { BlobOptionsGetter, BlobOptions };
export { BlobSyncState } from './types.js';
//# sourceMappingURL=index.d.ts.map

@@ -11,3 +11,3 @@ import { DuplexBlobProvider } from './duplex-provider.js';

const provider = await DuplexBlobProvider.init(workspace, optionsGetter);
storage.addProvider(provider);
storage.setProvider(provider);
return storage;

@@ -18,2 +18,3 @@ }

export { BlobStorage } from './storage.js';
export { BlobSyncState } from './types.js';
//# sourceMappingURL=index.js.map
import { Signal } from '@blocksuite/global/utils';
import type { BlobId, BlobProvider, BlobURL } from './types.js';
import type { BlobId, BlobProvider, BlobURL, BlobSyncStateChangeEvent } from './types.js';
export declare class BlobStorage {
private _providers;
private _provider;
signals: {
blobAdded: Signal<string>;
uploadStateChanged: Signal<boolean>;
onBlobSyncStateChange: Signal<BlobSyncStateChangeEvent>;
};
get uploading(): boolean;
get providers(): Readonly<BlobProvider[]>;
get blobs(): Set<BlobId>;
addProvider(provider: BlobProvider): void;
removeProvider(provider: BlobProvider): void;
get blobs(): Promise<string[]>;
setProvider(provider: BlobProvider | null): void;
get(id: BlobId): Promise<BlobURL | null>;

@@ -15,0 +12,0 @@ set(blob: Blob): Promise<BlobId>;

import { Signal } from '@blocksuite/global/utils';
function assertProviderExist(provider) {
if (!provider) {
throw new Error('No provider found for blob storage');
}
}
export class BlobStorage {
constructor() {
this._providers = [];
this._provider = null;
this.signals = {
blobAdded: new Signal(),
uploadStateChanged: new Signal(),
onBlobSyncStateChange: new Signal(),
};
}
get uploading() {
return this._providers.some(p => p.uploading);
return this._provider?.uploading ?? true;
}
get providers() {
return this._providers;
get blobs() {
return this._provider?.blobs ?? Promise.resolve([]);
}
get blobs() {
// merge all blobs from all providers
const result = new Set();
for (const provider of this._providers) {
for (const blob of provider.blobs) {
result.add(blob);
}
setProvider(provider) {
if (!provider) {
this._provider = null;
return;
}
return result;
}
addProvider(provider) {
this._providers.push(provider);
provider.signals.blobAdded.on(blobId => {
this.signals.blobAdded.emit(blobId);
this._provider = provider;
this._provider.signals.onBlobSyncStateChange.on(state => {
this.signals.onBlobSyncStateChange.emit(state);
});
provider.signals.uploadStateChanged?.on(() => {
this.signals.uploadStateChanged.emit(this.uploading);
});
}
removeProvider(provider) {
this._providers = this._providers.filter(p => p !== provider);
}
async get(id) {
for (const provider of this._providers) {
try {
return await provider.get(id);
}
catch (e) {
console.warn(e);
}
}
throw new Error(`No provider found for blob ${id}`);
assertProviderExist(this._provider);
return await this._provider.get(id);
}
async set(blob) {
let result = null;
for (const provider of this._providers) {
try {
result = await provider.set(blob);
}
catch (e) {
console.warn(e);
}
}
if (result === null)
throw new Error('No provider found for blob');
return result;
assertProviderExist(this._provider);
return await this._provider.set(blob);
}
async delete(id) {
for (const provider of this._providers) {
try {
await provider.delete(id);
}
catch (e) {
console.warn(e);
}
}
assertProviderExist(this._provider);
await this._provider.delete(id);
}
async clear() {
for (const provider of this._providers) {
try {
await provider.clear();
}
catch (e) {
console.warn(e);
}
}
assertProviderExist(this._provider);
await this._provider.clear();
}
}
//# sourceMappingURL=storage.js.map
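BlobStorage now delegates to a single active provider instead of iterating a list. A minimal consumer sketch of the new surface (setProvider, the Promise-based blobs getter, and the forwarded sync signal), with the provider itself left abstract and import paths written as if inside the blob persistence module:

```ts
import { BlobStorage } from './storage.js';
import type { BlobProvider } from './types.js';

async function attach(storage: BlobStorage, provider: BlobProvider) {
  // Replaces addProvider/removeProvider: exactly one provider (or null) is active.
  storage.setProvider(provider);

  // Sync state events from the provider are re-emitted on the storage itself.
  storage.signals.onBlobSyncStateChange.on(({ id, state }) => {
    console.log('blob', id, 'sync state', state);
  });

  // `blobs` is now a Promise of ids rather than a Set.
  const ids = await storage.blobs;
  console.log(`${ids.length} blob(s) known locally`);
}
```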
import type { Signal } from '@blocksuite/global/utils';
export type BlobId = string;
export type BlobURL = string;
export declare enum BlobSyncState {
Waiting = 0,
Syncing = 1,
Success = 2,
Failed = 3
}
export interface BlobSyncStateChangeEvent {
id: BlobId;
state: BlobSyncState;
}
export interface BlobProvider {
blobs: Set<BlobId>;
uploading: boolean;
blobs: Promise<string[]>;
signals: {
blobAdded: Signal<BlobId>;
uploadStateChanged?: Signal<boolean>;
onBlobSyncStateChange: Signal<BlobSyncStateChangeEvent>;
};

@@ -16,2 +25,10 @@ get(id: BlobId): Promise<BlobURL | null>;

}
export interface PendingTask {
id: BlobId;
blob: ArrayBufferLike | undefined;
failed?: boolean;
}
export interface SyncTask extends PendingTask {
retry: number;
}
export type IDBInstance<T = ArrayBufferLike> = {

@@ -22,2 +39,3 @@ get: (key: BlobId) => Promise<T | undefined>;

keys: () => Promise<BlobId[]>;
values: () => Promise<T[]>;
delete: (key: BlobId) => Promise<void>;

@@ -24,0 +42,0 @@ clear: () => Promise<void>;

@@ -1,2 +0,8 @@

export {};
export var BlobSyncState;
(function (BlobSyncState) {
BlobSyncState[BlobSyncState["Waiting"] = 0] = "Waiting";
BlobSyncState[BlobSyncState["Syncing"] = 1] = "Syncing";
BlobSyncState[BlobSyncState["Success"] = 2] = "Success";
BlobSyncState[BlobSyncState["Failed"] = 3] = "Failed";
})(BlobSyncState || (BlobSyncState = {}));
//# sourceMappingURL=types.js.map
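For reference, a toy in-memory provider shaped like the updated BlobProvider surface might look roughly as follows. Purely illustrative: only the blobs/uploading/signals/get/set shapes are visible in the diff; the delete/clear members and the id generation are assumptions.

```ts
import { Signal } from '@blocksuite/global/utils';
import { BlobSyncState } from './types.js';
import type { BlobId, BlobSyncStateChangeEvent, BlobURL } from './types.js';

export class MemoryBlobProvider {
  private _store = new Map<BlobId, Blob>();
  uploading = false;
  readonly signals = {
    onBlobSyncStateChange: new Signal<BlobSyncStateChangeEvent>(),
  };
  get blobs(): Promise<string[]> {
    return Promise.resolve([...this._store.keys()]);
  }
  async get(id: BlobId): Promise<BlobURL | null> {
    const blob = this._store.get(id);
    return blob ? URL.createObjectURL(blob) : null;
  }
  async set(blob: Blob): Promise<BlobId> {
    const id = crypto.randomUUID(); // real providers derive the id from a content hash
    this._store.set(id, blob);
    this.signals.onBlobSyncStateChange.emit({ id, state: BlobSyncState.Success });
    return id;
  }
  async delete(id: BlobId): Promise<void> {
    this._store.delete(id);
  }
  async clear(): Promise<void> {
    this._store.clear();
  }
}
```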

@@ -1,2 +0,2 @@

import { createStore, del, get, keys, set, clear } from 'idb-keyval';
import { createStore, del, get, keys, set, clear, values } from 'idb-keyval';
import { Buffer } from 'buffer';

@@ -15,2 +15,3 @@ export async function sha(input) {

keys: () => keys(db),
values: () => values(db),
delete: (key) => del(key, db),

@@ -17,0 +18,0 @@ clear: () => clear(db),

@@ -51,7 +51,14 @@ import * as Y from 'yjs';

export declare class Text {
private _space;
private _yText;
/**
* @internal
*/
delayedJobs: (() => void)[];
private _shouldTransact;
constructor(space: Space, input: Y.Text | string);
static fromDelta(space: Space, delta: DeltaOperation[]): Text;
constructor(input: Y.Text | string);
/**
* @internal
*/
doDelayedJobs(): void;
static fromDelta(delta: DeltaOperation[]): Text;
get length(): number;

@@ -58,0 +65,0 @@ private _transact;

@@ -70,6 +70,9 @@ /* eslint-disable @typescript-eslint/no-explicit-any */

export class Text {
constructor(space, input) {
constructor(input) {
/**
* @internal
*/
this.delayedJobs = [];
// TODO toggle transact by options
this._shouldTransact = true;
this._space = space;
if (typeof input === 'string') {

@@ -82,5 +85,13 @@ this._yText = new Y.Text(input);

}
static fromDelta(space, delta) {
const result = new Text(space, '');
result.applyDelta(delta);
/**
* @internal
*/
doDelayedJobs() {
this.delayedJobs.forEach(cb => cb());
this.delayedJobs = [];
}
static fromDelta(delta) {
const result = new Text('');
// At this point, the yDoc does not exist yet.
result.delayedJobs.push(() => result.applyDelta(delta));
return result;

@@ -92,11 +103,17 @@ }

_transact(callback) {
if (this._space.awarenessStore.isReadonly(this._space)) {
console.error('cannot modify data in readonly mode');
return;
if (this._shouldTransact) {
const doc = this._yText.doc;
if (!doc) {
throw new Error('cannot find doc');
}
doc.transact(() => {
callback();
}, doc.clientID);
}
const { _space, _shouldTransact } = this;
_shouldTransact ? _space.transact(callback) : callback();
else {
callback();
}
}
clone() {
return new Text(this._space, this._yText.clone());
return new Text(this._yText.clone());
}

@@ -103,0 +120,0 @@ split(index, length) {

@@ -68,2 +68,3 @@ import * as Y from 'yjs';

yBlock.set('prop:text', text._yText);
text.doDelayedJobs();
return;

@@ -70,0 +71,0 @@ }
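Text no longer takes a Space, and fromDelta defers applyDelta until the Y.Text is attached to a doc, which is why page.js calls doDelayedJobs() right after wiring prop:text. A hedged lifecycle sketch (the top-level Text export is an assumption):

```ts
import { Text } from '@blocksuite/store';

// applyDelta is queued as a delayed job because the underlying Y.Text has no doc yet.
const text = Text.fromDelta([
  { insert: 'hello ' },
  { insert: 'world', attributes: { bold: true } },
]);

// Later, when the block is materialized (mirroring the page.js hunk above, where
// yBlock.set('prop:text', text._yText) attaches the Y.Text to the document),
// the queued jobs are flushed:
text.doDelayedJobs();
```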

@@ -11,5 +11,4 @@ /// <reference types="@blocksuite/global" />

import type { BlockSuiteDoc } from '../yjs/index.js';
import BlockTag = BlockSuiteInternal.BlockTag;
import TagSchema = BlockSuiteInternal.TagSchema;
import type { AwarenessStore } from '../awareness.js';
import type { BlockTag, TagSchema } from '@blocksuite/global/database';
export type YBlock = Y.Map<unknown>;

@@ -62,2 +61,3 @@ export type YBlocks = Y.Map<YBlock>;

get canRedo(): boolean;
get YText(): typeof Y.YText;
get Text(): typeof Text;

@@ -70,3 +70,2 @@ undo: () => void;

updateBlockTag<Tag extends BlockTag>(id: BaseBlockModel['id'], tag: Tag): void;
getBlockTags(model: BaseBlockModel): Record<string, BlockTag>;
getBlockTagByTagSchema(model: BaseBlockModel, schema: TagSchema): BlockTag | null;

@@ -73,0 +72,0 @@ getTagSchema(id: TagSchema['id']): TagSchema | null;

@@ -161,2 +161,5 @@ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {

}
get YText() {
return Y.Text;
}
get Text() {

@@ -178,17 +181,20 @@ return Text;

}
tags.set(tag.type, tag);
// Related issue: https://github.com/yjs/yjs/issues/255
const tagMap = new Y.Map();
tagMap.set('schemaId', tag.schemaId);
tagMap.set('value', tag.value);
tags.set(tag.schemaId, tagMap);
});
}
getBlockTags(model) {
getBlockTagByTagSchema(model, schema) {
const tags = this.tags.get(model.id);
if (!tags) {
return {};
const tagMap = tags?.get(schema.id) ?? null;
if (!tagMap) {
return null;
}
// fixme: performance issue
return tags.toJSON();
return {
schemaId: tagMap.get('schemaId'),
value: tagMap.get('value'),
};
}
getBlockTagByTagSchema(model, schema) {
const tags = this.tags.get(model.id);
return tags?.get(schema.id) ?? null;
}
getTagSchema(id) {

@@ -277,2 +283,6 @@ return (this.tagSchema.get(id) ?? null);

}
if (!this.awarenessStore.getFlag('enable_database') &&
flavour === 'affine:database') {
throw new Error('database is not enabled');
}
const clonedProps = { flavour, ...blockProps };

@@ -535,4 +545,3 @@ const id = this._idGenerator();

const yText = yBlock.get('prop:text');
const text = new Text(this, yText);
model.text = text;
model.text = new Text(yText);
if (model.flavour === 'affine:page') {

@@ -684,2 +693,11 @@ model.tags = yBlock.get('meta:tags');

}
else {
if (event.path.includes('meta:tags')) {
// todo: refactor here
const blockId = event.path[2];
const block = this.getBlockById(blockId);
assertExists(block);
block.propsUpdated.emit();
}
}
}

@@ -686,0 +704,0 @@ _handleVersion() {
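Block tags are now stored as one nested Y.Map per tag, keyed by schemaId (see the linked yjs issue). A sketch of the round-trip that updateBlockTag and getBlockTagByTagSchema now perform, with the per-block `tags` map passed in explicitly (the standalone functions are illustrative):

```ts
import * as Y from 'yjs';

// Write: one nested Y.Map per tag, keyed by its schema id.
function writeTag(tags: Y.Map<Y.Map<unknown>>, tag: { schemaId: string; value: unknown }) {
  const tagMap = new Y.Map();
  tagMap.set('schemaId', tag.schemaId);
  tagMap.set('value', tag.value);
  tags.set(tag.schemaId, tagMap);
}

// Read: rebuild the plain BlockTag object from the nested map, or null if absent.
function readTag(tags: Y.Map<Y.Map<unknown>> | undefined, schemaId: string) {
  const tagMap = tags?.get(schemaId) ?? null;
  if (!tagMap) return null;
  return {
    schemaId: tagMap.get('schemaId') as string,
    value: tagMap.get('value'),
  };
}
```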

@@ -10,3 +10,3 @@ /// <reference types="@blocksuite/global" />

import type { BlockSuiteDoc } from '../yjs/index.js';
import type { AwarenessStore } from '../awareness.js';
import { AwarenessStore } from '../awareness.js';
import type { z } from 'zod';

@@ -13,0 +13,0 @@ import { BlockSchema } from '../base.js';

@@ -7,3 +7,4 @@ import * as Y from 'yjs';

import { Indexer } from './search.js';
import { getBlobStorage, } from '../persistence/blob/index.js';
import { getBlobStorage, BlobSyncState, } from '../persistence/blob/index.js';
import { BlobUploadState } from '../awareness.js';
import { BlockSchema } from '../base.js';

@@ -172,2 +173,18 @@ class WorkspaceMeta extends Space {

});
this._blobStorage.then(blobStorage => {
blobStorage?.signals.onBlobSyncStateChange.on(state => {
const blobId = state.id;
const syncState = state.state;
if (syncState === BlobSyncState.Waiting ||
syncState === BlobSyncState.Syncing) {
this.awarenessStore.setBlobState(blobId, BlobUploadState.Uploading);
return;
}
if (syncState === BlobSyncState.Success ||
syncState === BlobSyncState.Failed) {
this.awarenessStore.setBlobState(blobId, BlobUploadState.Uploaded);
return;
}
});
});
}

@@ -174,0 +191,0 @@ else {
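The workspace wiring above collapses the four blob sync states into the two awareness-level upload states. Expressed as a small helper (the function name is hypothetical; the import paths follow the diff):

```ts
import { BlobSyncState } from '../persistence/blob/index.js';
import { BlobUploadState } from '../awareness.js';

// Waiting/Syncing are still "in flight"; Success and Failed both clear the
// uploading indicator (a failed upload is no longer reported as uploading).
export function toUploadState(state: BlobSyncState): BlobUploadState {
  return state === BlobSyncState.Waiting || state === BlobSyncState.Syncing
    ? BlobUploadState.Uploading
    : BlobUploadState.Uploaded;
}
```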

{
"name": "@blocksuite/store",
"version": "0.4.0-20230201220903-262005c",
"version": "0.4.0-20230203001426-a8854f1",
"description": "BlockSuite data store built for general purpose state management.",

@@ -11,3 +11,3 @@ "main": "dist/index.js",

"dependencies": {
"@blocksuite/global": "0.4.0-20230201220903-262005c",
"@blocksuite/global": "0.4.0-20230203001426-a8854f1",
"@types/flexsearch": "^0.7.3",

@@ -14,0 +14,0 @@ "buffer": "^6.0.3",

@@ -423,3 +423,2 @@ /* eslint-disable @typescript-eslint/no-restricted-imports */

text: new page.Text(
page,
'英特尔第13代酷睿i7-1370P移动处理器现身Geekbench,14核心和5GHz'

@@ -431,3 +430,2 @@ ),

text: new page.Text(
page,
'索尼考虑移植《GT赛车7》,又一PlayStation独占IP登陆PC平台'

@@ -434,0 +432,0 @@ ),

@@ -45,2 +45,9 @@ import * as Y from 'yjs';

response?: Response[];
/**
* After a blob block is inserted with cloud sync enabled,
* uploading is triggered automatically;
* the blob id is added to this property
* and removed once the upload finishes.
*/
blobUploading?: string[];
};

@@ -56,2 +63,7 @@

export enum BlobUploadState {
Uploading = 0,
Uploaded = 1,
}
export class AwarenessStore<

@@ -114,2 +126,43 @@ Flags extends Record<string, unknown> = BlockSuiteFlags

setBlobState(blobId: string, state: BlobUploadState) {
const uploading = [
...(this.awareness.getLocalState()?.blobUploading ?? []),
];
if (state === BlobUploadState.Uploading) {
if (!uploading.includes(blobId)) {
uploading.push(blobId);
}
} else if (state === BlobUploadState.Uploaded) {
const position = uploading.findIndex(id => id === blobId);
if (position > -1) {
uploading.splice(position, 1);
}
}
this.awareness.setLocalStateField('blobUploading', uploading);
}
getBlobState(blobId: string) {
// FIXME: if clientA and clientB both upload the same image,
// neither client can show the image correctly.
const found = [...this.awareness.getStates().entries()].find(
([clientId, state]) => {
// assume the local blob always exists, because we cache it in IndexedDB
if (clientId === this.awareness.clientID) {
return;
}
const uploading: string[] = state?.blobUploading ?? [];
return uploading.includes(blobId);
}
);
return found ? BlobUploadState.Uploading : BlobUploadState.Uploaded;
}
isBlobUploading(blobId: string) {
return this.getBlobState(blobId) === BlobUploadState.Uploading;
}
setRemoteFlag<Key extends keyof Flags>(

@@ -116,0 +169,0 @@ clientId: number,

@@ -100,2 +100,3 @@ import type { Page } from './workspace/index.js';

// TODO: separate from model
parentIndex?: number;

@@ -102,0 +103,0 @@ depth?: number;

@@ -50,5 +50,5 @@ // Test page entry located in playground/examples/blob/index.html

storage.signals.blobAdded.on(id => {
storage.signals.onBlobSyncStateChange.on(state => {
called = true;
idCalled = id;
idCalled = state.id;
});

@@ -99,3 +99,4 @@

const id = await storage.set(blob);
return id !== null && storage.blobs.has(id);
const blobs = await storage.blobs;
return id !== null && blobs.includes(id);
});

@@ -111,3 +112,4 @@

testSerial('can get saved blob', async () => {
const id = storage.blobs.values().next().value;
const blobs = await storage.blobs;
const id = blobs[0];
const url = await storage.get(id);

@@ -120,3 +122,3 @@ assertExists(url);

const isCorrectColor = assertColor(img, 100, 100, [193, 193, 193]);
return storage.blobs.size === 1 && isCorrectColor;
return blobs.length === 1 && isCorrectColor;
});

@@ -161,3 +163,4 @@

console.log(id);
const ret = id !== null && storage.blobs.has(id);
const blobs = await storage.blobs;
const ret = id !== null && blobs.includes(id);

@@ -186,3 +189,4 @@ return ret;

const isCorrectColor = assertColor(img, 100, 100, [193, 193, 193]);
const ret = storage.blobs.size === 1 && isCorrectColor;
const blobs = await storage.blobs;
const ret = blobs.length === 1 && isCorrectColor;

@@ -189,0 +193,0 @@ return ret;
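Since storage.blobs is now a Promise of ids rather than a Set, the blob tests await it before asserting. The updated checks boil down to helpers along these lines (names are hypothetical):

```ts
// Was: id !== null && storage.blobs.has(id)
async function blobWasSaved(
  storage: { blobs: Promise<string[]> },
  id: string | null
): Promise<boolean> {
  const blobs = await storage.blobs;
  return id !== null && blobs.includes(id);
}

// Was: storage.blobs.size === 1
async function exactlyOneBlobStored(storage: { blobs: Promise<string[]> }): Promise<boolean> {
  const blobs = await storage.blobs;
  return blobs.length === 1;
}
```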

@@ -0,49 +1,34 @@

import ky from 'ky';
import { Signal, sleep } from '@blocksuite/global/utils';
import ky from 'ky';
import type {
BlobId,
IDBInstance,
PendingTask,
SyncTask,
BlobProvider,
} from './types.js';
import { BlobSyncState } from './types.js';
import type { BlobOptionsGetter } from './duplex-provider.js';
import type { BlobId, BlobURL, IDBInstance } from './types.js';
import { getDatabase } from './utils.js';
const RETRY_TIMEOUT = 500;
type SyncTask = {
id: BlobId;
blob: ArrayBufferLike | undefined;
type: 'add' | 'delete';
retry: number;
};
type BlobStatus = {
status: number;
};
export class CloudSyncManager {
private readonly _abortController = new AbortController();
private readonly _db: IDBInstance<PendingTask>;
private readonly _fetcher: typeof ky;
private readonly _database: IDBInstance;
private readonly _pending: IDBInstance<{
retry: number;
type: 'add' | 'delete' | 'upload';
}>;
private readonly _pendingPipeline: SyncTask[] = [];
private readonly _workspace: string;
private readonly _onUploadFinished: Signal<string>;
private _pipeline: SyncTask[] = [];
private initialized = false;
private _uploadingIds: Set<string> = new Set();
private _onUploadStateChanged: Signal<boolean>;
private _failed: SyncTask[] = [];
private _whenReady!: Promise<void>;
private _running = false;
constructor(
workspace: string,
blobOptionsGetter: BlobOptionsGetter,
db: IDBInstance,
onUploadStateChanged: Signal<boolean>,
onUploadFinished: Signal<BlobId>
) {
this._onUploadFinished = onUploadFinished;
this._onUploadStateChanged = onUploadStateChanged;
private _workspace!: string;
readonly signals: BlobProvider['signals'] = {
onBlobSyncStateChange: new Signal(),
};
constructor(workspace: string, blobOptionsGetter: BlobOptionsGetter) {
this._workspace = workspace;
this._fetcher = ky.create({
prefixUrl: blobOptionsGetter('api'),
signal: this._abortController.signal,
throwHttpErrors: false,

@@ -62,115 +47,108 @@ hooks: {

this._database = db;
this._pending = getDatabase('pending', workspace);
this._workspace = workspace;
this._db = getDatabase<PendingTask>('pending', this._workspace);
this._whenReady = this._db.values().then(tasks => {
this._pipeline.push(
...tasks.map(task => ({
...task,
retry: 0,
// force retry failed task
failed: false,
}))
);
this._pending.keys().then(async keys => {
this._pipeline = (
await Promise.all(
keys.map(async id => {
const { retry = 0, type } = (await this._pending.get(id)) || {};
const blob = await db.get(id);
if ((blob || type === 'delete') && type !== 'upload') {
return { id, blob, retry, type };
}
if (blob && type === 'upload') {
console.log('try resume uploading blob:', id);
this.addTask(id, 'add');
}
return undefined;
})
)
).filter((v): v is SyncTask => !!v);
this.initialized = true;
this._pendingPipeline.forEach(task => this._pipeline.push(task));
this._pendingPipeline.length = 0;
this._taskRunner();
this._runTasks();
});
}
private async _handleTaskRetry(task: SyncTask, response?: BlobStatus) {
this._removeUploadId(task.id);
if (
response?.status === 413 ||
response?.status === 200 ||
task.retry >= 10
) {
// if the blob is too large, we might otherwise retry uploading it forever
if (response?.status === 413) {
console.log('blob too large:', task.id);
}
await this._pending.delete(task.id);
this._onUploadFinished.emit(task.id);
} else {
await this._pending.set(task.id, {
type: task.type,
retry: task.retry + 1,
});
this._pipeline.push({ ...task, retry: task.retry + 1 });
await sleep(Math.min(10, task.retry) * 100);
}
get running() {
return this._running;
}
private async _taskRunner() {
const signal = this._abortController.signal;
private async _runTasks() {
if (this._running) {
return;
}
this._running = true;
while (this._pipeline.length) {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const task = this._pipeline.shift()!;
while (!signal.aborted) {
let task: SyncTask | undefined;
while (
typeof (task = this._pipeline.shift()) !== 'undefined' &&
!signal.aborted
) {
try {
const resp = await this._fetcher.head(
`${this._workspace}/blob/${task.id}`
);
if (resp.status === 404) {
await this._pending.set(task.id, {
type: 'upload',
retry: task.retry,
try {
const resp = await this._fetcher.head(
`${this._workspace}/blob/${task.id}`
);
if (resp.status === 404) {
this.signals.onBlobSyncStateChange.emit({
id: task.id,
state: BlobSyncState.Syncing,
});
const response = await this._fetcher.put(`${this._workspace}/blob`, {
body: task.blob,
});
if (response.status === 200) {
this.signals.onBlobSyncStateChange.emit({
id: task.id,
state: BlobSyncState.Success,
});
this._addUploadId(task.id);
const response = await this._fetcher.put(
`${this._workspace}/blob`,
{
body: task.blob,
retry: task.retry,
}
);
await this._handleTaskRetry(task, response);
await this._db.delete(task.id);
continue;
}
} catch (e) {
console.warn('Error while syncing blob', e);
if (e) await this._handleTaskRetry(task);
this._taskRunner();
if (response.status === 413 || task.retry >= 10) {
if (response.status === 413) {
console.log('blob too large:', task.id);
}
await this._db.set(task.id, { ...task, failed: true });
this._failed.push({ ...task, failed: true });
this.signals.onBlobSyncStateChange.emit({
id: task.id,
state: BlobSyncState.Failed,
});
continue;
}
this._pipeline.push({
...task,
retry: task.retry + 1,
});
}
} catch (e) {
console.warn('Error while syncing blob', e);
this.signals.onBlobSyncStateChange.emit({
id: task.id,
state: BlobSyncState.Failed,
});
}
await sleep(RETRY_TIMEOUT);
// task interval
await sleep(500);
}
console.error('CloudSyncManager taskRunner exited');
this._running = false;
}
private _addUploadId(id: BlobId) {
this._uploadingIds.add(id);
this._onUploadStateChanged.emit(Boolean(this._uploadingIds.size));
async addTask(id: BlobId, blob: Blob) {
await this._whenReady;
const buffer = await blob.arrayBuffer();
await this._db.set(id, { id, blob: buffer });
this._pipeline.push({
id,
blob: buffer,
retry: 0,
});
this._runTasks();
this.signals.onBlobSyncStateChange.emit({
id,
state: BlobSyncState.Waiting,
});
return;
}
private _removeUploadId(id: BlobId) {
this._uploadingIds.delete(id);
this._onUploadStateChanged.emit(Boolean(this._uploadingIds.size));
}
async get(id: BlobId): Promise<BlobURL | null> {
async get(id: BlobId): Promise<Blob | null> {
const endpoint = `${this._workspace}/blob/${id}`;
try {
const blob = await this._fetcher
return await this._fetcher
.get(endpoint, { throwHttpErrors: true })
.blob();
await this._database.set(id, await blob.arrayBuffer());
return URL.createObjectURL(blob);
} catch (e) {

@@ -181,17 +159,2 @@ console.error('Error while getting blob', e);

}
async addTask(id: BlobId, type: 'add' | 'delete') {
const blob = await this._database.get(id);
if (blob || type === 'delete') {
if (this.initialized) {
this._pipeline.push({ id, blob, type, retry: 0 });
} else {
this._pendingPipeline.push({ id, blob, type, retry: 0 });
}
console.log(this._pipeline, this._pendingPipeline);
} else {
console.error('Blob not found', id);
}
}
}
import { assertExists, Signal } from '@blocksuite/global/utils';
import { CloudSyncManager } from './cloud-sync-manager.js';
import type { BlobId, BlobProvider, BlobURL, IDBInstance } from './types.js';
import type {
BlobId,
BlobProvider,
BlobURL,
IDBInstance,
BlobSyncStateChangeEvent,
} from './types.js';
import { getDatabase, sha } from './utils.js';

@@ -30,10 +36,5 @@

private readonly _cloudManager?: CloudSyncManager;
private _uploading = false;
readonly blobs: Set<string> = new Set();
readonly signals = {
blobAdded: new Signal<BlobId>(),
blobDeleted: new Signal<BlobId>(),
uploadStateChanged: new Signal<boolean>(),
uploadFinished: new Signal<BlobId>(),
onBlobSyncStateChange: new Signal<BlobSyncStateChangeEvent>(),
};

@@ -46,15 +47,5 @@

const provider = new DuplexBlobProvider(workspace, optionsGetter);
await provider._initBlobs();
return provider;
}
private async _initBlobs() {
const entries = await this._localDB.keys();
for (const key of entries) {
const blobId = key as BlobId;
this.signals.blobAdded.emit(blobId);
this.blobs.add(blobId);
}
}
private constructor(workspace: string, optionsGetter?: BlobOptionsGetter) {

@@ -66,12 +57,7 @@ this._localDB = getDatabase('blob', workspace);

assertExists(optionsGetter);
this.signals.uploadStateChanged.on(uploading => {
this._uploading = uploading;
this._cloudManager = new CloudSyncManager(workspace, optionsGetter);
this._cloudManager.signals.onBlobSyncStateChange.on(blobState => {
this.signals.onBlobSyncStateChange.emit(blobState);
});
this._cloudManager = new CloudSyncManager(
workspace,
optionsGetter,
this._localDB,
this.signals.uploadStateChanged,
this.signals.uploadFinished
);
}

@@ -81,14 +67,18 @@ }

get uploading() {
return this._uploading;
return Boolean(this._cloudManager?.running);
}
get blobs() {
return this._localDB.keys();
}
async get(id: BlobId): Promise<BlobURL | null> {
const blob = await this._localDB.get(id);
if (!blob) {
const blob = this._cloudManager?.get(id);
const blob = await this._cloudManager?.get(id);
if (blob) {
this.signals.blobAdded.emit(id);
this.blobs.add(id);
const buffer = await blob.arrayBuffer();
await this._localDB.set(id, buffer);
return blob;
return URL.createObjectURL(blob);
}

@@ -107,8 +97,5 @@ return null;

await this._localDB.set(hash, buffer);
this.signals.blobAdded.emit(hash);
this.blobs.add(hash);
}
this._cloudManager?.addTask(hash, 'add');
this._cloudManager?.addTask(hash, blob);

@@ -121,5 +108,4 @@ return hash;

this.signals.blobDeleted.emit(id);
this.blobs.delete(id);
this._cloudManager?.addTask(id, 'delete');
// NOTE: should we delete blob in cloud? When?
// this._cloudManager?.addTask(id, 'delete');
}

@@ -129,4 +115,3 @@

await this._localDB.clear();
this.blobs.clear();
}
}

@@ -16,3 +16,3 @@ import { DuplexBlobProvider } from './duplex-provider.js';

const provider = await DuplexBlobProvider.init(workspace, optionsGetter);
storage.addProvider(provider);
storage.setProvider(provider);

@@ -26,1 +26,2 @@ return storage;

export type { BlobOptionsGetter, BlobOptions };
export { BlobSyncState } from './types.js';
import { Signal } from '@blocksuite/global/utils';
import type { BlobId, BlobProvider, BlobURL } from './types.js';
import type {
BlobId,
BlobProvider,
BlobURL,
BlobSyncStateChangeEvent,
} from './types.js';
function assertProviderExist(
provider: BlobProvider | null | undefined
): asserts provider is BlobProvider {
if (!provider) {
throw new Error('No provider found for blob storage');
}
}
export class BlobStorage {
private _providers: BlobProvider[] = [];
private _provider: BlobProvider | null = null;
signals = {
blobAdded: new Signal<BlobId>(),
uploadStateChanged: new Signal<boolean>(),
onBlobSyncStateChange: new Signal<BlobSyncStateChangeEvent>(),
};
get uploading(): boolean {
return this._providers.some(p => p.uploading);
return this._provider?.uploading ?? true;
}
get providers(): Readonly<BlobProvider[]> {
return this._providers;
get blobs() {
return this._provider?.blobs ?? Promise.resolve([]);
}
get blobs(): Set<BlobId> {
// merge all blobs from all providers
const result = new Set<BlobId>();
for (const provider of this._providers) {
for (const blob of provider.blobs) {
result.add(blob);
}
setProvider(provider: BlobProvider | null) {
if (!provider) {
this._provider = null;
return;
}
return result;
}
addProvider(provider: BlobProvider) {
this._providers.push(provider);
provider.signals.blobAdded.on(blobId => {
this.signals.blobAdded.emit(blobId);
this._provider = provider;
this._provider.signals.onBlobSyncStateChange.on(state => {
this.signals.onBlobSyncStateChange.emit(state);
});
provider.signals.uploadStateChanged?.on(() => {
this.signals.uploadStateChanged.emit(this.uploading);
});
}
removeProvider(provider: BlobProvider) {
this._providers = this._providers.filter(p => p !== provider);
}
async get(id: BlobId): Promise<BlobURL | null> {
assertProviderExist(this._provider);
async get(id: BlobId): Promise<BlobURL | null> {
for (const provider of this._providers) {
try {
return await provider.get(id);
} catch (e) {
console.warn(e);
}
}
throw new Error(`No provider found for blob ${id}`);
return await this._provider.get(id);
}
async set(blob: Blob): Promise<BlobId> {
let result: BlobId | null = null;
for (const provider of this._providers) {
try {
result = await provider.set(blob);
} catch (e) {
console.warn(e);
}
}
assertProviderExist(this._provider);
if (result === null) throw new Error('No provider found for blob');
return result;
return await this._provider.set(blob);
}
async delete(id: BlobId): Promise<void> {
for (const provider of this._providers) {
try {
await provider.delete(id);
} catch (e) {
console.warn(e);
}
}
assertProviderExist(this._provider);
await this._provider.delete(id);
}
async clear(): Promise<void> {
for (const provider of this._providers) {
try {
await provider.clear();
} catch (e) {
console.warn(e);
}
}
assertProviderExist(this._provider);
await this._provider.clear();
}
}

@@ -6,8 +6,19 @@ import type { Signal } from '@blocksuite/global/utils';

export enum BlobSyncState {
Waiting,
Syncing,
Success,
Failed,
}
export interface BlobSyncStateChangeEvent {
id: BlobId;
state: BlobSyncState;
}
export interface BlobProvider {
blobs: Set<BlobId>;
uploading: boolean;
blobs: Promise<string[]>;
signals: {
blobAdded: Signal<BlobId>;
uploadStateChanged?: Signal<boolean>;
onBlobSyncStateChange: Signal<BlobSyncStateChangeEvent>;
};

@@ -20,2 +31,12 @@ get(id: BlobId): Promise<BlobURL | null>;

export interface PendingTask {
id: BlobId;
blob: ArrayBufferLike | undefined;
failed?: boolean;
}
export interface SyncTask extends PendingTask {
retry: number;
}
export type IDBInstance<T = ArrayBufferLike> = {

@@ -26,4 +47,5 @@ get: (key: BlobId) => Promise<T | undefined>;

keys: () => Promise<BlobId[]>;
values: () => Promise<T[]>;
delete: (key: BlobId) => Promise<void>;
clear: () => Promise<void>;
};

@@ -1,2 +0,2 @@

import { createStore, del, get, keys, set, clear } from 'idb-keyval';
import { createStore, del, get, keys, set, clear, values } from 'idb-keyval';
import type { IDBInstance } from './types.js';

@@ -22,2 +22,3 @@ import { Buffer } from 'buffer';

keys: () => keys(db),
values: () => values(db),
delete: (key: string) => del(key, db),

@@ -24,0 +25,0 @@ clear: () => clear(db),

@@ -116,4 +116,7 @@ /* eslint-disable @typescript-eslint/no-explicit-any */

export class Text {
private _space: Space;
private _yText: Y.Text;
/**
* @internal
*/
public delayedJobs: (() => void)[] = [];

@@ -123,4 +126,3 @@ // TODO toggle transact by options

constructor(space: Space, input: Y.Text | string) {
this._space = space;
constructor(input: Y.Text | string) {
if (typeof input === 'string') {

@@ -133,5 +135,14 @@ this._yText = new Y.Text(input);

static fromDelta(space: Space, delta: DeltaOperation[]) {
const result = new Text(space, '');
result.applyDelta(delta);
/**
* @internal
*/
public doDelayedJobs() {
this.delayedJobs.forEach(cb => cb());
this.delayedJobs = [];
}
static fromDelta(delta: DeltaOperation[]) {
const result = new Text('');
// At this point, the yDoc does not exist yet.
result.delayedJobs.push(() => result.applyDelta(delta));
return result;

@@ -145,12 +156,17 @@ }

private _transact(callback: () => void) {
if (this._space.awarenessStore.isReadonly(this._space)) {
console.error('cannot modify data in readonly mode');
return;
if (this._shouldTransact) {
const doc = this._yText.doc;
if (!doc) {
throw new Error('cannot find doc');
}
doc.transact(() => {
callback();
}, doc.clientID);
} else {
callback();
}
const { _space, _shouldTransact } = this;
_shouldTransact ? _space.transact(callback) : callback();
}
clone() {
return new Text(this._space, this._yText.clone());
return new Text(this._yText.clone());
}

@@ -157,0 +173,0 @@

@@ -92,2 +92,3 @@ import * as Y from 'yjs';

yBlock.set('prop:text', text._yText);
text.doDelayedJobs();
return;

@@ -94,0 +95,0 @@ }

@@ -26,5 +26,4 @@ import * as Y from 'yjs';

import { debug } from '@blocksuite/global/debug';
import BlockTag = BlockSuiteInternal.BlockTag;
import TagSchema = BlockSuiteInternal.TagSchema;
import type { AwarenessStore } from '../awareness.js';
import type { BlockTag, TagSchema } from '@blocksuite/global/database';
export type YBlock = Y.Map<unknown>;

@@ -159,2 +158,6 @@ export type YBlocks = Y.Map<YBlock>;

get YText() {
return Y.Text;
}
get Text() {

@@ -201,15 +204,10 @@ return Text;

}
tags.set(tag.type, tag);
// Related issue: https://github.com/yjs/yjs/issues/255
const tagMap = new Y.Map();
tagMap.set('schemaId', tag.schemaId);
tagMap.set('value', tag.value);
tags.set(tag.schemaId, tagMap);
});
}
getBlockTags(model: BaseBlockModel): Record<string, BlockTag> {
const tags = this.tags.get(model.id);
if (!tags) {
return {};
}
// fixme: performance issue
return tags.toJSON();
}
getBlockTagByTagSchema(

@@ -220,3 +218,10 @@ model: BaseBlockModel,

const tags = this.tags.get(model.id);
return (tags?.get(schema.id) as BlockTag) ?? null;
const tagMap = (tags?.get(schema.id) as Y.Map<unknown>) ?? null;
if (!tagMap) {
return null;
}
return {
schemaId: tagMap.get('schemaId') as string,
value: tagMap.get('value') as unknown,
};
}

@@ -339,2 +344,8 @@

}
if (
!this.awarenessStore.getFlag('enable_database') &&
flavour === 'affine:database'
) {
throw new Error('database is not enabled');
}

@@ -712,4 +723,3 @@ const clonedProps: Partial<BlockProps> = { flavour, ...blockProps };

const yText = yBlock.get('prop:text') as Y.Text;
const text = new Text(this, yText);
model.text = text;
model.text = new Text(yText);
if (model.flavour === 'affine:page') {

@@ -876,2 +886,10 @@ model.tags = yBlock.get('meta:tags') as Y.Map<Y.Map<unknown>>;

}
} else {
if (event.path.includes('meta:tags')) {
// todo: refactor here
const blockId = event.path[2] as string;
const block = this.getBlockById(blockId);
assertExists(block);
block.propsUpdated.emit();
}
}

@@ -878,0 +896,0 @@ }

@@ -11,5 +11,6 @@ import * as Y from 'yjs';

getBlobStorage,
BlobSyncState,
} from '../persistence/blob/index.js';
import type { BlockSuiteDoc } from '../yjs/index.js';
import type { AwarenessStore } from '../awareness.js';
import { AwarenessStore, BlobUploadState } from '../awareness.js';
import type { z } from 'zod';

@@ -251,2 +252,23 @@ import { BlockSchema } from '../base.js';

});
this._blobStorage.then(blobStorage => {
blobStorage?.signals.onBlobSyncStateChange.on(state => {
const blobId = state.id;
const syncState = state.state;
if (
syncState === BlobSyncState.Waiting ||
syncState === BlobSyncState.Syncing
) {
this.awarenessStore.setBlobState(blobId, BlobUploadState.Uploading);
return;
}
if (
syncState === BlobSyncState.Success ||
syncState === BlobSyncState.Failed
) {
this.awarenessStore.setBlobState(blobId, BlobUploadState.Uploaded);
return;
}
});
});
} else {

@@ -253,0 +275,0 @@ // blob storage is not reachable in server side

Sorry, the diff of this file is not supported yet (this message is repeated for each remaining file in the comparison).
