chore: remove unused y-indexeddb (#6728)

This commit is contained in:
EYHN 2024-04-29 09:14:41 +00:00
parent 9c8168a066
commit 20116eb940
No known key found for this signature in database
GPG Key ID: 46C9E26A75AB276C
30 changed files with 52 additions and 2257 deletions

View File

@ -52,7 +52,6 @@ const allPackages = [
'packages/common/env',
'packages/common/infra',
'packages/common/theme',
'packages/common/y-indexeddb',
'tools/cli',
];

5
.github/labeler.yml vendored
View File

@ -69,11 +69,6 @@ rust:
- '**/rust-toolchain.toml'
- '**/rustfmt.toml'
package:y-indexeddb:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/y-indexeddb/**/*'
app:core:
- changed-files:
- any-glob-to-any-file:

View File

@ -110,11 +110,10 @@ If you have questions, you are welcome to contact us. One of the best places to
## Ecosystem
| Name | | |
| -------------------------------------------------------- | ---------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
| [@affine/component](packages/frontend/component) | AFFiNE Component Resources | ![](https://img.shields.io/codecov/c/github/toeverything/affine?style=flat-square) |
| [@toeverything/y-indexeddb](packages/common/y-indexeddb) | IndexedDB database adapter for Yjs | [![](https://img.shields.io/npm/dm/@toeverything/y-indexeddb?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/common/theme) | AFFiNE theme | [![](https://img.shields.io/npm/dm/@toeverything/theme?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/theme) |
| Name | | |
| ------------------------------------------------ | -------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- |
| [@affine/component](packages/frontend/component) | AFFiNE Component Resources | ![](https://img.shields.io/codecov/c/github/toeverything/affine?style=flat-square) |
| [@toeverything/theme](packages/common/theme) | AFFiNE theme | [![](https://img.shields.io/npm/dm/@toeverything/theme?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/theme) |
## Upstreams

View File

@ -1 +0,0 @@
lib

View File

@ -1,38 +0,0 @@
# @toeverything/y-indexeddb
## Features
- persistence data in indexeddb
- sub-documents support
- fully TypeScript
## Usage
```ts
import { createIndexedDBProvider, downloadBinary } from '@toeverything/y-indexeddb';
import * as Y from 'yjs';
const yDoc = new Y.Doc({
// we use `guid` as unique key
guid: 'my-doc',
});
// sync yDoc with indexedDB
const provider = createIndexedDBProvider(yDoc);
provider.connect();
await provider.whenSynced.then(() => {
console.log('synced');
provider.disconnect();
});
// download binary data from indexedDB once
downloadBinary(yDoc.guid).then(blob => {
if (blob !== false) {
Y.applyUpdate(yDoc, blob);
}
});
```
## LICENSE
[MIT](https://github.com/toeverything/AFFiNE/blob/canary/LICENSE-MIT)

View File

@ -1,53 +0,0 @@
{
"name": "@toeverything/y-indexeddb",
"type": "module",
"version": "0.14.0",
"description": "IndexedDB database adapter for Yjs",
"repository": "toeverything/AFFiNE",
"author": "toeverything",
"license": "MIT",
"keywords": [
"indexeddb",
"yjs",
"yjs-adapter"
],
"scripts": {
"build": "vite build"
},
"files": [
"dist"
],
"exports": {
".": "./src/index.ts"
},
"publishConfig": {
"access": "public",
"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.js",
"require": "./dist/index.cjs",
"default": "./dist/index.umd.cjs"
}
}
},
"dependencies": {
"@blocksuite/global": "0.14.0-canary-202404280529-c8e5f89",
"idb": "^8.0.0",
"nanoid": "^5.0.7",
"y-provider": "workspace:*"
},
"devDependencies": {
"@blocksuite/blocks": "0.14.0-canary-202404280529-c8e5f89",
"@blocksuite/store": "0.14.0-canary-202404280529-c8e5f89",
"fake-indexeddb": "^5.0.2",
"vite": "^5.2.8",
"vite-plugin-dts": "3.8.1",
"vitest": "1.4.0",
"y-indexeddb": "^9.0.12",
"yjs": "^13.6.14"
},
"peerDependencies": {
"yjs": "^13"
}
}

View File

@ -1,21 +0,0 @@
{
"name": "y-indexeddb",
"$schema": "../../../node_modules/nx/schemas/project-schema.json",
"projectType": "library",
"sourceRoot": "packages/common/y-indexeddb/src",
"targets": {
"build": {
"executor": "@nx/vite:build",
"options": {
"outputPath": "packages/common/y-indexeddb/dist"
}
},
"serve": {
"executor": "@nx/vite:build",
"options": {
"outputPath": "packages/common/y-indexeddb/dist",
"watch": true
}
}
}
}

View File

@ -1,495 +0,0 @@
/**
* @vitest-environment happy-dom
*/
import 'fake-indexeddb/auto';
import { setTimeout } from 'node:timers/promises';
import { AffineSchemas } from '@blocksuite/blocks/schemas';
import { assertExists } from '@blocksuite/global/utils';
import type { Doc } from '@blocksuite/store';
import { DocCollection, Schema } from '@blocksuite/store';
import { openDB } from 'idb';
import { nanoid } from 'nanoid';
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
import { applyUpdate, Doc as YDoc, encodeStateAsUpdate } from 'yjs';
import type { WorkspacePersist } from '../index';
import {
createIndexedDBProvider,
dbVersion,
DEFAULT_DB_NAME,
downloadBinary,
getMilestones,
markMilestone,
overwriteBinary,
revertUpdate,
setMergeCount,
} from '../index';
/**
 * Populate `page` with the minimal block tree the tests need:
 * page -> surface, page -> note -> paragraph.
 * Returns the ids of all four created blocks.
 */
function initEmptyPage(page: Doc) {
  const pageBlockId = page.addBlock(
    'affine:page' as keyof BlockSuite.BlockModels,
    { title: new page.Text('') }
  );
  const surfaceBlockId = page.addBlock(
    'affine:surface' as keyof BlockSuite.BlockModels,
    {},
    pageBlockId
  );
  const frameBlockId = page.addBlock(
    'affine:note' as keyof BlockSuite.BlockModels,
    {},
    pageBlockId
  );
  const paragraphBlockId = page.addBlock(
    'affine:paragraph' as keyof BlockSuite.BlockModels,
    {},
    frameBlockId
  );
  return { pageBlockId, surfaceBlockId, frameBlockId, paragraphBlockId };
}
// Helper: read the persisted update list for workspace `id` straight from
// IndexedDB (module-level `rootDBName`) and return the raw update payloads.
// NOTE(review): asserts the record exists — callers must have synced first.
async function getUpdates(id: string): Promise<Uint8Array[]> {
const db = await openDB(rootDBName, dbVersion);
const store = db
.transaction('workspace', 'readonly')
.objectStore('workspace');
const data = (await store.get(id)) as WorkspacePersist | undefined;
assertExists(data, 'data should not be undefined');
expect(data.id).toBe(id);
return data.updates.map(({ update }) => update);
}
// Shared test fixtures: a fresh workspace id and DocCollection per test.
let id: string;
let docCollection: DocCollection;
const rootDBName = DEFAULT_DB_NAME;
const schema = new Schema();
schema.register(AffineSchemas);
beforeEach(() => {
id = nanoid();
docCollection = new DocCollection({
id,
schema,
});
// Only fake requestIdleCallback; real timers keep `await setTimeout(...)` working.
vi.useFakeTimers({ toFake: ['requestIdleCallback'] });
});
afterEach(() => {
// Drop the default database and the migration flag so tests stay isolated.
indexedDB.deleteDatabase('affine-local');
localStorage.clear();
});
describe('indexeddb provider', () => {
test('connect', async () => {
const provider = createIndexedDBProvider(docCollection.doc);
provider.connect();
// todo: has a better way to know when data is synced
await setTimeout(200);
const db = await openDB(rootDBName, dbVersion);
{
const store = db
.transaction('workspace', 'readonly')
.objectStore('workspace');
const data = await store.get(id);
expect(data).toEqual({
id,
updates: [
{
timestamp: expect.any(Number),
update: encodeStateAsUpdate(docCollection.doc),
},
],
});
const page = docCollection.createDoc({ id: 'page0' });
page.load();
const pageBlockId = page.addBlock(
'affine:page' as keyof BlockSuite.BlockModels,
{}
);
const frameId = page.addBlock(
'affine:note' as keyof BlockSuite.BlockModels,
{},
pageBlockId
);
page.addBlock(
'affine:paragraph' as keyof BlockSuite.BlockModels,
{},
frameId
);
}
await setTimeout(200);
{
const store = db
.transaction('workspace', 'readonly')
.objectStore('workspace');
const data = (await store.get(id)) as WorkspacePersist | undefined;
assertExists(data);
expect(data.id).toBe(id);
const testWorkspace = new DocCollection({
id: 'test',
schema,
});
// data should only contain updates for the root doc
data.updates.forEach(({ update }) => {
DocCollection.Y.applyUpdate(testWorkspace.doc, update);
});
const subPage = testWorkspace.doc.spaces.get('page0');
{
assertExists(subPage);
await store.get(subPage.guid);
const data = (await store.get(subPage.guid)) as
| WorkspacePersist
| undefined;
assertExists(data);
testWorkspace.getDoc('page0')?.load();
data.updates.forEach(({ update }) => {
DocCollection.Y.applyUpdate(subPage, update);
});
}
expect(docCollection.doc.toJSON()).toEqual(testWorkspace.doc.toJSON());
}
});
test('connect and disconnect', async () => {
const provider = createIndexedDBProvider(docCollection.doc, rootDBName);
provider.connect();
expect(provider.connected).toBe(true);
await setTimeout(200);
const snapshot = encodeStateAsUpdate(docCollection.doc);
provider.disconnect();
expect(provider.connected).toBe(false);
{
const page = docCollection.createDoc({ id: 'page0' });
page.load();
const pageBlockId = page.addBlock(
'affine:page' as keyof BlockSuite.BlockModels
);
const frameId = page.addBlock(
'affine:note' as keyof BlockSuite.BlockModels,
{},
pageBlockId
);
page.addBlock(
'affine:paragraph' as keyof BlockSuite.BlockModels,
{},
frameId
);
}
{
const updates = await getUpdates(docCollection.id);
expect(updates.length).toBe(1);
expect(updates[0]).toEqual(snapshot);
}
expect(provider.connected).toBe(false);
provider.connect();
expect(provider.connected).toBe(true);
await setTimeout(200);
{
const updates = await getUpdates(docCollection.id);
expect(updates).not.toEqual([]);
}
expect(provider.connected).toBe(true);
provider.disconnect();
expect(provider.connected).toBe(false);
});
test('cleanup', async () => {
const provider = createIndexedDBProvider(docCollection.doc);
provider.connect();
await setTimeout(200);
const db = await openDB(rootDBName, dbVersion);
{
const store = db
.transaction('workspace', 'readonly')
.objectStore('workspace');
const keys = await store.getAllKeys();
expect(keys).contain(docCollection.id);
}
await provider.cleanup();
provider.disconnect();
{
const store = db
.transaction('workspace', 'readonly')
.objectStore('workspace');
const keys = await store.getAllKeys();
expect(keys).not.contain(docCollection.id);
}
});
test('merge', async () => {
setMergeCount(5);
const provider = createIndexedDBProvider(docCollection.doc, rootDBName);
provider.connect();
{
const page = docCollection.createDoc({ id: 'page0' });
page.load();
const pageBlockId = page.addBlock(
'affine:page' as keyof BlockSuite.BlockModels
);
const frameId = page.addBlock(
'affine:note' as keyof BlockSuite.BlockModels,
{},
pageBlockId
);
for (let i = 0; i < 99; i++) {
page.addBlock(
'affine:paragraph' as keyof BlockSuite.BlockModels,
{},
frameId
);
}
}
await setTimeout(200);
{
const updates = await getUpdates(id);
expect(updates.length).lessThanOrEqual(5);
}
});
test("data won't be lost", async () => {
const doc = new DocCollection.Y.Doc();
const map = doc.getMap('map');
for (let i = 0; i < 100; i++) {
map.set(`${i}`, i);
}
{
const provider = createIndexedDBProvider(doc, rootDBName);
provider.connect();
provider.disconnect();
}
{
const newDoc = new DocCollection.Y.Doc();
const provider = createIndexedDBProvider(newDoc, rootDBName);
provider.connect();
provider.disconnect();
newDoc.getMap('map').forEach((value, key) => {
expect(value).toBe(parseInt(key));
});
}
});
test('beforeunload', async () => {
const oldAddEventListener = window.addEventListener;
window.addEventListener = vi.fn((event: string, fn, options) => {
expect(event).toBe('beforeunload');
return oldAddEventListener(event, fn, options);
});
const oldRemoveEventListener = window.removeEventListener;
window.removeEventListener = vi.fn((event: string, fn, options) => {
expect(event).toBe('beforeunload');
return oldRemoveEventListener(event, fn, options);
});
const doc = new YDoc({
guid: '1',
});
const provider = createIndexedDBProvider(doc);
const map = doc.getMap('map');
map.set('1', 1);
provider.connect();
await setTimeout(200);
expect(window.addEventListener).toBeCalledTimes(1);
expect(window.removeEventListener).toBeCalledTimes(1);
window.addEventListener = oldAddEventListener;
window.removeEventListener = oldRemoveEventListener;
});
});
describe('milestone', () => {
test('milestone', async () => {
const doc = new YDoc();
const map = doc.getMap('map');
const array = doc.getArray('array');
map.set('1', 1);
array.push([1]);
await markMilestone('1', doc, 'test1');
const milestones = await getMilestones('1');
assertExists(milestones);
expect(milestones).toBeDefined();
expect(Object.keys(milestones).length).toBe(1);
expect(milestones.test1).toBeInstanceOf(Uint8Array);
const snapshot = new YDoc();
applyUpdate(snapshot, milestones.test1);
{
const map = snapshot.getMap('map');
expect(map.get('1')).toBe(1);
}
map.set('1', 2);
{
const map = snapshot.getMap('map');
expect(map.get('1')).toBe(1);
}
revertUpdate(doc, milestones.test1, key =>
key === 'map' ? 'Map' : 'Array'
);
{
const map = doc.getMap('map');
expect(map.get('1')).toBe(1);
}
const fn = vi.fn(() => true);
doc.gcFilter = fn;
expect(fn).toBeCalledTimes(0);
for (let i = 0; i < 1e5; i++) {
map.set(`${i}`, i + 1);
}
for (let i = 0; i < 1e5; i++) {
map.delete(`${i}`);
}
for (let i = 0; i < 1e5; i++) {
map.set(`${i}`, i - 1);
}
expect(fn).toBeCalled();
const doc2 = new YDoc();
applyUpdate(doc2, encodeStateAsUpdate(doc));
revertUpdate(doc2, milestones.test1, key =>
key === 'map' ? 'Map' : 'Array'
);
{
const map = doc2.getMap('map');
expect(map.get('1')).toBe(1);
}
});
});
describe('subDoc', () => {
test('basic', async () => {
let json1: any, json2: any;
{
const doc = new YDoc({
guid: 'test',
});
const map = doc.getMap();
const subDoc = new YDoc();
subDoc.load();
map.set('1', subDoc);
map.set('2', 'test');
const provider = createIndexedDBProvider(doc);
provider.connect();
await setTimeout(200);
provider.disconnect();
json1 = doc.toJSON();
}
{
const doc = new YDoc({
guid: 'test',
});
const provider = createIndexedDBProvider(doc);
provider.connect();
await setTimeout(200);
const map = doc.getMap();
const subDoc = map.get('1') as YDoc;
subDoc.load();
provider.disconnect();
json2 = doc.toJSON();
}
// the following line compares {} with {}
expect(json1['']['1'].toJSON()).toEqual(json2['']['1'].toJSON());
expect(json1['']['2']).toEqual(json2['']['2']);
});
test('blocksuite', async () => {
const page0 = docCollection.createDoc({
id: 'page0',
});
page0.load();
const { paragraphBlockId: paragraphBlockIdPage1 } = initEmptyPage(page0);
const provider = createIndexedDBProvider(docCollection.doc, rootDBName);
provider.connect();
const page1 = docCollection.createDoc({
id: 'page1',
});
page1.load();
const { paragraphBlockId: paragraphBlockIdPage2 } = initEmptyPage(page1);
await setTimeout(200);
provider.disconnect();
{
const docCollection = new DocCollection({
id,
schema,
});
const provider = createIndexedDBProvider(docCollection.doc, rootDBName);
provider.connect();
await setTimeout(200);
const page0 = docCollection.getDoc('page0') as Doc;
page0.load();
await setTimeout(200);
{
const block = page0.getBlockById(paragraphBlockIdPage1);
assertExists(block);
}
const page1 = docCollection.getDoc('page1') as Doc;
page1.load();
await setTimeout(200);
{
const block = page1.getBlockById(paragraphBlockIdPage2);
assertExists(block);
}
}
});
});
describe('utils', () => {
test('download binary', async () => {
const page = docCollection.createDoc({ id: 'page0' });
page.load();
initEmptyPage(page);
const provider = createIndexedDBProvider(docCollection.doc, rootDBName);
provider.connect();
await setTimeout(200);
provider.disconnect();
const update = (await downloadBinary(
docCollection.id,
rootDBName
)) as Uint8Array;
expect(update).toBeInstanceOf(Uint8Array);
const newDocCollection = new DocCollection({
id,
schema,
});
applyUpdate(newDocCollection.doc, update);
await setTimeout();
expect(docCollection.doc.toJSON()['meta']).toEqual(
newDocCollection.doc.toJSON()['meta']
);
expect(Object.keys(docCollection.doc.toJSON()['spaces'])).toEqual(
Object.keys(newDocCollection.doc.toJSON()['spaces'])
);
});
test('overwrite binary', async () => {
const doc = new YDoc();
const map = doc.getMap();
map.set('1', 1);
await overwriteBinary('test', new Uint8Array(encodeStateAsUpdate(doc)));
{
const binary = await downloadBinary('test');
expect(binary).toEqual(new Uint8Array(encodeStateAsUpdate(doc)));
}
});
});

View File

@ -1,134 +0,0 @@
import { openDB } from 'idb';
import {
applyUpdate,
Doc,
encodeStateAsUpdate,
encodeStateVector,
UndoManager,
} from 'yjs';
import type { BlockSuiteBinaryDB, WorkspaceMilestone } from './shared';
import { dbVersion, DEFAULT_DB_NAME, upgradeDB } from './shared';
const snapshotOrigin = 'snapshot-origin';
/**
 * `beforeunload` handler that warns the user about unsaved data.
 * Registered only while a write operation is in flight.
 * @internal
 */
const saveAlert = (event: BeforeUnloadEvent) => {
  event.preventDefault();
  return (event.returnValue =
    'Data is not saved. Are you sure you want to leave?');
};

/**
 * Wrap a pending IndexedDB write so the user is warned when they try to
 * close the page before the write settles.
 *
 * Fix: the listener is now removed in a `finally` block — previously a
 * rejected `op` skipped `removeEventListener` and leaked a permanent
 * `beforeunload` warning.
 */
export const writeOperation = async (op: Promise<unknown>) => {
  window.addEventListener('beforeunload', saveAlert, {
    capture: true,
  });
  try {
    await op;
  } finally {
    window.removeEventListener('beforeunload', saveAlert, {
      capture: true,
    });
  }
};
/**
 * Revert `doc` to the state captured in `snapshotUpdate`.
 *
 * Strategy: rebuild the snapshot in a scratch doc, replay everything that
 * happened since the snapshot into it, undo that replay via an UndoManager
 * scoped to `snapshotOrigin`, then apply the resulting "revert" diff back
 * onto the live doc. Statement order is significant throughout.
 *
 * @param getMetadata maps each shared root key to its Yjs type so the
 *   matching getter can be used; throws on any other value.
 */
export function revertUpdate(
doc: Doc,
snapshotUpdate: Uint8Array,
getMetadata: (key: string) => 'Text' | 'Map' | 'Array'
) {
const snapshotDoc = new Doc();
applyUpdate(snapshotDoc, snapshotUpdate, snapshotOrigin);
const currentStateVector = encodeStateVector(doc);
const snapshotStateVector = encodeStateVector(snapshotDoc);
// Everything the live doc has that the snapshot does not.
const changesSinceSnapshotUpdate = encodeStateAsUpdate(
doc,
snapshotStateVector
);
// Track every shared root of the snapshot so the undo below covers them all.
const undoManager = new UndoManager(
[...snapshotDoc.share.keys()].map(key => {
const type = getMetadata(key);
if (type === 'Text') {
return snapshotDoc.getText(key);
} else if (type === 'Map') {
return snapshotDoc.getMap(key);
} else if (type === 'Array') {
return snapshotDoc.getArray(key);
}
throw new Error('Unknown type');
}),
{
trackedOrigins: new Set([snapshotOrigin]),
}
);
// Replay post-snapshot changes, then undo them to synthesize the revert.
applyUpdate(snapshotDoc, changesSinceSnapshotUpdate, snapshotOrigin);
undoManager.undo();
// Diff relative to the live doc's current state, then apply it.
const revertChangesSinceSnapshotUpdate = encodeStateAsUpdate(
snapshotDoc,
currentStateVector
);
applyUpdate(doc, revertChangesSinceSnapshotUpdate, snapshotOrigin);
}
/** Thrown when the provider disconnects before the initial sync completes. */
export class EarlyDisconnectError extends Error {
  constructor() {
    super('Early disconnect');
    // Subclassing Error leaves `name` as 'Error'; set it explicitly so
    // stack traces and logs identify the actual error type.
    this.name = 'EarlyDisconnectError';
  }
}

/** Thrown when `cleanup()` is invoked while the provider is still connecting. */
export class CleanupWhenConnectingError extends Error {
  constructor() {
    super('Cleanup when connecting');
    this.name = 'CleanupWhenConnectingError';
  }
}
/**
 * Save a named snapshot (milestone) of `doc` into the `milestone` store,
 * keyed by workspace `id`. Later milestones with the same `name` overwrite
 * earlier ones; different names accumulate on the same record.
 *
 * Fix: the lookup previously used the literal string `'id'`
 * (`store.get('id')`) instead of the `id` argument, so an existing record
 * was never found and each call clobbered the milestone map with a
 * single-entry object.
 */
export const markMilestone = async (
  id: string,
  doc: Doc,
  name: string,
  dbName = DEFAULT_DB_NAME
): Promise<void> => {
  const dbPromise = openDB<BlockSuiteBinaryDB>(dbName, dbVersion, {
    upgrade: upgradeDB,
  });
  const db = await dbPromise;
  const store = db
    .transaction('milestone', 'readwrite')
    .objectStore('milestone');
  const milestone = await store.get(id); // was: store.get('id')
  const binary = encodeStateAsUpdate(doc);
  if (!milestone) {
    await store.put({
      id,
      milestone: {
        [name]: binary,
      },
    });
  } else {
    milestone.milestone[name] = binary;
    await store.put(milestone);
  }
};
/**
 * Fetch the milestone map stored for workspace `id`, or `null` when no
 * milestone record exists.
 */
export const getMilestones = async (
  id: string,
  dbName: string = DEFAULT_DB_NAME
): Promise<null | WorkspaceMilestone['milestone']> => {
  const db = await openDB<BlockSuiteBinaryDB>(dbName, dbVersion, {
    upgrade: upgradeDB,
  });
  const entry = await db
    .transaction('milestone', 'readonly')
    .objectStore('milestone')
    .get(id);
  return entry ? entry.milestone : null;
};
export * from './provider';
export * from './shared';
export * from './utils';

View File

@ -1,157 +0,0 @@
import { assertExists } from '@blocksuite/global/utils';
import type { IDBPDatabase } from 'idb';
import { openDB } from 'idb';
import type { DocDataSource } from 'y-provider';
import { createLazyProvider, writeOperation } from 'y-provider';
import type { Doc } from 'yjs';
import { diffUpdate, encodeStateVectorFromUpdate } from 'yjs';
import type {
BlockSuiteBinaryDB,
IndexedDBProvider,
UpdateMessage,
} from './shared';
import { dbVersion, DEFAULT_DB_NAME, upgradeDB } from './shared';
import { mergeUpdates } from './utils';
// Number of stored updates at which the provider merges them into one row.
let mergeCount = 500;

/**
 * Override how many pending updates may accumulate in the workspace store
 * before they are merged into a single update on the next write.
 */
export function setMergeCount(value: number) {
  mergeCount = value;
}
/**
 * Build a `DocDataSource` backed by IndexedDB, plus `disconnect`/`cleanup`
 * helpers. The database is opened lazily on first use and reopened after
 * `disconnect`. Updates per doc are appended as rows and merged into one
 * row once `mergeCount` is reached.
 */
export const createIndexedDBDatasource = ({
dbName = DEFAULT_DB_NAME,
mergeCount,
}: {
dbName?: string;
mergeCount?: number;
}) => {
// Lazily-opened db handle; reset to null by `disconnect` so a later call
// reopens it.
let dbPromise: Promise<IDBPDatabase<BlockSuiteBinaryDB>> | null = null;
const getDb = async () => {
if (dbPromise === null) {
dbPromise = openDB<BlockSuiteBinaryDB>(dbName, dbVersion, {
upgrade: upgradeDB,
});
}
return dbPromise;
};
const adapter = {
// Return the merged state (and diff vs. options.stateVector) for `guid`,
// or `false` when nothing is stored.
queryDocState: async (guid, options) => {
try {
const db = await getDb();
const store = db
.transaction('workspace', 'readonly')
.objectStore('workspace');
const data = await store.get(guid);
if (!data) {
return false;
}
const { updates } = data;
const update = mergeUpdates(updates.map(({ update }) => update));
const missing = options?.stateVector
? diffUpdate(update, options?.stateVector)
: update;
return { missing, state: encodeStateVectorFromUpdate(update) };
} catch (err: any) {
// A closing connection (e.g. during teardown) is treated as "no data";
// anything else is a real failure.
if (!err.message?.includes('The database connection is closing.')) {
throw err;
}
return false;
}
},
// Append `update` for `guid`, merging all rows once mergeCount is hit.
sendDocUpdate: async (guid, update) => {
try {
const db = await getDb();
const store = db
.transaction('workspace', 'readwrite')
.objectStore('workspace');
// TODO: maybe we do not need to get data every time
const { updates } = (await store.get(guid)) ?? { updates: [] };
let rows: UpdateMessage[] = [
...updates,
{ timestamp: Date.now(), update },
];
if (mergeCount && rows.length >= mergeCount) {
const merged = mergeUpdates(rows.map(({ update }) => update));
rows = [{ timestamp: Date.now(), update: merged }];
}
// writeOperation warns the user via beforeunload until the put settles.
await writeOperation(
store.put({
id: guid,
updates: rows,
})
);
} catch (err: any) {
// Swallow only the "connection is closing" race; rethrow the rest.
if (!err.message?.includes('The database connection is closing.')) {
throw err;
}
}
},
} satisfies DocDataSource;
return {
...adapter,
// Close the handle and drop the cached promise so getDb() reopens later.
disconnect: () => {
getDb()
.then(db => db.close())
.then(() => {
dbPromise = null;
})
.catch(console.error);
},
// Remove every persisted doc (milestones are untouched).
cleanup: async () => {
const db = await getDb();
await db.clear('workspace');
},
};
};
/**
 * Create an IndexedDB-backed provider for `doc`.
 *
 * We use `doc.guid` as the unique key — make sure it does not change.
 * `connect` builds a fresh lazy provider each time (reconnecting first
 * disconnects); `disconnect` also closes the underlying datasource.
 */
export const createIndexedDBProvider = (
doc: Doc,
dbName: string = DEFAULT_DB_NAME
): IndexedDBProvider => {
const datasource = createIndexedDBDatasource({ dbName, mergeCount });
let provider: ReturnType<typeof createLazyProvider> | null = null;
const apis = {
// Valid only while connected; asserts otherwise.
get status() {
assertExists(provider);
return provider.status;
},
subscribeStatusChange(onStatusChange) {
assertExists(provider);
return provider.subscribeStatusChange(onStatusChange);
},
connect: () => {
if (apis.connected) {
apis.disconnect();
}
provider = createLazyProvider(doc, datasource, { origin: 'idb' });
provider.connect();
},
disconnect: () => {
datasource?.disconnect();
provider?.disconnect();
provider = null;
},
cleanup: async () => {
await datasource?.cleanup();
},
get connected() {
return provider?.connected || false;
},
datasource,
} satisfies IndexedDBProvider;
return apis;
};

View File

@ -1,50 +0,0 @@
import type { DBSchema, IDBPDatabase } from 'idb';
import type { DataSourceAdapter } from 'y-provider';
// Schema version of the binary IndexedDB database.
export const dbVersion = 1;
// Database name used when callers do not supply one.
export const DEFAULT_DB_NAME = 'affine-local';

/**
 * idb upgrade callback: create the two object stores, both keyed by `id`.
 */
export function upgradeDB(db: IDBPDatabase<BlockSuiteBinaryDB>) {
  for (const storeName of ['workspace', 'milestone'] as const) {
    db.createObjectStore(storeName, { keyPath: 'id' });
  }
}
// Public surface of the IndexedDB provider returned by createIndexedDBProvider.
export interface IndexedDBProvider extends DataSourceAdapter {
connect: () => void;
disconnect: () => void;
// Deletes all persisted docs from the `workspace` store.
cleanup: () => Promise<void>;
readonly connected: boolean;
}
// One persisted Yjs update with the wall-clock time it was written.
export type UpdateMessage = {
timestamp: number;
update: Uint8Array;
};
// Row shape of the `workspace` store: all updates for one doc guid.
export type WorkspacePersist = {
id: string;
updates: UpdateMessage[];
};
// Row shape of the `milestone` store: named snapshots for one workspace.
export type WorkspaceMilestone = {
id: string;
milestone: Record<string, Uint8Array>;
};
// idb DBSchema for the current (v1) database layout.
export interface BlockSuiteBinaryDB extends DBSchema {
workspace: {
key: string;
value: WorkspacePersist;
};
milestone: {
key: string;
value: WorkspaceMilestone;
};
}
// Legacy per-workspace database layout, read only during migration.
export interface OldYjsDB extends DBSchema {
updates: {
key: number;
value: Uint8Array;
};
}

View File

@ -1,205 +0,0 @@
import type { IDBPDatabase } from 'idb';
import { openDB } from 'idb';
import { applyUpdate, Doc, encodeStateAsUpdate } from 'yjs';
import type { BlockSuiteBinaryDB, OldYjsDB, UpdateMessage } from './shared';
import { dbVersion, DEFAULT_DB_NAME, upgradeDB } from './shared';
let allDb: IDBDatabaseInfo[];
export function mergeUpdates(updates: Uint8Array[]) {
const doc = new Doc();
updates.forEach(update => {
applyUpdate(doc, update);
});
return encodeStateAsUpdate(doc);
}
/**
 * Detect whether an IndexedDB database named `name` already exists, without
 * leaving a new one behind.
 *
 * Opening always succeeds; the only signal that the database was absent is
 * that `onupgradeneeded` fires. In that case the freshly created empty
 * database is deleted again before resolving `false`.
 */
async function databaseExists(name: string): Promise<boolean> {
  return new Promise(resolve => {
    let preExisting = true;
    const request = indexedDB.open(name);
    request.onupgradeneeded = function () {
      preExisting = false;
    };
    request.onsuccess = function () {
      request.result.close();
      if (!preExisting) {
        indexedDB.deleteDatabase(name);
      }
      resolve(preExisting);
    };
  });
}
/**
 * Try to migrate data from the old per-workspace databases (version 1,
 * single `updates` store) into the new shared database's `workspace` store.
 * This function will be removed in the future since we don't need to
 * support the old database forever.
 */
export async function tryMigrate(
db: IDBPDatabase<BlockSuiteBinaryDB>,
id: string,
dbName = DEFAULT_DB_NAME
) {
// do/while(false) is used as a breakable scope, not a loop.
do {
if (!allDb || localStorage.getItem(`${dbName}-migration`) !== 'true') {
try {
allDb = await indexedDB.databases();
} catch {
// in firefox, `indexedDB.databases` is not existed
// Fallback: probe only the database named after this workspace id.
if (await databaseExists(id)) {
await openDB<IDBPDatabase<OldYjsDB>>(id, 1).then(async oldDB => {
if (!oldDB.objectStoreNames.contains('updates')) {
return;
}
const t = oldDB
.transaction('updates', 'readonly')
.objectStore('updates');
const updates = await t.getAll();
// Only migrate when the store really contains raw binary updates.
if (
!Array.isArray(updates) ||
!updates.every(update => update instanceof Uint8Array)
) {
return;
}
const update = mergeUpdates(updates);
const workspaceTransaction = db
.transaction('workspace', 'readwrite')
.objectStore('workspace');
const data = await workspaceTransaction.get(id);
// Never overwrite data the new database already has.
if (!data) {
console.log('upgrading the database');
await workspaceTransaction.put({
id,
updates: [
{
timestamp: Date.now(),
update,
},
],
});
}
});
break;
}
}
// run the migration
// Migrate every version-1 database that still has an `updates` store.
await Promise.all(
allDb &&
allDb.map(meta => {
if (meta.name && meta.version === 1) {
const name = meta.name;
const version = meta.version;
return openDB<IDBPDatabase<OldYjsDB>>(name, version).then(
async oldDB => {
if (!oldDB.objectStoreNames.contains('updates')) {
return;
}
const t = oldDB
.transaction('updates', 'readonly')
.objectStore('updates');
const updates = await t.getAll();
if (
!Array.isArray(updates) ||
!updates.every(update => update instanceof Uint8Array)
) {
return;
}
const update = mergeUpdates(updates);
const workspaceTransaction = db
.transaction('workspace', 'readwrite')
.objectStore('workspace');
const data = await workspaceTransaction.get(name);
if (!data) {
console.log('upgrading the database');
await workspaceTransaction.put({
id: name,
updates: [
{
timestamp: Date.now(),
update,
},
],
});
}
}
);
}
return void 0;
})
);
// Mark migration done so subsequent calls can short-circuit.
localStorage.setItem(`${dbName}-migration`, 'true');
break;
}
// eslint-disable-next-line no-constant-condition
} while (false);
}
/**
 * Read every stored update for `guid` from the `workspace` store and merge
 * them into one binary update. Returns `false` when the doc has never been
 * persisted.
 */
export async function downloadBinary(
  guid: string,
  dbName = DEFAULT_DB_NAME
): Promise<UpdateMessage['update'] | false> {
  const db = await openDB<BlockSuiteBinaryDB>(dbName, dbVersion, {
    upgrade: upgradeDB,
  });
  const persisted = await db
    .transaction('workspace', 'readonly')
    .objectStore('workspace')
    .get(guid);
  if (!persisted) {
    return false;
  }
  return mergeUpdates(persisted.updates.map(entry => entry.update));
}
/**
 * Replace everything stored for `guid` with a single update record stamped
 * with the current time.
 */
export async function overwriteBinary(
  guid: string,
  update: UpdateMessage['update'],
  dbName = DEFAULT_DB_NAME
) {
  const db = await openDB<BlockSuiteBinaryDB>(dbName, dbVersion, {
    upgrade: upgradeDB,
  });
  const store = db
    .transaction('workspace', 'readwrite')
    .objectStore('workspace');
  await store.put({
    id: guid,
    updates: [{ timestamp: Date.now(), update }],
  });
}
/**
 * Append one update for `guid`, creating the workspace record on first
 * write instead of overwriting existing updates.
 */
export async function pushBinary(
  guid: string,
  update: UpdateMessage['update'],
  dbName = DEFAULT_DB_NAME
) {
  const db = await openDB<BlockSuiteBinaryDB>(dbName, dbVersion, {
    upgrade: upgradeDB,
  });
  const store = db
    .transaction('workspace', 'readwrite')
    .objectStore('workspace');
  const entry = { timestamp: Date.now(), update };
  const existing = await store.get(guid);
  if (existing) {
    existing.updates.push(entry);
    await store.put(existing);
  } else {
    await store.put({ id: guid, updates: [entry] });
  }
}

View File

@ -1,17 +0,0 @@
{
"extends": "../../../tsconfig.json",
"include": ["./src"],
"compilerOptions": {
"composite": true,
"noEmit": false,
"outDir": "lib"
},
"references": [
{
"path": "./tsconfig.node.json"
},
{
"path": "../y-provider"
}
]
}

View File

@ -1,11 +0,0 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"composite": true,
"module": "ESNext",
"moduleResolution": "Node",
"allowSyntheticDefaultImports": true,
"outDir": "lib"
},
"include": ["vite.config.ts"]
}

View File

@ -1,35 +0,0 @@
import { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import { defineConfig } from 'vite';
import dts from 'vite-plugin-dts';
const __dirname = fileURLToPath(new URL('.', import.meta.url));
export default defineConfig({
build: {
minify: 'esbuild',
sourcemap: true,
lib: {
entry: resolve(__dirname, 'src/index.ts'),
fileName: 'index',
name: 'ToEverythingIndexedDBProvider',
formats: ['es', 'cjs', 'umd'],
},
rollupOptions: {
output: {
globals: {
idb: 'idb',
yjs: 'yjs',
'y-provider': 'yProvider',
},
},
external: ['idb', 'yjs', 'y-provider'],
},
},
plugins: [
dts({
entryRoot: resolve(__dirname, 'src'),
}),
],
});

View File

@ -1,8 +0,0 @@
# A set of provider utilities for Yjs
## createLazyProvider
A factory function to create a lazy provider. It will not download the document from the provider until the first time a document is loaded at the parent doc.
To use it, first implement a `DocDataSource` adapter.
Then, create a `LazyProvider` with `createLazyProvider(rootDoc, datasource)`.

View File

@ -1,37 +0,0 @@
{
"name": "y-provider",
"type": "module",
"version": "0.14.0",
"description": "Yjs provider protocol for multi document support",
"exports": {
".": "./src/index.ts"
},
"files": [
"dist"
],
"publishConfig": {
"access": "public",
"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.js",
"require": "./dist/index.cjs",
"default": "./dist/index.umd.cjs"
}
}
},
"scripts": {
"build": "vite build"
},
"devDependencies": {
"@blocksuite/store": "0.14.0-canary-202404280529-c8e5f89",
"vite": "^5.1.4",
"vite-plugin-dts": "3.7.3",
"vitest": "1.4.0",
"yjs": "^13.6.14"
},
"peerDependencies": {
"@blocksuite/global": "*",
"yjs": "^13"
}
}

View File

@ -1,235 +0,0 @@
import { setTimeout } from 'node:timers/promises';
import { describe, expect, test, vi } from 'vitest';
import { applyUpdate, Doc, encodeStateAsUpdate, encodeStateVector } from 'yjs';
import type { DocDataSource } from '../data-source';
import { createLazyProvider } from '../lazy-provider';
import { getDoc } from '../utils';
const createMemoryDatasource = (rootDoc: Doc) => {
const selfUpdateOrigin = Symbol('self-origin');
const listeners = new Set<(guid: string, update: Uint8Array) => void>();
function trackDoc(doc: Doc) {
doc.on('update', (update, origin) => {
if (origin === selfUpdateOrigin) {
return;
}
for (const listener of listeners) {
listener(doc.guid, update);
}
});
doc.on('subdocs', () => {
for (const subdoc of rootDoc.subdocs) {
trackDoc(subdoc);
}
});
}
trackDoc(rootDoc);
const adapter = {
queryDocState: async (guid, options) => {
const subdoc = getDoc(rootDoc, guid);
if (!subdoc) {
return false;
}
return {
missing: encodeStateAsUpdate(subdoc, options?.stateVector),
state: encodeStateVector(subdoc),
};
},
sendDocUpdate: async (guid, update) => {
const subdoc = getDoc(rootDoc, guid);
if (!subdoc) {
return;
}
applyUpdate(subdoc, update, selfUpdateOrigin);
},
onDocUpdate: callback => {
listeners.add(callback);
return () => {
listeners.delete(callback);
};
},
} satisfies DocDataSource;
return {
rootDoc, // expose rootDoc for testing
...adapter,
};
};
describe('y-provider', () => {
  test('should sync a subdoc if it is loaded after connect', async () => {
    const remoteRootDoc = new Doc(); // this is the remote doc lives in remote
    const datasource = createMemoryDatasource(remoteRootDoc);
    const remotesubdoc = new Doc();
    remotesubdoc.getText('text').insert(0, 'test-subdoc-value');
    // populate remote doc with simple data
    remoteRootDoc.getMap('map').set('test-0', 'test-0-value');
    remoteRootDoc.getMap('map').set('subdoc', remotesubdoc);
    const rootDoc = new Doc({ guid: remoteRootDoc.guid }); // this is the doc that we want to sync
    const provider = createLazyProvider(rootDoc, datasource);
    provider.connect();
    await setTimeout(); // wait for the provider to sync
    const subdoc = rootDoc.getMap('map').get('subdoc') as Doc;
    expect(rootDoc.getMap('map').get('test-0')).toBe('test-0-value');
    // laziness: subdoc content must not arrive before the subdoc is loaded
    expect(subdoc.getText('text').toJSON()).toBe('');
    // onload, the provider should sync the subdoc
    subdoc.load();
    await setTimeout();
    expect(subdoc.getText('text').toJSON()).toBe('test-subdoc-value');
    remotesubdoc.getText('text').insert(0, 'prefix-');
    await setTimeout();
    expect(subdoc.getText('text').toJSON()).toBe('prefix-test-subdoc-value');
    // disconnect then reconnect
    provider.disconnect();
    remotesubdoc.getText('text').delete(0, 'prefix-'.length);
    await setTimeout();
    // while disconnected, remote changes must not flow in
    expect(subdoc.getText('text').toJSON()).toBe('prefix-test-subdoc-value');
    provider.connect();
    await setTimeout();
    // reconnecting re-syncs and converges with remote
    expect(subdoc.getText('text').toJSON()).toBe('test-subdoc-value');
  });
  test('should sync a shouldLoad=true subdoc on connect', async () => {
    const remoteRootDoc = new Doc(); // this is the remote doc lives in remote
    const datasource = createMemoryDatasource(remoteRootDoc);
    const remotesubdoc = new Doc();
    remotesubdoc.getText('text').insert(0, 'test-subdoc-value');
    // populate remote doc with simple data
    remoteRootDoc.getMap('map').set('test-0', 'test-0-value');
    remoteRootDoc.getMap('map').set('subdoc', remotesubdoc);
    const rootDoc = new Doc({ guid: remoteRootDoc.guid }); // this is the doc that we want to sync
    applyUpdate(rootDoc, encodeStateAsUpdate(remoteRootDoc)); // sync rootDoc with remoteRootDoc
    const subdoc = rootDoc.getMap('map').get('subdoc') as Doc;
    expect(subdoc.getText('text').toJSON()).toBe('');
    // load BEFORE connecting; the provider must still pick the subdoc up
    subdoc.load();
    const provider = createLazyProvider(rootDoc, datasource);
    provider.connect();
    await setTimeout(); // wait for the provider to sync
    expect(subdoc.getText('text').toJSON()).toBe('test-subdoc-value');
  });
  test('should send existing local update to remote on connect', async () => {
    const remoteRootDoc = new Doc(); // this is the remote doc lives in remote
    const datasource = createMemoryDatasource(remoteRootDoc);
    const rootDoc = new Doc({ guid: remoteRootDoc.guid }); // this is the doc that we want to sync
    applyUpdate(rootDoc, encodeStateAsUpdate(remoteRootDoc)); // sync rootDoc with remoteRootDoc
    // local edit made while offline
    rootDoc.getText('text').insert(0, 'test-value');
    const provider = createLazyProvider(rootDoc, datasource);
    provider.connect();
    await setTimeout(); // wait for the provider to sync
    expect(remoteRootDoc.getText('text').toJSON()).toBe('test-value');
  });
  test('should send local update to remote for subdoc after connect', async () => {
    const remoteRootDoc = new Doc(); // this is the remote doc lives in remote
    const datasource = createMemoryDatasource(remoteRootDoc);
    const rootDoc = new Doc({ guid: remoteRootDoc.guid }); // this is the doc that we want to sync
    const provider = createLazyProvider(rootDoc, datasource);
    provider.connect();
    await setTimeout(); // wait for the provider to sync
    // create a brand-new subdoc locally after the provider is connected
    const subdoc = new Doc();
    rootDoc.getMap('map').set('subdoc', subdoc);
    subdoc.getText('text').insert(0, 'test-subdoc-value');
    await setTimeout(); // wait for the provider to sync
    const remoteSubdoc = remoteRootDoc.getMap('map').get('subdoc') as Doc;
    expect(remoteSubdoc.getText('text').toJSON()).toBe('test-subdoc-value');
  });
  test('should not send local update to remote for subdoc after disconnect', async () => {
    const remoteRootDoc = new Doc(); // this is the remote doc lives in remote
    const datasource = createMemoryDatasource(remoteRootDoc);
    const rootDoc = new Doc({ guid: remoteRootDoc.guid }); // this is the doc that we want to sync
    const provider = createLazyProvider(rootDoc, datasource);
    provider.connect();
    await setTimeout(); // wait for the provider to sync
    const subdoc = new Doc();
    rootDoc.getMap('map').set('subdoc', subdoc);
    await setTimeout(); // wait for the provider to sync
    const remoteSubdoc = remoteRootDoc.getMap('map').get('subdoc') as Doc;
    expect(remoteSubdoc.getText('text').toJSON()).toBe('');
    provider.disconnect();
    // edits after disconnect must stay local
    subdoc.getText('text').insert(0, 'test-subdoc-value');
    await setTimeout();
    expect(remoteSubdoc.getText('text').toJSON()).toBe('');
    expect(provider.connected).toBe(false);
  });
  test('should not send remote update back', async () => {
    const remoteRootDoc = new Doc(); // this is the remote doc lives in remote
    const datasource = createMemoryDatasource(remoteRootDoc);
    // spy on the outbound path to prove no echo happens
    const spy = vi.spyOn(datasource, 'sendDocUpdate');
    const rootDoc = new Doc({ guid: remoteRootDoc.guid }); // this is the doc that we want to sync
    const provider = createLazyProvider(rootDoc, datasource);
    provider.connect();
    remoteRootDoc.getText('text').insert(0, 'test-value');
    expect(spy).not.toBeCalled();
  });
  test('only sync', async () => {
    const remoteRootDoc = new Doc(); // this is the remote doc lives in remote
    const datasource = createMemoryDatasource(remoteRootDoc);
    remoteRootDoc.getText().insert(0, 'hello, world!');
    const rootDoc = new Doc({ guid: remoteRootDoc.guid }); // this is the doc that we want to sync
    const provider = createLazyProvider(rootDoc, datasource);
    // one-shot sync of the root doc only (no persistent connection)
    await provider.sync(true);
    expect(rootDoc.getText().toJSON()).toBe('hello, world!');
    const remotesubdoc = new Doc();
    remotesubdoc.getText('text').insert(0, 'test-subdoc-value');
    remoteRootDoc.getMap('map').set('subdoc', remotesubdoc);
    expect(rootDoc.subdocs.size).toBe(0);
    await provider.sync(true);
    // root-only sync makes the subdoc appear locally...
    expect(rootDoc.subdocs.size).toBe(1);
    const subdoc = rootDoc.getMap('map').get('subdoc') as Doc;
    expect(subdoc.getText('text').toJSON()).toBe('');
    await provider.sync(true);
    // ...but never pulls its content
    expect(subdoc.getText('text').toJSON()).toBe('');
    // a recursive sync (onlyRootDoc=false) does pull subdoc content
    await provider.sync(false);
    expect(subdoc.getText('text').toJSON()).toBe('test-subdoc-value');
  });
});

View File

@ -1,102 +0,0 @@
import type { Doc as YDoc } from 'yjs';
import { applyUpdate, encodeStateAsUpdate } from 'yjs';
import type { DocState } from './types';
export interface DocDataSource {
  /**
   * request diff update from other clients
   *
   * Returns `false` when the datasource has no doc for `guid`; otherwise a
   * DocState whose `missing` contains the updates the caller lacks (diffed
   * against `options.stateVector` when provided) and `state` the source's
   * own state vector for a follow-up diff sync.
   */
  queryDocState: (
    guid: string,
    options?: {
      stateVector?: Uint8Array;
      targetClientId?: number;
    }
  ) => Promise<DocState | false>;
  /**
   * send update to the datasource
   */
  sendDocUpdate: (guid: string, update: Uint8Array) => Promise<void>;
  /**
   * listen to update from the datasource. Returns a function to unsubscribe.
   * this is optional because some datasource might not support it
   */
  onDocUpdate?(
    callback: (guid: string, update: Uint8Array) => void
  ): () => void;
}
/**
 * Downloads the full state of `rootDoc` and every subdoc (recursively)
 * from `datasource` and applies it with the 'sync-doc-from-datasource'
 * origin tag. A doc unknown to the datasource (query returns false) is
 * simply skipped.
 */
export async function syncDocFromDataSource(
  rootDoc: YDoc,
  datasource: DocDataSource
) {
  async function download(doc: YDoc): Promise<void> {
    const docState = await datasource.queryDocState(doc.guid);
    if (docState) {
      applyUpdate(doc, docState.missing, 'sync-doc-from-datasource');
    }
    // fan out over subdocs in parallel
    await Promise.all([...doc.subdocs].map(download));
  }
  await download(rootDoc);
}
/**
 * Uploads the complete encoded state of `rootDoc` and every subdoc
 * (recursively) to `datasource`. Parent docs are sent before their
 * children; siblings upload in parallel.
 */
export async function syncDataSourceFromDoc(
  rootDoc: YDoc,
  datasource: DocDataSource
) {
  async function upload(doc: YDoc): Promise<void> {
    await datasource.sendDocUpdate(doc.guid, encodeStateAsUpdate(doc));
    await Promise.all([...doc.subdocs].map(upload));
  }
  await upload(rootDoc);
}
/**
 * Pulls from `remoteDataSource` any updates `localDataSource` is missing
 * and saves them locally, one diff-sync round per guid. Each local doc's
 * state vector is used so the remote only sends what is missing.
 *
 * @example
 * syncDataSource(() => docGuids, socketIODataSource, indexedDBDataSource)
 * syncDataSource(() => docGuids, socketIODataSource, sqliteDataSource)
 */
export async function syncDataSource(
  listDocGuids: () => string[],
  remoteDataSource: DocDataSource,
  localDataSource: DocDataSource
) {
  const guids = listDocGuids();
  await Promise.all(
    guids.map(guid => {
      return localDataSource.queryDocState(guid).then(async docState => {
        const remoteDocState = await (async () => {
          if (docState) {
            // local has some state: ask remote only for the diff
            return remoteDataSource.queryDocState(guid, {
              stateVector: docState.state,
            });
          } else {
            // local knows nothing about this doc: fetch the full state
            return remoteDataSource.queryDocState(guid);
          }
        })();
        if (remoteDocState) {
          const missing = remoteDocState.missing;
          if (missing.length === 2 && missing[0] === 0 && missing[1] === 0) {
            // empty update — yjs encodes a no-op update as exactly [0, 0]
            return;
          }
          await localDataSource.sendDocUpdate(guid, remoteDocState.missing);
        }
      });
    })
  );
}

View File

@ -1,4 +0,0 @@
export * from './data-source';
export * from './lazy-provider';
export * from './types';
export * from './utils';

View File

@ -1,358 +0,0 @@
import { assertExists } from '@blocksuite/global/utils';
import type { Doc } from 'yjs';
import { applyUpdate, encodeStateAsUpdate, encodeStateVector } from 'yjs';
import type { DocDataSource } from './data-source';
import type { DataSourceAdapter, Status } from './types';
/** Finds the doc with the given guid in the tree rooted at `doc`. */
function getDoc(doc: Doc, guid: string): Doc | undefined {
  // breadth-first scan over the subdoc tree with an explicit queue
  const queue: Doc[] = [doc];
  while (queue.length > 0) {
    const candidate = queue.shift()!;
    if (candidate.guid === guid) {
      return candidate;
    }
    for (const subdoc of candidate.subdocs) {
      queue.push(subdoc);
    }
  }
  return undefined;
}
interface LazyProviderOptions {
  // origin tag attached to updates this provider applies, so its own
  // 'update' events can be recognized and skipped
  origin?: string;
}
export type DocProvider = {
  // backport from `@blocksuite/store`
  passive: true;
  // one-shot sync without staying connected; when `onlyRootDoc` is true,
  // subdocs are not synced
  sync(onlyRootDoc?: boolean): Promise<void>;
  get connected(): boolean;
  connect(): void;
  disconnect(): void;
};
/**
 * Creates a lazy provider that connects to a datasource and synchronizes a root document.
 *
 * Subdocs are only synced once they are loaded (`doc.load()`), hence "lazy".
 * The returned object implements both the blocksuite passive `DocProvider`
 * contract and `DataSourceAdapter` for status reporting.
 */
export const createLazyProvider = (
  rootDoc: Doc,
  datasource: DocDataSource,
  options: LazyProviderOptions = {}
): DocProvider & DataSourceAdapter => {
  let connected = false;
  const pendingMap = new Map<string, Uint8Array[]>(); // guid -> pending-updates
  const disposableMap = new Map<string, Set<() => void>>();
  const connectedDocs = new Set<string>();
  let abortController: AbortController | null = null;
  const { origin = 'lazy-provider' } = options;

  // todo: should we use a real state machine here like `xstate`?
  let currentStatus: Status = {
    type: 'idle',
  };
  // number of in-flight sync operations; status settles only at zero
  let syncingStack = 0;
  const callbackSet = new Set<() => void>();
  const changeStatus = (newStatus: Status) => {
    // simulate a stack, each syncing and synced should be paired
    if (newStatus.type === 'syncing') {
      syncingStack++;
    } else if (newStatus.type === 'synced' || newStatus.type === 'error') {
      syncingStack--;
    }
    if (syncingStack < 0) {
      console.error(
        'syncingStatus < 0, this should not happen',
        options.origin
      );
    }
    if (syncingStack === 0) {
      currentStatus = newStatus;
    }
    if (newStatus.type !== 'synced') {
      currentStatus = newStatus;
    }
    if (syncingStack === 0) {
      if (!connected) {
        currentStatus = {
          type: 'idle',
        };
      } else {
        currentStatus = {
          type: 'synced',
        };
      }
    }
    callbackSet.forEach(cb => cb());
  };

  /**
   * Bidirectionally syncs one doc with the datasource: pull what we are
   * missing, then push what the datasource is missing.
   */
  async function syncDoc(doc: Doc) {
    const guid = doc.guid;
    {
      const update = await datasource.queryDocState(guid);
      let hasUpdate = false;
      if (
        update &&
        // FIX: apply unless the payload is the canonical empty update,
        // which yjs encodes as exactly [0, 0]. The previous condition
        // (`length !== 2 && missing[0] !== 0 && missing[1] !== 0`) was a
        // wrong De Morgan expansion and silently dropped any real update
        // whose first or second byte was 0 (e.g. delete-only updates).
        // Compare the correct form in syncDataSource in data-source.ts.
        !(
          update.missing.length === 2 &&
          update.missing[0] === 0 &&
          update.missing[1] === 0
        )
      ) {
        applyUpdate(doc, update.missing, origin);
        hasUpdate = true;
      }
      if (hasUpdate) {
        await datasource.sendDocUpdate(
          guid,
          encodeStateAsUpdate(doc, update ? update.state : undefined)
        );
      }
    }
    if (!connected) {
      return;
    }
    changeStatus({
      type: 'syncing',
    });
    const remoteUpdate = await datasource
      .queryDocState(guid, {
        stateVector: encodeStateVector(doc),
      })
      .then(remoteUpdate => {
        changeStatus({
          type: 'synced',
        });
        return remoteUpdate;
      })
      .catch(error => {
        changeStatus({
          type: 'error',
          error,
        });
        throw error;
      });
    pendingMap.set(guid, []);
    if (remoteUpdate) {
      applyUpdate(doc, remoteUpdate.missing, origin);
    }
    if (!connected) {
      return;
    }
    // perf: optimize me
    // it is possible the doc is only in memory but not yet in the datasource
    // we need to send the whole update to the datasource
    await datasource.sendDocUpdate(
      guid,
      encodeStateAsUpdate(doc, remoteUpdate ? remoteUpdate.state : undefined)
    );
    doc.emit('sync', [true, doc]);
  }

  /**
   * Sets up event listeners for a Yjs document.
   * @param doc - The Yjs document to set up listeners for.
   */
  function setupDocListener(doc: Doc) {
    const disposables = new Set<() => void>();
    disposableMap.set(doc.guid, disposables);
    const updateHandler = async (update: Uint8Array, updateOrigin: unknown) => {
      if (origin === updateOrigin) {
        // ignore updates this provider applied itself
        return;
      }
      changeStatus({
        type: 'syncing',
      });
      datasource
        .sendDocUpdate(doc.guid, update)
        .then(() => {
          changeStatus({
            type: 'synced',
          });
        })
        .catch(error => {
          changeStatus({
            type: 'error',
            error,
          });
          console.error(error);
        });
    };
    const subdocsHandler = (event: {
      loaded: Set<Doc>;
      removed: Set<Doc>;
      added: Set<Doc>;
    }) => {
      event.loaded.forEach(subdoc => {
        connectDoc(subdoc).catch(console.error);
      });
      event.removed.forEach(subdoc => {
        disposeDoc(subdoc);
      });
    };
    doc.on('update', updateHandler);
    doc.on('subdocs', subdocsHandler);
    // todo: handle destroy?
    disposables.add(() => {
      doc.off('update', updateHandler);
      doc.off('subdocs', subdocsHandler);
    });
  }

  /**
   * Sets up event listeners for the datasource.
   * Specifically, listens for updates to documents and applies them to the corresponding Yjs document.
   */
  function setupDatasourceListeners() {
    assertExists(abortController, 'abortController should be defined');
    const unsubscribe = datasource.onDocUpdate?.((guid, update) => {
      changeStatus({
        type: 'syncing',
      });
      const doc = getDoc(rootDoc, guid);
      if (doc) {
        applyUpdate(doc, update, origin);
        // flush updates that were buffered before this doc existed locally
        if (pendingMap.has(guid)) {
          pendingMap
            .get(guid)
            ?.forEach(update => applyUpdate(doc, update, origin));
          pendingMap.delete(guid);
        }
      } else {
        // This case happens when the father doc is not yet updated,
        // so that the child doc is not yet created.
        // We need to put it into cache so that it can be applied later.
        console.warn('doc not found', guid);
        pendingMap.set(guid, (pendingMap.get(guid) ?? []).concat(update));
      }
      changeStatus({
        type: 'synced',
      });
    });
    abortController.signal.addEventListener('abort', () => {
      unsubscribe?.();
    });
  }

  // when a subdoc is loaded, we need to sync it with the datasource and setup listeners
  async function connectDoc(doc: Doc) {
    // skip if already connected
    if (connectedDocs.has(doc.guid)) {
      return;
    }
    connectedDocs.add(doc.guid);
    setupDocListener(doc);
    await syncDoc(doc);
    await Promise.all(
      [...doc.subdocs]
        .filter(subdoc => subdoc.shouldLoad)
        .map(subdoc => connectDoc(subdoc))
    );
  }

  // detach listeners for a doc and, recursively, all of its subdocs
  function disposeDoc(doc: Doc) {
    connectedDocs.delete(doc.guid);
    const disposables = disposableMap.get(doc.guid);
    if (disposables) {
      disposables.forEach(dispose => dispose());
      disposableMap.delete(doc.guid);
    }
    // also dispose all subdocs
    doc.subdocs.forEach(disposeDoc);
  }

  function disposeAll() {
    disposableMap.forEach(disposables => {
      disposables.forEach(dispose => dispose());
    });
    disposableMap.clear();
    connectedDocs.clear();
  }

  /**
   * Connects to the datasource and sets up event listeners for document updates.
   */
  function connect() {
    connected = true;
    abortController = new AbortController();
    changeStatus({
      type: 'syncing',
    });
    // root doc should be already loaded,
    // but we want to populate the cache for later update events
    connectDoc(rootDoc)
      .then(() => {
        changeStatus({
          type: 'synced',
        });
      })
      .catch(error => {
        changeStatus({
          type: 'error',
          error,
        });
        console.error(error);
      });
    setupDatasourceListeners();
  }

  async function disconnect() {
    connected = false;
    disposeAll();
    assertExists(abortController, 'abortController should be defined');
    abortController.abort();
    abortController = null;
  }

  const syncDocRecursive = async (doc: Doc) => {
    await syncDoc(doc);
    await Promise.all(
      [...doc.subdocs.values()].map(subdoc => syncDocRecursive(subdoc))
    );
  };

  return {
    // one-shot sync; `connected` is toggled so syncDoc's early-outs don't fire
    sync: async onlyRootDoc => {
      connected = true;
      try {
        if (onlyRootDoc) {
          await syncDoc(rootDoc);
        } else {
          await syncDocRecursive(rootDoc);
        }
      } finally {
        connected = false;
      }
    },
    get status() {
      return currentStatus;
    },
    subscribeStatusChange(cb: () => void) {
      callbackSet.add(cb);
      return () => {
        callbackSet.delete(cb);
      };
    },
    get connected() {
      return connected;
    },
    passive: true,
    connect,
    disconnect,
    datasource,
  };
};

View File

@ -1,35 +0,0 @@
import type { DocDataSource } from './data-source';
// Sync status of a provider; transitions are driven by paired
// syncing/synced (or error) calls counted on a stack.
export type Status =
  | {
      type: 'idle';
    }
  | {
      type: 'syncing';
    }
  | {
      type: 'synced';
    }
  | {
      type: 'error';
      error: unknown;
    };
export interface DataSourceAdapter {
  datasource: DocDataSource;
  // current sync status; poll together with subscribeStatusChange
  readonly status: Status;
  // registers a status-change callback; returns an unsubscribe function
  subscribeStatusChange(onStatusChange: () => void): () => void;
}
export interface DocState {
  /**
   * The missing structs of client queries with self state.
   */
  missing: Uint8Array;
  /**
   * The full state of remote, used to prepare for diff sync.
   */
  state?: Uint8Array;
}

View File

@ -1,30 +0,0 @@
import type { Doc } from 'yjs';
/** Locates the doc with the given guid within the tree rooted at `doc`. */
export function getDoc(doc: Doc, guid: string): Doc | undefined {
  // depth-first search over the subdoc tree using an explicit stack
  const stack: Doc[] = [doc];
  while (stack.length > 0) {
    const current = stack.pop()!;
    if (current.guid === guid) {
      return current;
    }
    stack.push(...current.subdocs);
  }
  return undefined;
}
// beforeunload handler that warns the user while a write is in flight
const saveAlert = (event: BeforeUnloadEvent) => {
  event.preventDefault();
  return (event.returnValue =
    'Data is not saved. Are you sure you want to leave?');
};

/**
 * Awaits `op` while a beforeunload warning is installed, so the user is
 * alerted if they try to close the page mid-write. The listener is removed
 * in `finally` — previously a rejected `op` left it installed forever,
 * warning on every subsequent navigation.
 */
export const writeOperation = async (op: Promise<unknown>) => {
  window.addEventListener('beforeunload', saveAlert, {
    capture: true,
  });
  try {
    await op;
  } finally {
    window.removeEventListener('beforeunload', saveAlert, {
      capture: true,
    });
  }
};

View File

@ -1,9 +0,0 @@
{
"extends": "../../../tsconfig.json",
"include": ["./src"],
"compilerOptions": {
"composite": true,
"noEmit": false,
"outDir": "lib"
}
}

View File

@ -1,27 +0,0 @@
import { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import { defineConfig } from 'vite';
import dts from 'vite-plugin-dts';
const __dirname = fileURLToPath(new URL('.', import.meta.url));
// Library build: emits ESM/CJS/UMD bundles plus rolled-up .d.ts files.
export default defineConfig({
  build: {
    minify: 'esbuild',
    sourcemap: true,
    lib: {
      entry: resolve(__dirname, 'src/index.ts'),
      fileName: 'index',
      // NOTE(review): UMD global name says "IndexedDB" although this config
      // belongs to y-provider — presumably copy-pasted from y-indexeddb.
      // Confirm no consumer references this global before renaming.
      name: 'ToEverythingIndexedDBProvider',
    },
    rollupOptions: {
      // NOTE(review): 'idb' does not appear in this package's dependencies —
      // likely inherited from y-indexeddb's config; harmless but verify.
      external: ['idb', 'yjs'],
    },
  },
  plugins: [
    dts({
      entryRoot: resolve(__dirname, 'src'),
    }),
  ],
});

View File

@ -7,10 +7,15 @@ import { listHistoryQuery, recoverDocMutation } from '@affine/graphql';
import { assertEquals } from '@blocksuite/global/utils';
import { DocCollection } from '@blocksuite/store';
import { globalBlockSuiteSchema } from '@toeverything/infra';
import { revertUpdate } from '@toeverything/y-indexeddb';
import { useEffect, useMemo } from 'react';
import useSWRImmutable from 'swr/immutable';
import { applyUpdate, encodeStateAsUpdate } from 'yjs';
import {
applyUpdate,
Doc as YDoc,
encodeStateAsUpdate,
encodeStateVector,
UndoManager,
} from 'yjs';
import {
useMutateQueryResource,
@ -180,6 +185,43 @@ export const historyListGroupByDay = (histories: DocHistory[]) => {
return [...map.entries()];
};
/**
 * Reverts `doc` back to the state captured in `snapshotUpdate` by replaying
 * the changes made since the snapshot into a scratch doc, undoing them with
 * a yjs UndoManager, and applying the resulting inverse update to `doc`.
 *
 * @param doc - the live document to revert (mutated in place)
 * @param snapshotUpdate - encoded update representing the snapshot state
 * @param getMetadata - maps a shared-root key to its yjs type so the scratch
 *   doc can materialize the same root types; throws on unknown keys
 */
export function revertUpdate(
  doc: YDoc,
  snapshotUpdate: Uint8Array,
  getMetadata: (key: string) => 'Text' | 'Map' | 'Array'
) {
  // scratch doc seeded with the snapshot state
  const snapshotDoc = new YDoc();
  applyUpdate(snapshotDoc, snapshotUpdate);
  const currentStateVector = encodeStateVector(doc);
  const snapshotStateVector = encodeStateVector(snapshotDoc);
  // everything that happened to `doc` after the snapshot
  const changesSinceSnapshotUpdate = encodeStateAsUpdate(
    doc,
    snapshotStateVector
  );
  // track all shared roots so the UndoManager can invert the replayed changes
  const undoManager = new UndoManager(
    [...snapshotDoc.share.keys()].map(key => {
      const type = getMetadata(key);
      if (type === 'Text') {
        return snapshotDoc.getText(key);
      } else if (type === 'Map') {
        return snapshotDoc.getMap(key);
      } else if (type === 'Array') {
        return snapshotDoc.getArray(key);
      }
      throw new Error('Unknown type');
    })
  );
  // replay post-snapshot changes into the scratch doc, then undo them —
  // the scratch doc now encodes "how to get from current state back to
  // the snapshot"
  applyUpdate(snapshotDoc, changesSinceSnapshotUpdate);
  undoManager.undo();
  const revertChangesSinceSnapshotUpdate = encodeStateAsUpdate(
    snapshotDoc,
    currentStateVector
  );
  applyUpdate(doc, revertChangesSinceSnapshotUpdate);
}
export const useRestorePage = (
docCollection: DocCollection,
pageId: string

View File

@ -26,9 +26,6 @@
{
"path": "../../common/env"
},
{
"path": "../../common/y-indexeddb"
},
{
"path": "./tsconfig.node.json"
},

View File

@ -19,8 +19,6 @@
"i18n",
"native",
"templates",
"y-indexeddb",
"y-provider",
"debug",
"storage",
"infra"

View File

@ -65,7 +65,6 @@
"@affine/electron/scripts/*": ["./packages/frontend/electron/scripts/*"],
"@affine-test/kit/*": ["./tests/kit/*"],
"@affine-test/fixtures/*": ["./tests/fixtures/*"],
"@toeverything/y-indexeddb": ["./packages/common/y-indexeddb/src"],
"@toeverything/infra": ["./packages/common/infra/src"],
"@affine/native": ["./packages/frontend/native/index.d.ts"],
"@affine/native/*": ["./packages/frontend/native/*"],
@ -115,9 +114,6 @@
{
"path": "./packages/common/infra"
},
{
"path": "./packages/common/y-indexeddb"
},
// Tools
{
"path": "./tools/cli"

177
yarn.lock
View File

@ -7436,39 +7436,6 @@ __metadata:
languageName: node
linkType: hard
"@microsoft/api-extractor-model@npm:7.28.3":
version: 7.28.3
resolution: "@microsoft/api-extractor-model@npm:7.28.3"
dependencies:
"@microsoft/tsdoc": "npm:0.14.2"
"@microsoft/tsdoc-config": "npm:~0.16.1"
"@rushstack/node-core-library": "npm:3.62.0"
checksum: 10/704b8bfbf0b93c1d0605506a5a34ba3c68794d451f4b1dbfdc58fc142200c4d4391a435dd13d2d9470daaf4263ccdcee35f7e1806d1978cc64df6d0483481f94
languageName: node
linkType: hard
"@microsoft/api-extractor@npm:7.39.0":
version: 7.39.0
resolution: "@microsoft/api-extractor@npm:7.39.0"
dependencies:
"@microsoft/api-extractor-model": "npm:7.28.3"
"@microsoft/tsdoc": "npm:0.14.2"
"@microsoft/tsdoc-config": "npm:~0.16.1"
"@rushstack/node-core-library": "npm:3.62.0"
"@rushstack/rig-package": "npm:0.5.1"
"@rushstack/ts-command-line": "npm:4.17.1"
colors: "npm:~1.2.1"
lodash: "npm:~4.17.15"
resolve: "npm:~1.22.1"
semver: "npm:~7.5.4"
source-map: "npm:~0.6.1"
typescript: "npm:5.3.3"
bin:
api-extractor: bin/api-extractor
checksum: 10/b05f525b428cbacf26bc45394b84d7a73ab61b0bce7b77f82d0e43a332f019e94ca24d83f94d5083e9150efbc916cc35aa195d4f62ceca6fa168eb796d0d8af2
languageName: node
linkType: hard
"@microsoft/api-extractor@npm:7.43.0":
version: 7.43.0
resolution: "@microsoft/api-extractor@npm:7.43.0"
@ -11922,26 +11889,6 @@ __metadata:
languageName: node
linkType: hard
"@rushstack/node-core-library@npm:3.62.0":
version: 3.62.0
resolution: "@rushstack/node-core-library@npm:3.62.0"
dependencies:
colors: "npm:~1.2.1"
fs-extra: "npm:~7.0.1"
import-lazy: "npm:~4.0.0"
jju: "npm:~1.4.0"
resolve: "npm:~1.22.1"
semver: "npm:~7.5.4"
z-schema: "npm:~5.0.2"
peerDependencies:
"@types/node": "*"
peerDependenciesMeta:
"@types/node":
optional: true
checksum: 10/61e22a1a04cf194f12b05acb643a361a74a34944a48380f61ba9d5f4b6c3684a7ae5669af5013b5549101647c6862548e11e1b8c60bdb687541f09133bbdd976
languageName: node
linkType: hard
"@rushstack/node-core-library@npm:4.0.2":
version: 4.0.2
resolution: "@rushstack/node-core-library@npm:4.0.2"
@ -11961,16 +11908,6 @@ __metadata:
languageName: node
linkType: hard
"@rushstack/rig-package@npm:0.5.1":
version: 0.5.1
resolution: "@rushstack/rig-package@npm:0.5.1"
dependencies:
resolve: "npm:~1.22.1"
strip-json-comments: "npm:~3.1.1"
checksum: 10/9e5d425f60bb1e23371ecc086eaca838651ced904da33b690103ac731820e65a8a3720243f9e03578dfd1efa067fec9c6d762f16b3bb8cf92b56254d5f906989
languageName: node
linkType: hard
"@rushstack/rig-package@npm:0.5.2":
version: 0.5.2
resolution: "@rushstack/rig-package@npm:0.5.2"
@ -11996,18 +11933,6 @@ __metadata:
languageName: node
linkType: hard
"@rushstack/ts-command-line@npm:4.17.1":
version: 4.17.1
resolution: "@rushstack/ts-command-line@npm:4.17.1"
dependencies:
"@types/argparse": "npm:1.0.38"
argparse: "npm:~1.0.9"
colors: "npm:~1.2.1"
string-argv: "npm:~0.3.1"
checksum: 10/75407f6a42fda364ec9f945ebd346c632a23dd97d7ed5ad108c264d72ee0370d3d912cc6c16af6973bbc3f5f92b845b63fb13da75a077d61f7e34e69f00b8823
languageName: node
linkType: hard
"@rushstack/ts-command-line@npm:4.19.1":
version: 4.19.1
resolution: "@rushstack/ts-command-line@npm:4.19.1"
@ -14459,27 +14384,6 @@ __metadata:
languageName: node
linkType: hard
"@toeverything/y-indexeddb@workspace:packages/common/y-indexeddb":
version: 0.0.0-use.local
resolution: "@toeverything/y-indexeddb@workspace:packages/common/y-indexeddb"
dependencies:
"@blocksuite/blocks": "npm:0.14.0-canary-202404280529-c8e5f89"
"@blocksuite/global": "npm:0.14.0-canary-202404280529-c8e5f89"
"@blocksuite/store": "npm:0.14.0-canary-202404280529-c8e5f89"
fake-indexeddb: "npm:^5.0.2"
idb: "npm:^8.0.0"
nanoid: "npm:^5.0.7"
vite: "npm:^5.2.8"
vite-plugin-dts: "npm:3.8.1"
vitest: "npm:1.4.0"
y-indexeddb: "npm:^9.0.12"
y-provider: "workspace:*"
yjs: "npm:^13.6.14"
peerDependencies:
yjs: ^13
languageName: unknown
linkType: soft
"@tokenizer/token@npm:^0.3.0":
version: 0.3.0
resolution: "@tokenizer/token@npm:0.3.0"
@ -16121,7 +16025,7 @@ __metadata:
languageName: node
linkType: hard
"@vue/language-core@npm:1.8.27, @vue/language-core@npm:^1.8.26, @vue/language-core@npm:^1.8.27":
"@vue/language-core@npm:1.8.27, @vue/language-core@npm:^1.8.27":
version: 1.8.27
resolution: "@vue/language-core@npm:1.8.27"
dependencies:
@ -18964,13 +18868,6 @@ __metadata:
languageName: node
linkType: hard
"colors@npm:~1.2.1":
version: 1.2.5
resolution: "colors@npm:1.2.5"
checksum: 10/fe30007df0f62abedc2726990d0951f19292d85686dffcc76fa96ee9dc4e1a987d50b34aa02796e88627709c54a52f07c057bf1da4b7302c06eda8e1afd2f09a
languageName: node
linkType: hard
"columnify@npm:^1.6.0":
version: 1.6.0
resolution: "columnify@npm:1.6.0"
@ -26782,7 +26679,7 @@ __metadata:
languageName: node
linkType: hard
"lib0@npm:^0.2.74, lib0@npm:^0.2.85, lib0@npm:^0.2.86, lib0@npm:^0.2.93":
"lib0@npm:^0.2.85, lib0@npm:^0.2.86, lib0@npm:^0.2.93":
version: 0.2.93
resolution: "lib0@npm:0.2.93"
dependencies:
@ -35713,16 +35610,6 @@ __metadata:
languageName: node
linkType: hard
"typescript@npm:5.3.3":
version: 5.3.3
resolution: "typescript@npm:5.3.3"
bin:
tsc: bin/tsc
tsserver: bin/tsserver
checksum: 10/6e4e6a14a50c222b3d14d4ea2f729e79f972fa536ac1522b91202a9a65af3605c2928c4a790a4a50aa13694d461c479ba92cedaeb1e7b190aadaa4e4b96b8e18
languageName: node
linkType: hard
"typescript@npm:5.4.2":
version: 5.4.2
resolution: "typescript@npm:5.4.2"
@ -35743,16 +35630,6 @@ __metadata:
languageName: node
linkType: hard
"typescript@patch:typescript@npm%3A5.3.3#optional!builtin<compat/typescript>":
version: 5.3.3
resolution: "typescript@patch:typescript@npm%3A5.3.3#optional!builtin<compat/typescript>::version=5.3.3&hash=e012d7"
bin:
tsc: bin/tsc
tsserver: bin/tsserver
checksum: 10/c93786fcc9a70718ba1e3819bab56064ead5817004d1b8186f8ca66165f3a2d0100fee91fa64c840dcd45f994ca5d615d8e1f566d39a7470fc1e014dbb4cf15d
languageName: node
linkType: hard
"typescript@patch:typescript@npm%3A5.4.2#optional!builtin<compat/typescript>":
version: 5.4.2
resolution: "typescript@patch:typescript@npm%3A5.4.2#optional!builtin<compat/typescript>::version=5.4.2&hash=5adc0c"
@ -36603,26 +36480,6 @@ __metadata:
languageName: node
linkType: hard
"vite-plugin-dts@npm:3.7.3":
version: 3.7.3
resolution: "vite-plugin-dts@npm:3.7.3"
dependencies:
"@microsoft/api-extractor": "npm:7.39.0"
"@rollup/pluginutils": "npm:^5.1.0"
"@vue/language-core": "npm:^1.8.26"
debug: "npm:^4.3.4"
kolorist: "npm:^1.8.0"
vue-tsc: "npm:^1.8.26"
peerDependencies:
typescript: "*"
vite: "*"
peerDependenciesMeta:
vite:
optional: true
checksum: 10/b6adf0934a219b5b6a56f6ddf13b388533856eb55d167c245fed7de30352c285eeadc3387df6f5c6617c0c94640bd7b32bf65640a53484d50c6f0d2218918b51
languageName: node
linkType: hard
"vite-plugin-dts@npm:3.8.1":
version: 3.8.1
resolution: "vite-plugin-dts@npm:3.8.1"
@ -36674,7 +36531,7 @@ __metadata:
languageName: node
linkType: hard
"vite@npm:^5.0.0, vite@npm:^5.0.11, vite@npm:^5.1.4, vite@npm:^5.2.8":
"vite@npm:^5.0.0, vite@npm:^5.0.11, vite@npm:^5.2.8":
version: 5.2.9
resolution: "vite@npm:5.2.9"
dependencies:
@ -36842,7 +36699,7 @@ __metadata:
languageName: node
linkType: hard
"vue-tsc@npm:^1.8.26, vue-tsc@npm:^1.8.27":
"vue-tsc@npm:^1.8.27":
version: 1.8.27
resolution: "vue-tsc@npm:1.8.27"
dependencies:
@ -37605,17 +37462,6 @@ __metadata:
languageName: node
linkType: hard
"y-indexeddb@npm:^9.0.12":
version: 9.0.12
resolution: "y-indexeddb@npm:9.0.12"
dependencies:
lib0: "npm:^0.2.74"
peerDependencies:
yjs: ^13.0.0
checksum: 10/6468ebdcb2936a5fe10e4fb57cbe2d90260c44b63c6ecf6a26cc3652d21bd3be58bb76dfb56dbe56dd71b320042bfd3663274217b89300f2f0db92611fc9e7c6
languageName: node
linkType: hard
"y-protocols@npm:^1.0.6":
version: 1.0.6
resolution: "y-protocols@npm:1.0.6"
@ -37627,21 +37473,6 @@ __metadata:
languageName: node
linkType: hard
"y-provider@workspace:*, y-provider@workspace:packages/common/y-provider":
version: 0.0.0-use.local
resolution: "y-provider@workspace:packages/common/y-provider"
dependencies:
"@blocksuite/store": "npm:0.14.0-canary-202404280529-c8e5f89"
vite: "npm:^5.1.4"
vite-plugin-dts: "npm:3.7.3"
vitest: "npm:1.4.0"
yjs: "npm:^13.6.14"
peerDependencies:
"@blocksuite/global": "*"
yjs: ^13
languageName: unknown
linkType: soft
"y18n@npm:^4.0.0":
version: 4.0.3
resolution: "y18n@npm:4.0.3"