feat(infra): new doc sync engine (#6205)
https://github.com/toeverything/AFFiNE/blob/eyhn/feat/new-sync/packages/common/infra/src/workspace/engine/doc/README.md
parent 05c44db5a9
commit 34703a3b7d
10
.eslintrc.js
@@ -31,11 +31,6 @@ const createPattern = packageName => [
      message: 'Use `useNavigateHelper` instead',
      importNames: ['useNavigate'],
    },
    {
      group: ['yjs'],
      message: 'Do not use this API because it has a bug',
      importNames: ['mergeUpdates'],
    },
    {
      group: ['@affine/env/constant'],
      message:
@@ -168,11 +163,6 @@ const config = {
          message: 'Use `useNavigateHelper` instead',
          importNames: ['useNavigate'],
        },
        {
          group: ['yjs'],
          message: 'Do not use this API because it has a bug',
          importNames: ['mergeUpdates'],
        },
      ],
    },
  ],

@@ -55,6 +55,16 @@ export function isEmptyBuffer(buf: Buffer): boolean {
const MAX_SEQ_NUM = 0x3fffffff; // u31
const UPDATES_QUEUE_CACHE_KEY = 'doc:manager:updates';

interface DocResponse {
  doc: Doc;
  timestamp: number;
}

interface BinaryResponse {
  binary: Buffer;
  timestamp: number;
}

/**
 * Since we can't directly save all client updates into database, in which way the database will overload,
 * we need to buffer the updates and merge them to reduce db write.
@@ -332,8 +342,8 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
  /**
   * Get latest timestamp of all docs in the workspace.
   */
  @CallTimer('doc', 'get_stats')
  async getStats(workspaceId: string, after: number | undefined = 0) {
  @CallTimer('doc', 'get_doc_timestamps')
  async getDocTimestamps(workspaceId: string, after: number | undefined = 0) {
    const snapshots = await this.db.snapshot.findMany({
      where: {
        workspaceId,
@@ -378,13 +388,18 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
  /**
   * get the latest doc with all update applied.
   */
  async get(workspaceId: string, guid: string): Promise<Doc | null> {
  async get(workspaceId: string, guid: string): Promise<DocResponse | null> {
    const result = await this._get(workspaceId, guid);
    if (result) {
      if ('doc' in result) {
        return result.doc;
      } else if ('snapshot' in result) {
        return this.recoverDoc(result.snapshot);
        return result;
      } else {
        const doc = await this.recoverDoc(result.binary);

        return {
          doc,
          timestamp: result.timestamp,
        };
      }
    }

@@ -394,13 +409,19 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
  /**
   * get the latest doc binary with all update applied.
   */
  async getBinary(workspaceId: string, guid: string): Promise<Buffer | null> {
  async getBinary(
    workspaceId: string,
    guid: string
  ): Promise<BinaryResponse | null> {
    const result = await this._get(workspaceId, guid);
    if (result) {
      if ('doc' in result) {
        return Buffer.from(encodeStateAsUpdate(result.doc));
      } else if ('snapshot' in result) {
        return result.snapshot;
        return {
          binary: Buffer.from(encodeStateAsUpdate(result.doc)),
          timestamp: result.timestamp,
        };
      } else {
        return result;
      }
    }

@@ -410,16 +431,27 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
  /**
   * get the latest doc state vector with all update applied.
   */
  async getState(workspaceId: string, guid: string): Promise<Buffer | null> {
  async getDocState(
    workspaceId: string,
    guid: string
  ): Promise<BinaryResponse | null> {
    const snapshot = await this.getSnapshot(workspaceId, guid);
    const updates = await this.getUpdates(workspaceId, guid);

    if (updates.length) {
      const doc = await this.squash(snapshot, updates);
      return Buffer.from(encodeStateVector(doc));
      const { doc, timestamp } = await this.squash(snapshot, updates);
      return {
        binary: Buffer.from(encodeStateVector(doc)),
        timestamp,
      };
    }

    return snapshot ? snapshot.state : null;
    return snapshot?.state
      ? {
          binary: snapshot.state,
          timestamp: snapshot.updatedAt.getTime(),
        }
      : null;
  }

  /**
@@ -587,17 +619,17 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
  private async _get(
    workspaceId: string,
    guid: string
  ): Promise<{ doc: Doc } | { snapshot: Buffer } | null> {
  ): Promise<DocResponse | BinaryResponse | null> {
    const snapshot = await this.getSnapshot(workspaceId, guid);
    const updates = await this.getUpdates(workspaceId, guid);

    if (updates.length) {
      return {
        doc: await this.squash(snapshot, updates),
      };
      return this.squash(snapshot, updates);
    }

    return snapshot ? { snapshot: snapshot.blob } : null;
    return snapshot
      ? { binary: snapshot.blob, timestamp: snapshot.updatedAt.getTime() }
      : null;
  }

  /**
@@ -605,7 +637,10 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
   * and delete the updates records at the same time.
   */
  @CallTimer('doc', 'squash')
  private async squash(snapshot: Snapshot | null, updates: Update[]) {
  private async squash(
    snapshot: Snapshot | null,
    updates: Update[]
  ): Promise<DocResponse> {
    if (!updates.length) {
      throw new Error('No updates to squash');
    }
@@ -664,7 +699,7 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
      await this.updateCachedUpdatesCount(workspaceId, id, -count);
    }

    return doc;
    return { doc, timestamp: last.createdAt.getTime() };
  }

  private async getUpdateSeq(workspaceId: string, guid: string, batch = 1) {

@@ -246,7 +246,10 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
  ): Promise<EventResponse<Record<string, number>>> {
    this.assertInWorkspace(client, Sync(workspaceId));

    const stats = await this.docManager.getStats(workspaceId, timestamp);
    const stats = await this.docManager.getDocTimestamps(
      workspaceId,
      timestamp
    );

    return {
      data: stats,
@@ -302,13 +305,15 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
      guid: string;
      stateVector?: string;
    }
  ): Promise<EventResponse<{ missing: string; state?: string }>> {
  ): Promise<
    EventResponse<{ missing: string; state?: string; timestamp: number }>
  > {
    this.assertInWorkspace(client, Sync(workspaceId));

    const docId = new DocID(guid, workspaceId);
    const doc = await this.docManager.get(docId.workspace, docId.guid);
    const res = await this.docManager.get(docId.workspace, docId.guid);

    if (!doc) {
    if (!res) {
      return {
        error: new DocNotFoundError(workspaceId, docId.guid),
      };
@@ -316,16 +321,17 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {

    const missing = Buffer.from(
      encodeStateAsUpdate(
        doc,
        res.doc,
        stateVector ? Buffer.from(stateVector, 'base64') : undefined
      )
    ).toString('base64');
    const state = Buffer.from(encodeStateVector(doc)).toString('base64');
    const state = Buffer.from(encodeStateVector(res.doc)).toString('base64');

    return {
      data: {
        missing,
        state,
        timestamp: res.timestamp,
      },
    };
  }

@@ -51,7 +51,7 @@ export class WorkspacesController {
    // metadata should always exists if body is not null
    if (metadata) {
      res.setHeader('content-type', metadata.contentType);
      res.setHeader('last-modified', metadata.lastModified.toISOString());
      res.setHeader('last-modified', metadata.lastModified.toUTCString());
      res.setHeader('content-length', metadata.contentLength);
    } else {
      this.logger.warn(`Blob ${workspaceId}/${name} has no metadata`);
@@ -83,9 +83,12 @@ export class WorkspacesController {
      throw new ForbiddenException('Permission denied');
    }

    const update = await this.docManager.getBinary(docId.workspace, docId.guid);
    const binResponse = await this.docManager.getBinary(
      docId.workspace,
      docId.guid
    );

    if (!update) {
    if (!binResponse) {
      throw new NotFoundException('Doc not found');
    }

@@ -106,8 +109,12 @@ export class WorkspacesController {
    }

    res.setHeader('content-type', 'application/octet-stream');
    res.setHeader('cache-control', 'no-cache');
    res.send(update);
    res.setHeader(
      'last-modified',
      new Date(binResponse.timestamp).toUTCString()
    );
    res.setHeader('cache-control', 'private, max-age=2592000');
    res.send(binResponse.binary);
  }

  @Get('/:id/docs/:guid/histories/:timestamp')
@@ -142,7 +149,7 @@ export class WorkspacesController {

    if (history) {
      res.setHeader('content-type', 'application/octet-stream');
      res.setHeader('cache-control', 'public, max-age=2592000, immutable');
      res.setHeader('cache-control', 'private, max-age=2592000, immutable');
      res.send(history.blob);
    } else {
      throw new NotFoundException('Doc history not found');

@@ -127,7 +127,7 @@ test('should merge update when intervel due', async t => {
  await manager.autoSquash();

  t.deepEqual(
    (await manager.getBinary(ws.id, '1'))?.toString('hex'),
    (await manager.getBinary(ws.id, '1'))?.binary.toString('hex'),
    Buffer.from(update.buffer).toString('hex')
  );

@@ -150,7 +150,7 @@ test('should merge update when intervel due', async t => {
  await manager.autoSquash();

  t.deepEqual(
    (await manager.getBinary(ws.id, '1'))?.toString('hex'),
    (await manager.getBinary(ws.id, '1'))?.binary.toString('hex'),
    Buffer.from(encodeStateAsUpdate(doc)).toString('hex')
  );
});
@@ -275,20 +275,21 @@ test('should throw if meet max retry times', async t => {
test('should be able to insert the snapshot if it is new created', async t => {
  const manager = m.get(DocManager);

  const doc = new YDoc();
  const text = doc.getText('content');
  text.insert(0, 'hello');
  const update = encodeStateAsUpdate(doc);

  await manager.push('1', '1', Buffer.from(update));
  {
    const doc = new YDoc();
    const text = doc.getText('content');
    text.insert(0, 'hello');
    const update = encodeStateAsUpdate(doc);

    await manager.push('1', '1', Buffer.from(update));
  }
  const updates = await manager.getUpdates('1', '1');
  t.is(updates.length, 1);
  // @ts-expect-error private
  const snapshot = await manager.squash(null, updates);
  const { doc } = await manager.squash(null, updates);

  t.truthy(snapshot);
  t.is(snapshot.getText('content').toString(), 'hello');
  t.truthy(doc);
  t.is(doc.getText('content').toString(), 'hello');

  const restUpdates = await manager.getUpdates('1', '1');

@@ -315,14 +316,14 @@ test('should be able to merge updates into snapshot', async t => {
  {
    await manager.batchPush('1', '1', updates.slice(0, 2));
    // do the merge
    const doc = (await manager.get('1', '1'))!;
    const { doc } = (await manager.get('1', '1'))!;

    t.is(doc.getText('content').toString(), 'helloworld');
  }

  {
    await manager.batchPush('1', '1', updates.slice(2));
    const doc = (await manager.get('1', '1'))!;
    const { doc } = (await manager.get('1', '1'))!;

    t.is(doc.getText('content').toString(), 'hello world!');
  }
@@ -372,7 +373,7 @@ test('should not update snapshot if doc is outdated', async t => {
  const updateRecords = await manager.getUpdates('2', '1');

  // @ts-expect-error private
  const doc = await manager.squash(snapshot, updateRecords);
  const { doc } = await manager.squash(snapshot, updateRecords);

  // all updated will merged into doc not matter it's timestamp is outdated or not,
  // but the snapshot record will not be updated

|
||||
"@blocksuite/blocks": "0.13.0-canary-202403140735-2367cd5",
|
||||
"@blocksuite/global": "0.13.0-canary-202403140735-2367cd5",
|
||||
"@blocksuite/store": "0.13.0-canary-202403140735-2367cd5",
|
||||
"@datastructures-js/binary-search-tree": "^5.3.2",
|
||||
"foxact": "^0.2.31",
|
||||
"jotai": "^2.6.5",
|
||||
"jotai-effect": "^0.6.0",
|
||||
|
@@ -4,6 +4,7 @@ export * from './blocksuite';
export * from './command';
export * from './di';
export * from './initialization';
export * from './lifecycle';
export * from './livedata';
export * from './page';
export * from './storage';

@@ -105,7 +105,7 @@ export async function buildShowcaseWorkspace(

  const { workspace, release } = workspaceManager.open(meta);

  await workspace.engine.sync.waitForLoadedRootDoc();
  await workspace.engine.waitForRootDocReady();

  const pageRecordList = workspace.services.get(PageRecordList);

@@ -11,7 +11,7 @@ import type {
import { assertExists } from '@blocksuite/global/utils';
import type { DeltaOperation, JobMiddleware } from '@blocksuite/store';

export const replaceIdMiddleware: JobMiddleware = ({ slots, workspace }) => {
export const replaceIdMiddleware: JobMiddleware = ({ slots, collection }) => {
  const idMap = new Map<string, string>();
  slots.afterImport.on(payload => {
    if (
@@ -61,7 +61,7 @@ export const replaceIdMiddleware: JobMiddleware = ({ slots, workspace }) => {
  });
  slots.beforeImport.on(payload => {
    if (payload.type === 'page') {
      const newId = workspace.idGenerator('page');
      const newId = collection.idGenerator('page');
      idMap.set(payload.snapshot.meta.id, newId);
      payload.snapshot.meta.id = newId;
      return;
@@ -84,7 +84,7 @@ export const replaceIdMiddleware: JobMiddleware = ({ slots, workspace }) => {
      if (idMap.has(original)) {
        newId = idMap.get(original)!;
      } else {
        newId = workspace.idGenerator('block');
        newId = collection.idGenerator('block');
        idMap.set(original, newId);
      }
      snapshot.id = newId;
@@ -96,7 +96,7 @@ export const replaceIdMiddleware: JobMiddleware = ({ slots, workspace }) => {
      if (idMap.has(original)) {
        newId = idMap.get(original)!;
      } else {
        newId = workspace.idGenerator('block');
        newId = collection.idGenerator('block');
        idMap.set(original, newId);
      }
    });

@@ -2,11 +2,7 @@ import { isEqual } from 'lodash-es';
import { distinctUntilChanged, map, Observable } from 'rxjs';

import { LiveData } from '../livedata';
import {
  SyncEngineStep,
  type Workspace,
  type WorkspaceLocalState,
} from '../workspace';
import { type Workspace, type WorkspaceLocalState } from '../workspace';
import { PageRecord } from './record';

export class PageRecordList {
@@ -39,22 +35,8 @@ export class PageRecordList {
    []
  );

  public readonly isReady = LiveData.from<boolean>(
    new Observable(subscriber => {
      subscriber.next(
        this.workspace.engine.status.sync.step === SyncEngineStep.Synced
      );

      const dispose = this.workspace.engine.onStatusChange.on(() => {
        subscriber.next(
          this.workspace.engine.status.sync.step === SyncEngineStep.Synced
        );
      }).dispose;
      return () => {
        dispose();
      };
    }),
    false
  public readonly isReady = this.workspace.engine.rootDocState.map(
    state => !state.syncing
  );

  public record(id: string) {

@@ -1 +1,2 @@
export * from './kv';
export * from './memento';

85
packages/common/infra/src/storage/kv.ts
Normal file
@@ -0,0 +1,85 @@
import { AsyncLock } from '../utils';

export interface ByteKV extends ByteKVBehavior {
  transaction<T>(cb: (transaction: ByteKVBehavior) => Promise<T>): Promise<T>;
}

export interface ByteKVBehavior {
  get(key: string): Promise<Uint8Array | null> | Uint8Array | null;
  set(key: string, value: Uint8Array): Promise<void> | void;
  del(key: string): Promise<void> | void;
  keys(): Promise<string[]> | string[];
  clear(): Promise<void> | void;
}

export class MemoryByteKV implements ByteKV {
  readonly lock = new AsyncLock();

  constructor(readonly db = new Map<string, Uint8Array>()) {}

  async transaction<T>(cb: (transaction: ByteKVBehavior) => Promise<T>) {
    using _lock = await this.lock.acquire();
    return await cb({
      get: async key => {
        return this.db.get(key) ?? null;
      },
      set: async (key, value) => {
        this.db.set(key, value);
      },
      keys: async () => {
        return Array.from(this.db.keys());
      },
      del: async key => {
        this.db.delete(key);
      },
      clear: async () => {
        this.db.clear();
      },
    });
  }
  get(key: string) {
    return this.transaction(async tx => tx.get(key));
  }
  set(key: string, value: Uint8Array) {
    return this.transaction(async tx => tx.set(key, value));
  }
  keys() {
    return this.transaction(async tx => tx.keys());
  }
  clear() {
    return this.transaction(async tx => tx.clear());
  }
  del(key: string) {
    return this.transaction(async tx => tx.del(key));
  }
}

export class ReadonlyByteKV extends MemoryByteKV implements ByteKV {
  override transaction<T>(
    cb: (transaction: ByteKVBehavior) => Promise<T>
  ): Promise<T> {
    return super.transaction(tx => {
      return cb({
        ...tx,
        set() {
          return Promise.resolve();
        },
        del() {
          return Promise.resolve();
        },
        clear() {
          return Promise.resolve();
        },
      });
    });
  }
  override set(_key: string, _value: Uint8Array): Promise<void> {
    return Promise.resolve();
  }
  override del(_key: string): Promise<void> {
    return Promise.resolve();
  }
  override clear(): Promise<void> {
    return Promise.resolve();
  }
}

@@ -10,6 +10,9 @@ export interface Memento {
  get<T>(key: string): T | null;
  watch<T>(key: string): Observable<T | null>;
  set<T>(key: string, value: T | null): void;
  del(key: string): void;
  clear(): void;
  keys(): string[];
}

/**
@@ -54,4 +57,43 @@ export class MemoryMemento implements Memento {
  set<T>(key: string, value: T | null): void {
    this.getLiveData(key).next(value);
  }
  keys(): string[] {
    return Array.from(this.data.keys());
  }
  clear(): void {
    this.data.clear();
  }
  del(key: string): void {
    this.data.delete(key);
  }
}

export function wrapMemento(memento: Memento, prefix: string): Memento {
  return {
    get<T>(key: string): T | null {
      return memento.get(prefix + key);
    },
    watch(key: string) {
      return memento.watch(prefix + key);
    },
    set<T>(key: string, value: T | null): void {
      memento.set(prefix + key, value);
    },
    keys(): string[] {
      return memento
        .keys()
        .filter(k => k.startsWith(prefix))
        .map(k => k.slice(prefix.length));
    },
    clear() {
      memento.keys().forEach(k => {
        if (k.startsWith(prefix)) {
          memento.del(k);
        }
      });
    },
    del(key: string): void {
      memento.del(prefix + key);
    },
  };
}

20
packages/common/infra/src/utils/async-lock.ts
Normal file
@@ -0,0 +1,20 @@
export class AsyncLock {
  private _lock = Promise.resolve();

  async acquire() {
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    let release: () => void = null!;
    const nextLock = new Promise<void>(resolve => {
      release = resolve;
    });

    await this._lock;
    this._lock = nextLock;
    return {
      release,
      [Symbol.dispose]: () => {
        release();
      },
    };
  }
}
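For reference, the handle returned by `acquire()` implements `Symbol.dispose`, so with TypeScript 5.2+ explicit resource management a `using` declaration releases the lock automatically at scope exit; without `using`, call `release()` in a `finally` block. A sketch of both usage styles (import path assumed from this commit's utils folder):

```ts
import { AsyncLock } from './async-lock'; // assumed path

const lock = new AsyncLock();

async function withUsing() {
  using _lock = await lock.acquire(); // released automatically at scope exit
  // ...critical section: only one caller runs here at a time...
}

async function withFinally() {
  const handle = await lock.acquire();
  try {
    // ...critical section...
  } finally {
    handle.release();
  }
}
```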
@@ -1,3 +1,4 @@
export * from './async-lock';
export * from './async-queue';
export * from './merge-updates';
export * from './object-pool';

127
packages/common/infra/src/workspace/engine/doc/README.md
Normal file
@@ -0,0 +1,127 @@
# DocEngine

The synchronization algorithm for yjs docs.

```
┌─────────┐  ┌───────────┐  ┌────────┐
│ Storage ◄──┤ DocEngine ├──► Server │
└─────────┘  └───────────┘  └────────┘
```

# Core Components

## DocStorage

```ts
export interface DocStorage {
  eventBus: DocEventBus;
  doc: ByteKV;
  syncMetadata: ByteKV;
  serverClock: ByteKV;
}
```

Represents the local storage used. Specific implementations are replaceable, such as `IndexedDBDocStorage` in the `browser` and `SqliteDocStorage` on the `desktop`.

### DocEventBus

Each `DocStorage` contains a `DocEventBus`, which is used to communicate with other engines that share the same storage.

With `DocEventBus` we can sync updates between engines without connecting to the server.

For example, in the `browser` we have multiple tabs that all share the same `IndexedDBDocStorage`, so we use `BroadcastChannel` to implement `DocEventBus`, which lets us broadcast events to all tabs.

In the `desktop` app, if multiple windows share the same `SqliteDocStorage`, we must build a mechanism to broadcast events between all windows (currently not implemented).
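To make this concrete, here is a minimal sketch of a `BroadcastChannel`-based event bus. It is not the actual browser implementation; the `./event` import path, channel naming, and local-listener handling are assumptions:

```ts
import type { DocEvent, DocEventBus } from './event'; // assumed path

// Sketch: one channel per workspace, so all tabs sharing the same storage
// hear each other. BroadcastChannel only delivers postMessage to *other*
// contexts, so local listeners are notified directly as well, matching
// MemoryDocEventBus semantics.
export class BroadcastChannelDocEventBus implements DocEventBus {
  private readonly channel: BroadcastChannel;
  private readonly listeners = new Set<(event: DocEvent) => void>();

  constructor(workspaceId: string) {
    this.channel = new BroadcastChannel(`doc-event-bus:${workspaceId}`);
    this.channel.addEventListener('message', (msg: MessageEvent<DocEvent>) => {
      for (const listener of this.listeners) listener(msg.data);
    });
  }

  emit(event: DocEvent): void {
    this.channel.postMessage(event); // structured clone carries Uint8Array
    for (const listener of this.listeners) listener(event);
  }

  on(cb: (event: DocEvent) => void): () => void {
    this.listeners.add(cb);
    return () => {
      this.listeners.delete(cb);
    };
  }
}
```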
## DocServer

```ts
export interface DocServer {
  pullDoc(
    docId: string,
    stateVector: Uint8Array
  ): Promise<{
    data: Uint8Array;
    serverClock: number;
    stateVector?: Uint8Array;
  } | null>;

  pushDoc(docId: string, data: Uint8Array): Promise<{ serverClock: number }>;

  subscribeAllDocs(cb: (updates: { docId: string; data: Uint8Array; serverClock: number }) => void): Promise<() => void>;

  loadServerClock(after: number): Promise<Map<string, number>>;

  waitForConnectingServer(signal: AbortSignal): Promise<void>;
  disconnectServer(): void;
  onInterrupted(cb: (reason: string) => void): void;
}
```

Represents the server we synchronize with. There is a simulated implementation in `tests/sync.spec.ts`, and the real implementation lives in `packages/backend/server`.

### ServerClock

A `ServerClock` is a clock value generated each time an update is stored on the server. It determines the order in which updates were stored on the server.

The `DocEngine` decides whether to pull updates from the server based on the `ServerClock`.

A `ServerClock` written later must be **greater** than all earlier ones, so on the client side we can call `loadServerClock(largest ServerClock previously received)` to obtain the `ServerClock` of every doc that has changed since then.
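A sketch of how a client can use this guarantee to catch up after reconnecting, assuming a `DocServer` instance and a locally persisted largest-seen clock:

```ts
import type { DocServer } from './server'; // assumed path

// Sketch: because later writes always receive a strictly greater
// ServerClock, asking for everything after the largest clock we have
// already seen cannot miss a change.
async function docsToPull(
  server: DocServer,
  largestSeenServerClock: number
): Promise<string[]> {
  const changed = await server.loadServerClock(largestSeenServerClock);
  // each entry maps docId -> its newest ServerClock on the server,
  // i.e. a doc we may need to pull
  return Array.from(changed.keys());
}
```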
## DocEngine

The `DocEngine` is where all the synchronization logic actually happens.

Due to the complexity of the implementation, we divide it into two parts.

## DocEngine - LocalPart

Synchronizes **the `YDoc` instance** and **storage**.

The typical workflow (see the sketch below) is:

1. load data from storage and apply it to the `YDoc` instance;
2. track `YDoc` changes;
3. write the changes back to storage.
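In `yjs` terms, the three steps look roughly like this. This is a simplified sketch with a hypothetical `storage` shape, not the real `DocEngineLocalPart`, which additionally batches work through a priority queue:

```ts
import { applyUpdate, Doc as YDoc } from 'yjs';

// Tag our own writes so the update listener can ignore them.
const ORIGIN = 'doc-engine';

// Hypothetical storage shape for illustration only.
interface SimpleStorage {
  load(docId: string): Promise<Uint8Array | null>;
  save(docId: string, update: Uint8Array): Promise<void>;
}

// Sketch of LocalPart for a single doc: load -> track -> write back.
async function connectDocToStorage(doc: YDoc, storage: SimpleStorage) {
  // 1. load data from storage and apply it to the YDoc instance
  const data = await storage.load(doc.guid);
  if (data) {
    applyUpdate(doc, data, ORIGIN);
  }

  // 2. track YDoc changes, skipping the updates we applied ourselves
  doc.on('update', (update: Uint8Array, origin: unknown) => {
    if (origin === ORIGIN) return;
    // 3. write the changes back to storage
    storage.save(doc.guid, update).catch(console.error);
  });
}
```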
### SeqNum

Each doc's data in `Storage` carries a `SeqNum`. Every time `LocalPart` writes data, the `SeqNum` is incremented by 1.

There is also a `PushedSeqNum`, which is used by the `RemotePart` described later.
## DocEngine - RemotePart

Synchronizes `Storage` and `Server`.

The typical workflow is:

1. Connect to the server, load the `ServerClock` for all docs, and start subscribing to server-side updates.

2. Check whether each doc requires a `push` or a `pull` (sketched in the next two sections).

3. Execute all pushes and pulls.

4. Listen for updates from `LocalPart` and push them to the server.

5. Listen for server-side updates and write them to storage.
### PushedSeqNum

Each doc records a `PushedSeqNum`, used to determine whether the doc has unpushed updates.

After each `push` completes, `PushedSeqNum` is incremented by 1.

If `PushedSeqNum` and `SeqNum` still differ after a push completes (which usually means a previous `push` failed), we do a full pull-and-push and then set `PushedSeqNum = SeqNum`. A sketch of this check follows.
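A condensed sketch of the push-side decision, with hypothetical storage helper names (the real logic lives in `remote.ts`, shown later in this commit):

```ts
// Hypothetical storage shape for illustration, not the real DocStorageInner API.
interface SeqNumStore {
  loadSeqNum(docId: string): Promise<number>;
  loadPushedSeqNum(docId: string): Promise<number | null>;
}

// Sketch: decide at connect time whether a doc can be synced incrementally
// or needs the full pull-and-push recovery path.
async function needsFullPullAndPush(
  docId: string,
  storage: SeqNumStore
): Promise<boolean> {
  const seqNum = await storage.loadSeqNum(docId);
  const pushed = await storage.loadPushedSeqNum(docId);
  // never pushed, or a previous push failed part-way: recover with a full
  // pull-and-push, after which PushedSeqNum is set equal to SeqNum
  return pushed === null || pushed !== seqNum;
}
```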
### PulledServerClock

Each doc also records a `PulledServerClock`, which is compared with the server's `ServerClock` to determine whether the doc needs a `pull`.

When a `pull` completes, `PulledServerClock` is set to the `ServerClock` returned by the server.
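The matching pull-side check, again as a sketch with an illustrative storage helper:

```ts
// Sketch: a doc needs a pull when the server holds an update newer than
// the one we last pulled. Helper name is illustrative.
async function needsPull(
  docId: string,
  latestServerClock: number, // from loadServerClock or the subscription
  storage: { loadPulledServerClock(docId: string): Promise<number | null> }
): Promise<boolean> {
  const pulled = await storage.loadPulledServerClock(docId);
  return pulled === null || pulled < latestServerClock;
}
```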
### Retry

The `RemotePart` may fail at any time; its built-in retry mechanism restarts the process 5 seconds after a failure.
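A sketch of that retry policy (the real loop also tracks a `retrying` flag in the engine state):

```ts
// Sketch: restart the remote main loop 5 seconds after any failure,
// until the abort signal stops the engine.
async function mainLoopWithRetry(
  mainLoop: (signal: AbortSignal) => Promise<void>,
  signal: AbortSignal
) {
  while (!signal.aborted) {
    try {
      await mainLoop(signal);
    } catch (err) {
      console.error('remote sync failed, retrying in 5 seconds', err);
      await new Promise(resolve => setTimeout(resolve, 5000));
    }
  }
}
```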
@@ -0,0 +1,41 @@
import { describe, expect, test } from 'vitest';

import { PriorityQueue } from '../priority-queue';

describe('Priority Queue', () => {
  test('priority', () => {
    const queue = new PriorityQueue();

    queue.push('foo', 1);
    queue.push('bar', 2);
    queue.push('baz', 0);

    expect(queue.pop()).toBe('bar');
    expect(queue.pop()).toBe('foo');
    expect(queue.pop()).toBe('baz');
    expect(queue.pop()).toBe(null);

    queue.push('B', 1);
    queue.push('A', 1);

    // if priority same then follow id binary order
    expect(queue.pop()).toBe('B');
    expect(queue.pop()).toBe('A');
    expect(queue.pop()).toBe(null);

    queue.push('A', 1);
    queue.push('B', 2);
    queue.push('A', 3); // same id but different priority, update the priority

    expect(queue.pop()).toBe('A');
    expect(queue.pop()).toBe('B');
    expect(queue.pop()).toBe(null);

    queue.push('A', 1);
    queue.push('B', 2);
    queue.remove('B');

    expect(queue.pop()).toBe('A');
    expect(queue.pop()).toBe(null);
  });
});

@@ -0,0 +1,234 @@
import { nanoid } from 'nanoid';
import { describe, expect, test, vitest } from 'vitest';
import { Doc as YDoc, encodeStateAsUpdate } from 'yjs';
import { diffUpdate, encodeStateVectorFromUpdate, mergeUpdates } from 'yjs';

import { AsyncLock } from '../../../../utils';
import { DocEngine } from '..';
import type { DocServer } from '../server';
import { MemoryStorage } from '../storage';
import { isEmptyUpdate } from '../utils';

class MiniServer {
  lock = new AsyncLock();
  db = new Map<string, { data: Uint8Array; clock: number }>();
  listeners = new Set<{
    cb: (updates: {
      docId: string;
      data: Uint8Array;
      serverClock: number;
    }) => void;
    clientId: string;
  }>();

  client() {
    return new MiniServerClient(nanoid(), this);
  }
}

class MiniServerClient implements DocServer {
  constructor(
    private readonly id: string,
    private readonly server: MiniServer
  ) {}

  async pullDoc(docId: string, stateVector: Uint8Array) {
    using _lock = await this.server.lock.acquire();
    const doc = this.server.db.get(docId);
    if (!doc) {
      return null;
    }
    const data = doc.data;
    return {
      data:
        !isEmptyUpdate(data) && stateVector.length > 0
          ? diffUpdate(data, stateVector)
          : data,
      serverClock: 0,
      stateVector: !isEmptyUpdate(data)
        ? encodeStateVectorFromUpdate(data)
        : new Uint8Array(),
    };
  }

  async pushDoc(
    docId: string,
    data: Uint8Array
  ): Promise<{ serverClock: number }> {
    using _lock = await this.server.lock.acquire();
    const doc = this.server.db.get(docId);
    const oldData = doc?.data ?? new Uint8Array();
    const newClock = (doc?.clock ?? 0) + 1;
    this.server.db.set(docId, {
      data: !isEmptyUpdate(data)
        ? !isEmptyUpdate(oldData)
          ? mergeUpdates([oldData, data])
          : data
        : oldData,
      clock: newClock,
    });
    for (const { clientId, cb } of this.server.listeners) {
      if (clientId !== this.id) {
        cb({
          docId,
          data,
          serverClock: newClock,
        });
      }
    }
    return { serverClock: newClock };
  }

  async loadServerClock(after: number): Promise<Map<string, number>> {
    using _lock = await this.server.lock.acquire();
    const map = new Map<string, number>();

    for (const [docId, { clock }] of this.server.db) {
      if (clock > after) {
        map.set(docId, clock);
      }
    }

    return map;
  }

  async subscribeAllDocs(
    cb: (updates: {
      docId: string;
      data: Uint8Array;
      serverClock: number;
    }) => void
  ): Promise<() => void> {
    const listener = { cb, clientId: this.id };
    this.server.listeners.add(listener);
    return () => {
      this.server.listeners.delete(listener);
    };
  }

  async waitForConnectingServer(): Promise<void> {}
  disconnectServer(): void {}
  onInterrupted(_cb: (reason: string) => void): void {}
}

describe('sync', () => {
  test('basic sync', async () => {
    const storage = new MemoryStorage();
    const server = new MiniServer();
    const engine = new DocEngine(storage, server.client()).start();
    const doc = new YDoc({ guid: 'a' });
    engine.addDoc(doc);
    const map = doc.getMap('aaa');
    map.set('a', 1);

    await engine.waitForSynced();
    expect(server.db.size).toBe(1);
    expect(storage.docDb.keys().length).toBe(1);
  });

  test('can pull from server', async () => {
    const server = new MiniServer();
    {
      const engine = new DocEngine(
        new MemoryStorage(),
        server.client()
      ).start();
      const doc = new YDoc({ guid: 'a' });
      engine.addDoc(doc);
      const map = doc.getMap('aaa');
      map.set('a', 1);
      await engine.waitForSynced();
      expect(server.db.size).toBe(1);
    }
    {
      const engine = new DocEngine(
        new MemoryStorage(),
        server.client()
      ).start();
      const doc = new YDoc({ guid: 'a' });
      engine.addDoc(doc);
      await engine.waitForSynced();
      expect(doc.getMap('aaa').get('a')).toBe(1);
    }
  });

  test('2 client', async () => {
    const server = new MiniServer();
    await Promise.all([
      (async () => {
        const engine = new DocEngine(
          new MemoryStorage(),
          server.client()
        ).start();
        const doc = new YDoc({ guid: 'a' });
        engine.addDoc(doc);
        const map = doc.getMap('aaa');
        map.set('a', 1);
        await vitest.waitUntil(() => {
          return map.get('b') === 2;
        });
      })(),
      (async () => {
        const engine = new DocEngine(
          new MemoryStorage(),
          server.client()
        ).start();
        const doc = new YDoc({ guid: 'a' });
        engine.addDoc(doc);
        const map = doc.getMap('aaa');
        map.set('b', 2);
        await vitest.waitUntil(() => {
          return map.get('a') === 1;
        });
      })(),
    ]);
  });

  test('2 client share storage and eventBus (simulate different tabs in same browser)', async () => {
    const server = new MiniServer();
    const storage = new MemoryStorage();

    await Promise.all([
      (async () => {
        const engine = new DocEngine(storage, server.client()).start();
        const doc = new YDoc({ guid: 'a' });
        engine.addDoc(doc);

        const map = doc.getMap('aaa');
        map.set('a', 1);
        await vitest.waitUntil(() => map.get('b') === 2);
      })(),
      (async () => {
        const engine = new DocEngine(storage, server.client()).start();
        const doc = new YDoc({ guid: 'a' });
        engine.addDoc(doc);
        const map = doc.getMap('aaa');
        map.set('b', 2);
        await vitest.waitUntil(() => map.get('a') === 1);
      })(),
    ]);
  });

  test('legacy data', async () => {
    const server = new MiniServer();
    const storage = new MemoryStorage();

    {
      // write legacy data to storage
      const doc = new YDoc({ guid: 'a' });
      const map = doc.getMap('aaa');
      map.set('a', 1);

      await storage.doc.set('a', encodeStateAsUpdate(doc));
    }

    const engine = new DocEngine(storage, server.client()).start();
    const doc = new YDoc({ guid: 'a' });
    engine.addDoc(doc);

    // should load to ydoc and save to server
    await vitest.waitUntil(
      () => doc.getMap('aaa').get('a') === 1 && server.db.size === 1
    );
  });
});

@@ -0,0 +1,43 @@
import { PriorityQueue } from './priority-queue';

export class AsyncPriorityQueue extends PriorityQueue {
  private _resolveUpdate: (() => void) | null = null;
  private _waitForUpdate: Promise<void> | null = null;

  async asyncPop(abort?: AbortSignal): Promise<string> {
    const update = this.pop();
    if (update) {
      return update;
    } else {
      if (!this._waitForUpdate) {
        this._waitForUpdate = new Promise(resolve => {
          this._resolveUpdate = resolve;
        });
      }

      await Promise.race([
        this._waitForUpdate,
        new Promise((_, reject) => {
          if (abort?.aborted) {
            reject(abort?.reason);
          }
          abort?.addEventListener('abort', () => {
            reject(abort.reason);
          });
        }),
      ]);

      return this.asyncPop(abort);
    }
  }

  override push(id: string, priority: number = 0) {
    super.push(id, priority);
    if (this._resolveUpdate) {
      const resolve = this._resolveUpdate;
      this._resolveUpdate = null;
      this._waitForUpdate = null;
      resolve();
    }
  }
}

32
packages/common/infra/src/workspace/engine/doc/clock.ts
Normal file
@@ -0,0 +1,32 @@
export class ClockMap {
  max: number = 0;
  constructor(private readonly map: Map<string, number>) {
    for (const value of map.values()) {
      if (value > this.max) {
        this.max = value;
      }
    }
  }

  get(id: string): number {
    return this.map.get(id) ?? 0;
  }

  set(id: string, value: number) {
    this.map.set(id, value);
    if (value > this.max) {
      this.max = value;
    }
  }

  setIfBigger(id: string, value: number) {
    if (value > this.get(id)) {
      this.set(id, value);
    }
  }

  clear() {
    this.map.clear();
    this.max = 0;
  }
}

55
packages/common/infra/src/workspace/engine/doc/event.ts
Normal file
@@ -0,0 +1,55 @@
export type DocEvent =
  | {
      type: 'ClientUpdateCommitted';
      clientId: string;
      docId: string;
      update: Uint8Array;
      seqNum: number;
    }
  | {
      type: 'ServerUpdateCommitted';
      docId: string;
      update: Uint8Array;
      clientId: string;
    }
  | {
      type: 'LegacyClientUpdateCommitted';
      docId: string;
      update: Uint8Array;
    };

export interface DocEventBus {
  emit(event: DocEvent): void;
  on(cb: (event: DocEvent) => void): () => void;
}

export class MemoryDocEventBus implements DocEventBus {
  listeners = new Set<(event: DocEvent) => void>();
  emit(event: DocEvent): void {
    for (const listener of this.listeners) {
      try {
        listener(event);
      } catch (e) {
        console.error(e);
      }
    }
  }
  on(cb: (event: DocEvent) => void): () => void {
    this.listeners.add(cb);
    return () => {
      this.listeners.delete(cb);
    };
  }
}

export class DocEventBusInner implements DocEventBus {
  constructor(private readonly eventBusBehavior: DocEventBus) {}

  emit(event: DocEvent) {
    this.eventBusBehavior.emit(event);
  }

  on(cb: (event: DocEvent) => void) {
    return this.eventBusBehavior.on(cb);
  }
}

187
packages/common/infra/src/workspace/engine/doc/index.ts
Normal file
@@ -0,0 +1,187 @@
import { DebugLogger } from '@affine/debug';
import { nanoid } from 'nanoid';
import { map } from 'rxjs';
import type { Doc as YDoc } from 'yjs';

import { createIdentifier } from '../../../di';
import { LiveData } from '../../../livedata';
import { MANUALLY_STOP } from '../../../utils';
import { DocEngineLocalPart } from './local';
import { DocEngineRemotePart } from './remote';
import type { DocServer } from './server';
import { type DocStorage, DocStorageInner } from './storage';

const logger = new DebugLogger('doc-engine');

export type { DocEvent, DocEventBus } from './event';
export { MemoryDocEventBus } from './event';
export type { DocServer } from './server';
export type { DocStorage } from './storage';
export {
  MemoryStorage as MemoryDocStorage,
  ReadonlyStorage as ReadonlyDocStorage,
} from './storage';

export const DocServerImpl = createIdentifier<DocServer>('DocServer');

export const DocStorageImpl = createIdentifier<DocStorage>('DocStorage');

export class DocEngine {
  localPart: DocEngineLocalPart;
  remotePart: DocEngineRemotePart | null;

  storage: DocStorageInner;

  engineState = LiveData.computed(get => {
    const localState = get(this.localPart.engineState);
    if (this.remotePart) {
      const remoteState = get(this.remotePart?.engineState);
      return {
        total: remoteState.total,
        syncing: remoteState.syncing,
        saving: localState.syncing,
        retrying: remoteState.retrying,
        errorMessage: remoteState.errorMessage,
      };
    }
    return {
      total: localState.total,
      syncing: localState.syncing,
      saving: localState.syncing,
      retrying: false,
      errorMessage: null,
    };
  });

  docState(docId: string) {
    const localState = this.localPart.docState(docId);
    const remoteState = this.remotePart?.docState(docId);
    return LiveData.computed(get => {
      const local = get(localState);
      const remote = remoteState ? get(remoteState) : null;
      return {
        ready: local.ready,
        saving: local.syncing,
        syncing: local.syncing || remote?.syncing,
      };
    });
  }

  constructor(
    storage: DocStorage,
    private readonly server?: DocServer | null
  ) {
    const clientId = nanoid();
    this.storage = new DocStorageInner(storage);
    this.localPart = new DocEngineLocalPart(clientId, this.storage);
    this.remotePart = this.server
      ? new DocEngineRemotePart(clientId, this.storage, this.server)
      : null;
  }

  abort = new AbortController();

  start() {
    this.abort.abort(MANUALLY_STOP);
    this.abort = new AbortController();
    Promise.all([
      this.localPart.mainLoop(this.abort.signal),
      this.remotePart?.mainLoop(this.abort.signal),
    ]).catch(err => {
      if (err === MANUALLY_STOP) {
        return;
      }
      logger.error('Doc engine error', err);
    });
    return this;
  }

  stop() {
    this.abort.abort(MANUALLY_STOP);
  }

  async resetSyncStatus() {
    this.stop();
    await this.storage.clearSyncMetadata();
    await this.storage.clearServerClock();
  }

  addDoc(doc: YDoc, withSubDocs = true) {
    this.localPart.actions.addDoc(doc);
    this.remotePart?.actions.addDoc(doc.guid);

    if (withSubDocs) {
      const subdocs = doc.getSubdocs();
      for (const subdoc of subdocs) {
        this.addDoc(subdoc, false);
      }
      doc.on('subdocs', ({ added }: { added: Set<YDoc> }) => {
        for (const subdoc of added) {
          this.addDoc(subdoc, false);
        }
      });
    }
  }

  setPriority(docId: string, priority: number) {
    this.localPart.setPriority(docId, priority);
    this.remotePart?.setPriority(docId, priority);
  }

  /**
   * ## Saved:
   * YDoc changes have been saved to storage, and the browser can be safely closed without losing data.
   */
  waitForSaved() {
    return new Promise<void>(resolve => {
      this.engineState
        .pipe(map(state => state.saving === 0))
        .subscribe(saved => {
          if (saved) {
            resolve();
          }
        });
    });
  }

  /**
   * ## Synced:
   * is fully synchronized with the server
   */
  waitForSynced() {
    return new Promise<void>(resolve => {
      this.engineState
        .pipe(map(state => state.syncing === 0 && state.saving === 0))
        .subscribe(synced => {
          if (synced) {
            resolve();
          }
        });
    });
  }

  /**
   * ## Ready:
   *
   * means that the doc has been loaded and the data can be modified.
   * (is not force, you can still modify it if you know you are creating some new data)
   *
   * this is a temporary solution to deal with the yjs overwrite issue.
   *
   * if content is loaded from storage
   * or if content is pulled from the server, it will be true, otherwise be false.
   *
   * For example, when opening a doc that is not in storage, ready = false until the content is pulled from the server.
   */
  waitForReady(docId: string) {
    return new Promise<void>(resolve => {
      this.docState(docId)
        .pipe(map(state => state.ready))
        .subscribe(ready => {
          if (ready) {
            resolve();
          }
        });
    });
  }
}

302
packages/common/infra/src/workspace/engine/doc/local.ts
Normal file
@@ -0,0 +1,302 @@
import { DebugLogger } from '@affine/debug';
import { Unreachable } from '@affine/env/constant';
import { groupBy } from 'lodash-es';
import { Observable, Subject } from 'rxjs';
import type { Doc as YDoc } from 'yjs';
import { applyUpdate, encodeStateAsUpdate, mergeUpdates } from 'yjs';

import { LiveData } from '../../../livedata';
import { throwIfAborted } from '../../../utils';
import { AsyncPriorityQueue } from './async-priority-queue';
import type { DocEvent } from './event';
import type { DocStorageInner } from './storage';
import { isEmptyUpdate } from './utils';

type Job =
  | {
      type: 'load';
      docId: string;
    }
  | {
      type: 'save';
      docId: string;
      update: Uint8Array;
    }
  | {
      type: 'apply';
      docId: string;
      update: Uint8Array;
      isInitialize: boolean;
    };

const DOC_ENGINE_ORIGIN = 'doc-engine';

const logger = new DebugLogger('doc-engine:local');

export interface LocalEngineState {
  total: number;
  syncing: number;
}

export interface LocalDocState {
  ready: boolean;
  syncing: boolean;
}

/**
 * never fail
 */
export class DocEngineLocalPart {
  private readonly prioritySettings = new Map<string, number>();
  private readonly statusUpdatedSubject = new Subject<string>();

  private readonly status = {
    docs: new Map<string, YDoc>(),
    connectedDocs: new Set<string>(),
    readyDocs: new Set<string>(),
    jobDocQueue: new AsyncPriorityQueue(),
    jobMap: new Map<string, Job[]>(),
    currentJob: null as { docId: string; jobs: Job[] } | null,
  };

  engineState = LiveData.from<LocalEngineState>(
    new Observable(subscribe => {
      const next = () => {
        subscribe.next({
          total: this.status.docs.size,
          syncing: this.status.jobMap.size + (this.status.currentJob ? 1 : 0),
        });
      };
      next();
      return this.statusUpdatedSubject.subscribe(() => {
        next();
      });
    }),
    { syncing: 0, total: 0 }
  );

  docState(docId: string) {
    return LiveData.from<LocalDocState>(
      new Observable(subscribe => {
        const next = () => {
          subscribe.next({
            ready: this.status.readyDocs.has(docId) ?? false,
            syncing:
              (this.status.jobMap.get(docId)?.length ?? 0) > 0 ||
              this.status.currentJob?.docId === docId,
          });
        };
        next();
        return this.statusUpdatedSubject.subscribe(updatedId => {
          if (updatedId === docId) next();
        });
      }),
      { ready: false, syncing: false }
    );
  }

  constructor(
    private readonly clientId: string,
    private readonly storage: DocStorageInner
  ) {}

  async mainLoop(signal?: AbortSignal) {
    const dispose = this.storage.eventBus.on(event => {
      const handler = this.events[event.type];
      if (handler) {
        handler(event as any);
      }
    });
    try {
      // eslint-disable-next-line no-constant-condition
      while (true) {
        throwIfAborted(signal);
        const docId = await this.status.jobDocQueue.asyncPop(signal);
        const jobs = this.status.jobMap.get(docId);
        this.status.jobMap.delete(docId);

        if (!jobs) {
          continue;
        }

        this.status.currentJob = { docId, jobs };
        this.statusUpdatedSubject.next(docId);

        const { apply, load, save } = groupBy(jobs, job => job.type) as {
          [key in Job['type']]?: Job[];
        };

        if (load?.length) {
          await this.jobs.load(load[0] as any, signal);
        }

        for (const applyJob of apply ?? []) {
          await this.jobs.apply(applyJob as any, signal);
        }

        if (save?.length) {
          await this.jobs.save(docId, save as any, signal);
        }

        this.status.currentJob = null;
        this.statusUpdatedSubject.next(docId);
      }
    } finally {
      dispose();

      for (const docs of this.status.connectedDocs) {
        const doc = this.status.docs.get(docs);
        if (doc) {
          doc.off('update', this.handleDocUpdate);
        }
      }
    }
  }

  readonly actions = {
    addDoc: (doc: YDoc) => {
      this.schedule({
        type: 'load',
        docId: doc.guid,
      });

      this.status.docs.set(doc.guid, doc);
      this.statusUpdatedSubject.next(doc.guid);
    },
  };

  readonly jobs = {
    load: async (job: Job & { type: 'load' }, signal?: AbortSignal) => {
      const doc = this.status.docs.get(job.docId);
      if (!doc) {
        throw new Unreachable('doc not found');
      }
      const existingData = encodeStateAsUpdate(doc);

      if (!isEmptyUpdate(existingData)) {
        this.schedule({
          type: 'save',
          docId: doc.guid,
          update: existingData,
        });
      }

      // mark doc as loaded
      doc.emit('sync', [true]);
      doc.on('update', this.handleDocUpdate);

      this.status.connectedDocs.add(job.docId);
      this.statusUpdatedSubject.next(job.docId);

      const docData = await this.storage.loadDocFromLocal(job.docId, signal);

      if (!docData || isEmptyUpdate(docData)) {
        return;
      }

      this.applyUpdate(job.docId, docData);
      this.status.readyDocs.add(job.docId);
      this.statusUpdatedSubject.next(job.docId);
    },
    save: async (
      docId: string,
      jobs: (Job & { type: 'save' })[],
      signal?: AbortSignal
    ) => {
      if (this.status.connectedDocs.has(docId)) {
        const merged = mergeUpdates(
          jobs.map(j => j.update).filter(update => !isEmptyUpdate(update))
        );
        const newSeqNum = await this.storage.commitDocAsClientUpdate(
          docId,
          merged,
          signal
        );
        this.storage.eventBus.emit({
          type: 'ClientUpdateCommitted',
          seqNum: newSeqNum,
          docId: docId,
          clientId: this.clientId,
          update: merged,
        });
      }
    },
    apply: async (job: Job & { type: 'apply' }, signal?: AbortSignal) => {
      throwIfAborted(signal);
      if (this.status.connectedDocs.has(job.docId)) {
        this.applyUpdate(job.docId, job.update);
      }
      if (job.isInitialize && !isEmptyUpdate(job.update)) {
        this.status.readyDocs.add(job.docId);
      }
    },
  };

  readonly events: {
    [key in DocEvent['type']]?: (event: DocEvent & { type: key }) => void;
  } = {
    ServerUpdateCommitted: ({ docId, update, clientId }) => {
      this.schedule({
        type: 'apply',
        docId,
        update,
        isInitialize: clientId === this.clientId,
      });
    },
    ClientUpdateCommitted: ({ docId, update, clientId }) => {
      if (clientId !== this.clientId) {
        this.schedule({
          type: 'apply',
          docId,
          update,
          isInitialize: false,
        });
      }
    },
    LegacyClientUpdateCommitted: ({ docId, update }) => {
      this.schedule({
        type: 'save',
        docId,
        update,
      });
    },
  };

  handleDocUpdate = (update: Uint8Array, origin: any, doc: YDoc) => {
    if (origin === DOC_ENGINE_ORIGIN) {
      return;
    }

    this.schedule({
      type: 'save',
      docId: doc.guid,
      update,
    });
  };

  applyUpdate(docId: string, update: Uint8Array) {
    const doc = this.status.docs.get(docId);
    if (doc && !isEmptyUpdate(update)) {
      try {
        applyUpdate(doc, update, DOC_ENGINE_ORIGIN);
      } catch (err) {
        logger.error('failed to apply update', err);
      }
    }
  }

  schedule(job: Job) {
    const priority = this.prioritySettings.get(job.docId) ?? 0;
    this.status.jobDocQueue.push(job.docId, priority);

    const existingJobs = this.status.jobMap.get(job.docId) ?? [];
    existingJobs.push(job);
    this.status.jobMap.set(job.docId, existingJobs);
    this.statusUpdatedSubject.next(job.docId);
  }

  setPriority(docId: string, priority: number) {
    this.prioritySettings.set(docId, priority);
    this.status.jobDocQueue.updatePriority(docId, priority);
  }
}

@ -0,0 +1,69 @@
|
||||
import { BinarySearchTree } from '@datastructures-js/binary-search-tree';
|
||||
|
||||
export class PriorityQueue {
|
||||
tree = new BinarySearchTree<{ id: string; priority: number }>((a, b) => {
|
||||
return a.priority === b.priority
|
||||
? a.id === b.id
|
||||
? 0
|
||||
: a.id > b.id
|
||||
? 1
|
||||
: -1
|
||||
: a.priority - b.priority;
|
||||
});
|
||||
priorityMap = new Map<string, number>();
|
||||
|
||||
push(id: string, priority: number = 0) {
|
||||
const oldPriority = this.priorityMap.get(id);
|
||||
if (oldPriority === priority) {
|
||||
return;
|
||||
}
|
||||
if (oldPriority !== undefined) {
|
||||
this.remove(id);
|
||||
}
|
||||
this.tree.insert({ id, priority });
|
||||
this.priorityMap.set(id, priority);
|
||||
}
|
||||
|
||||
pop() {
|
||||
const node = this.tree.max();
|
||||
|
||||
if (!node) {
|
||||
return null;
|
||||
}
|
||||
|
||||
this.tree.removeNode(node);
|
||||
|
||||
const { id } = node.getValue();
|
||||
this.priorityMap.delete(id);
|
||||
|
||||
return id;
|
||||
}
|
||||
|
||||
remove(id: string, priority?: number) {
|
||||
priority ??= this.priorityMap.get(id);
|
||||
if (priority === undefined) {
|
||||
return false;
|
||||
}
|
||||
const removed = this.tree.remove({ id, priority });
|
||||
if (removed) {
|
||||
this.priorityMap.delete(id);
|
||||
}
|
||||
|
||||
return removed;
|
||||
}
|
||||
|
||||
clear() {
|
||||
this.tree.clear();
|
||||
this.priorityMap.clear();
|
||||
}
|
||||
|
||||
updatePriority(id: string, priority: number) {
|
||||
if (this.remove(id)) {
|
||||
this.push(id, priority);
|
||||
}
|
||||
}
|
||||
|
||||
get length() {
|
||||
return this.tree.count;
|
||||
}
|
||||
}
|
545
packages/common/infra/src/workspace/engine/doc/remote.ts
Normal file
@ -0,0 +1,545 @@
import { DebugLogger } from '@affine/debug';
import { remove } from 'lodash-es';
import { Observable, Subject } from 'rxjs';
import { diffUpdate, encodeStateVectorFromUpdate, mergeUpdates } from 'yjs';

import { LiveData } from '../../../livedata';
import { throwIfAborted } from '../../../utils';
import { AsyncPriorityQueue } from './async-priority-queue';
import { ClockMap } from './clock';
import type { DocEvent } from './event';
import type { DocServer } from './server';
import type { DocStorageInner } from './storage';
import { isEmptyUpdate } from './utils';

const logger = new DebugLogger('doc-engine:remote');

type Job =
  | {
      type: 'connect';
      docId: string;
    }
  | {
      type: 'push';
      docId: string;
      update: Uint8Array;
      seqNum: number;
    }
  | {
      type: 'pull';
      docId: string;
    }
  | {
      type: 'pullAndPush';
      docId: string;
    }
  | {
      type: 'save';
      docId: string;
      update?: Uint8Array;
      serverClock: number;
    };

export interface Status {
  docs: Set<string>;
  connectedDocs: Set<string>;
  jobDocQueue: AsyncPriorityQueue;
  jobMap: Map<string, Job[]>;
  serverClocks: ClockMap;
  syncing: boolean;
  retrying: boolean;
  errorMessage: string | null;
}

export interface RemoteEngineState {
  total: number;
  syncing: number;
  retrying: boolean;
  errorMessage: string | null;
}

export interface RemoteDocState {
  syncing: boolean;
}

export class DocEngineRemotePart {
  private readonly prioritySettings = new Map<string, number>();

  constructor(
    private readonly clientId: string,
    private readonly storage: DocStorageInner,
    private readonly server: DocServer
  ) {}

  private status: Status = {
    docs: new Set<string>(),
    connectedDocs: new Set<string>(),
    jobDocQueue: new AsyncPriorityQueue(),
    jobMap: new Map(),
    serverClocks: new ClockMap(new Map()),
    syncing: false,
    retrying: false,
    errorMessage: null,
  };
  private readonly statusUpdatedSubject = new Subject<string | true>();

  engineState = LiveData.from<RemoteEngineState>(
    new Observable(subscribe => {
      const next = () => {
        if (!this.status.syncing) {
          // not syncing yet: report every doc as pending, then return so the
          // jobMap-based emit below doesn't immediately overwrite this value
          subscribe.next({
            total: this.status.docs.size,
            syncing: this.status.docs.size,
            retrying: this.status.retrying,
            errorMessage: this.status.errorMessage,
          });
          return;
        }
        const syncing = this.status.jobMap.size;
        subscribe.next({
          total: this.status.docs.size,
          syncing: syncing,
          retrying: this.status.retrying,
          errorMessage: this.status.errorMessage,
        });
      };
      next();
      return this.statusUpdatedSubject.subscribe(() => {
        next();
      });
    }),
    {
      syncing: 0,
      total: 0,
      retrying: false,
      errorMessage: null,
    }
  );

  docState(docId: string) {
    return LiveData.from<RemoteDocState>(
      new Observable(subscribe => {
        const next = () => {
          subscribe.next({
            syncing:
              !this.status.connectedDocs.has(docId) ||
              this.status.jobMap.has(docId),
          });
        };
        next();
        return this.statusUpdatedSubject.subscribe(updatedId => {
          if (updatedId === true || updatedId === docId) next();
        });
      }),
      { syncing: false }
    );
  }

  readonly jobs = {
    connect: async (docId: string, signal?: AbortSignal) => {
      const pushedSeqNum = await this.storage.loadDocSeqNumPushed(
        docId,
        signal
      );
      const seqNum = await this.storage.loadDocSeqNum(docId, signal);

      if (pushedSeqNum === null || pushedSeqNum !== seqNum) {
        await this.jobs.pullAndPush(docId, signal);
      } else {
        const pulled = await this.storage.loadDocServerClockPulled(docId);
        if (pulled === null || pulled !== this.status.serverClocks.get(docId)) {
          await this.jobs.pull(docId, signal);
        }
      }

      this.status.connectedDocs.add(docId);
      this.statusUpdatedSubject.next(docId);
    },
    push: async (
      docId: string,
      jobs: (Job & { type: 'push' })[],
      signal?: AbortSignal
    ) => {
      if (this.status.connectedDocs.has(docId)) {
        const maxSeqNum = Math.max(...jobs.map(j => j.seqNum));
        const pushedSeqNum =
          (await this.storage.loadDocSeqNumPushed(docId, signal)) ?? 0;

        if (maxSeqNum - pushedSeqNum === jobs.length) {
          const merged = mergeUpdates(
            jobs.map(j => j.update).filter(update => !isEmptyUpdate(update))
          );
          if (!isEmptyUpdate(merged)) {
            const { serverClock } = await this.server.pushDoc(docId, merged);
            this.schedule({
              type: 'save',
              docId,
              serverClock,
            });
          }
          await this.storage.saveDocPushedSeqNum(
            docId,
            { add: jobs.length },
            signal
          );
        } else {
          // another tab may be modifying the doc, so do a full pull-and-push for safety
          await this.jobs.pullAndPush(docId, signal);
        }
      }
    },
    pullAndPush: async (docId: string, signal?: AbortSignal) => {
      const seqNum = await this.storage.loadDocSeqNum(docId, signal);
      const data = await this.storage.loadDocFromLocal(docId, signal);

      const stateVector =
        data && !isEmptyUpdate(data)
          ? encodeStateVectorFromUpdate(data)
          : new Uint8Array();
      const serverData = await this.server.pullDoc(docId, stateVector);

      if (serverData) {
        const {
          data: newData,
          stateVector: serverStateVector,
          serverClock,
        } = serverData;
        await this.storage.saveServerClock(
          new Map([[docId, serverClock]]),
          signal
        );
        this.actions.updateServerClock(docId, serverClock);
        await this.storage.commitDocAsServerUpdate(
          docId,
          newData,
          serverClock,
          signal
        );
        this.storage.eventBus.emit({
          type: 'ServerUpdateCommitted',
          docId,
          clientId: this.clientId,
          update: newData,
        });
        const diff =
          data && serverStateVector && serverStateVector.length > 0
            ? diffUpdate(data, serverStateVector)
            : data;
        if (diff && !isEmptyUpdate(diff)) {
          const { serverClock } = await this.server.pushDoc(docId, diff);
          this.schedule({
            type: 'save',
            docId,
            serverClock,
          });
        }
        await this.storage.saveDocPushedSeqNum(docId, seqNum, signal);
      } else {
        if (data && !isEmptyUpdate(data)) {
          const { serverClock } = await this.server.pushDoc(docId, data);
          await this.storage.saveDocServerClockPulled(
            docId,
            serverClock,
            signal
          );
          await this.storage.saveServerClock(
            new Map([[docId, serverClock]]),
            signal
          );
          this.actions.updateServerClock(docId, serverClock);
        }
        await this.storage.saveDocPushedSeqNum(docId, seqNum, signal);
      }
    },
    pull: async (docId: string, signal?: AbortSignal) => {
      const data = await this.storage.loadDocFromLocal(docId, signal);

      const stateVector =
        data && !isEmptyUpdate(data)
          ? encodeStateVectorFromUpdate(data)
          : new Uint8Array();
      const serverDoc = await this.server.pullDoc(docId, stateVector);
      if (!serverDoc) {
        return;
      }
      const { data: newData, serverClock } = serverDoc;
      await this.storage.commitDocAsServerUpdate(
        docId,
        newData,
        serverClock,
        signal
      );
      this.storage.eventBus.emit({
        type: 'ServerUpdateCommitted',
        docId,
        clientId: this.clientId,
        update: newData,
      });
      await this.storage.saveServerClock(
        new Map([[docId, serverClock]]),
        signal
      );
      this.actions.updateServerClock(docId, serverClock);
    },
    save: async (
      docId: string,
      jobs: (Job & { type: 'save' })[],
      signal?: AbortSignal
    ) => {
      const serverClock = jobs.reduce((a, b) => Math.max(a, b.serverClock), 0);
      await this.storage.saveServerClock(
        new Map([[docId, serverClock]]),
        signal
      );
      this.actions.updateServerClock(docId, serverClock);
      if (this.status.connectedDocs.has(docId)) {
        const data = jobs
          .map(j => j.update)
          .filter((update): update is Uint8Array =>
            update ? !isEmptyUpdate(update) : false
          );
        const update = data.length > 0 ? mergeUpdates(data) : new Uint8Array();
        await this.storage.commitDocAsServerUpdate(
          docId,
          update,
          serverClock,
          signal
        );
        this.storage.eventBus.emit({
          type: 'ServerUpdateCommitted',
          docId,
          clientId: this.clientId,
          update,
        });
      }
    },
  };

  readonly actions = {
    updateServerClock: (docId: string, serverClock: number) => {
      this.status.serverClocks.setIfBigger(docId, serverClock);
    },
    addDoc: (docId: string) => {
      if (!this.status.docs.has(docId)) {
        this.status.docs.add(docId);
        this.statusUpdatedSubject.next(docId);
        this.schedule({
          type: 'connect',
          docId,
        });
      }
    },
  };

  readonly events: {
    [key in DocEvent['type']]?: (event: DocEvent & { type: key }) => void;
  } = {
    ClientUpdateCommitted: ({ clientId, docId, seqNum, update }) => {
      if (clientId !== this.clientId) {
        return;
      }
      this.schedule({
        type: 'push',
        docId,
        update,
        seqNum,
      });
    },
  };

  async mainLoop(signal?: AbortSignal) {
    // eslint-disable-next-line no-constant-condition
    while (true) {
      try {
        this.status.retrying = false;
        await this.retryLoop(signal);
      } catch (err) {
        if (signal?.aborted) {
          return;
        }
        logger.error('Remote sync error, retry in 5s', err);
        this.status.errorMessage =
          err instanceof Error ? err.message : `${err}`;
        this.statusUpdatedSubject.next(true);
      } finally {
        this.status = {
          docs: this.status.docs,
          connectedDocs: new Set<string>(),
          jobDocQueue: new AsyncPriorityQueue(),
          jobMap: new Map(),
          serverClocks: new ClockMap(new Map()),
          syncing: false,
          retrying: true,
          errorMessage: this.status.errorMessage,
        };
        this.statusUpdatedSubject.next(true);
      }
      await Promise.race([
        new Promise<void>(resolve => {
          setTimeout(resolve, 5 * 1000);
        }),
        new Promise((_, reject) => {
          // exit if manually stopped
          if (signal?.aborted) {
            reject(signal.reason);
          }
          signal?.addEventListener('abort', () => {
            reject(signal.reason);
          });
        }),
      ]);
    }
  }

  async retryLoop(signal?: AbortSignal) {
    throwIfAborted(signal);
    const abort = new AbortController();

    signal?.addEventListener('abort', reason => {
      abort.abort(reason);
    });

    signal = abort.signal;

    const disposes: (() => void)[] = [];

    try {
      disposes.push(
        this.storage.eventBus.on(event => {
          const handler = this.events[event.type];
          handler?.(event as any);
        })
      );
      throwIfAborted(signal);

      for (const doc of this.status.docs) {
        this.schedule({
          type: 'connect',
          docId: doc,
        });
      }

      logger.info('Remote sync started');
      this.status.syncing = true;
      this.statusUpdatedSubject.next(true);

      this.server.onInterrupted(reason => {
        abort.abort(reason);
      });
      await Promise.race([
        this.server.waitForConnectingServer(signal),
        new Promise<void>((_, reject) => {
          setTimeout(() => {
            reject(new Error('Connect to server timeout'));
          }, 1000 * 30);
        }),
        new Promise((_, reject) => {
          signal?.addEventListener('abort', reason => {
            reject(reason);
          });
        }),
      ]);

      throwIfAborted(signal);
      disposes.push(
        await this.server.subscribeAllDocs(({ docId, data, serverClock }) => {
          this.schedule({
            type: 'save',
            docId: docId,
            serverClock,
            update: data,
          });
        })
      );
      const cachedClocks = await this.storage.loadServerClock(signal);
      for (const [id, v] of cachedClocks) {
        this.actions.updateServerClock(id, v);
      }
      const maxClockValue = this.status.serverClocks.max;
      const newClocks = await this.server.loadServerClock(maxClockValue);
      for (const [id, v] of newClocks) {
        this.actions.updateServerClock(id, v);
      }
      await this.storage.saveServerClock(newClocks, signal);

      // eslint-disable-next-line no-constant-condition
      while (true) {
        throwIfAborted(signal);

        const docId = await this.status.jobDocQueue.asyncPop(signal);
        // eslint-disable-next-line no-constant-condition
        while (true) {
          const jobs = this.status.jobMap.get(docId);
          if (!jobs || jobs.length === 0) {
            this.status.jobMap.delete(docId);
            this.statusUpdatedSubject.next(docId);
            break;
          }

          const connect = remove(jobs, j => j.type === 'connect');
          if (connect && connect.length > 0) {
            await this.jobs.connect(docId, signal);
            continue;
          }

          const pullAndPush = remove(jobs, j => j.type === 'pullAndPush');
          if (pullAndPush && pullAndPush.length > 0) {
            await this.jobs.pullAndPush(docId, signal);
            continue;
          }

          const pull = remove(jobs, j => j.type === 'pull');
          if (pull && pull.length > 0) {
            await this.jobs.pull(docId, signal);
            continue;
          }

          const push = remove(jobs, j => j.type === 'push');
          if (push && push.length > 0) {
            await this.jobs.push(
              docId,
              push as (Job & { type: 'push' })[],
              signal
            );
            continue;
          }

          const save = remove(jobs, j => j.type === 'save');
          if (save && save.length > 0) {
            await this.jobs.save(
              docId,
              save as (Job & { type: 'save' })[],
              signal
            );
            continue;
          }
        }
      }
    } finally {
      for (const dispose of disposes) {
        dispose();
      }
      try {
        this.server.disconnectServer();
      } catch (err) {
        logger.error('Error on disconnect server', err);
      }
      this.status.syncing = false;
      logger.info('Remote sync ended');
    }
  }

  schedule(job: Job) {
    const priority = this.prioritySettings.get(job.docId) ?? 0;
    this.status.jobDocQueue.push(job.docId, priority);

    const existingJobs = this.status.jobMap.get(job.docId) ?? [];
    existingJobs.push(job);
    this.status.jobMap.set(job.docId, existingJobs);
    this.statusUpdatedSubject.next(job.docId);
  }

  setPriority(docId: string, priority: number) {
    this.prioritySettings.set(docId, priority);
    this.status.jobDocQueue.updatePriority(docId, priority);
  }
}
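A rough lifecycle sketch for DocEngineRemotePart (the construction of `clientId`, `storage`, and `server` is elided and hypothetical): docs are registered through `actions.addDoc`, which schedules a connect job, and `mainLoop` drains the job queue until its signal is aborted.

const remote = new DocEngineRemotePart(clientId, storage, server);
const abort = new AbortController();
remote.mainLoop(abort.signal).catch(err => console.error(err));
remote.actions.addDoc('doc1');   // schedules a 'connect' job for doc1
remote.setPriority('doc1', 10);  // sync doc1 ahead of lower-priority docs
// ...
abort.abort('manually stopped'); // exits mainLoop via the abort signal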
26
packages/common/infra/src/workspace/engine/doc/server.ts
Normal file
@ -0,0 +1,26 @@
export interface DocServer {
  pullDoc(
    docId: string,
    stateVector: Uint8Array
  ): Promise<{
    data: Uint8Array;
    serverClock: number;
    stateVector?: Uint8Array;
  } | null>;

  pushDoc(docId: string, data: Uint8Array): Promise<{ serverClock: number }>;

  loadServerClock(after: number): Promise<Map<string, number>>;

  subscribeAllDocs(
    cb: (updates: {
      docId: string;
      data: Uint8Array;
      serverClock: number;
    }) => void
  ): Promise<() => void>;

  waitForConnectingServer(signal: AbortSignal): Promise<void>;
  disconnectServer(): void;
  onInterrupted(cb: (reason: string) => void): void;
}
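For reference, a minimal in-memory implementation of this interface (a hypothetical test double, not part of this commit) could look like the sketch below: it keeps one merged update per doc and bumps a single global clock on every push.

import { diffUpdate, mergeUpdates } from 'yjs';

class InMemoryDocServer implements DocServer {
  private clock = 0;
  private readonly docs = new Map<string, { data: Uint8Array; serverClock: number }>();
  private cb: ((u: { docId: string; data: Uint8Array; serverClock: number }) => void) | null = null;

  async pullDoc(docId: string, stateVector: Uint8Array) {
    const doc = this.docs.get(docId);
    if (!doc) return null;
    return {
      // an empty state vector means the client has nothing; send the full doc
      data: stateVector.length > 0 ? diffUpdate(doc.data, stateVector) : doc.data,
      serverClock: doc.serverClock,
    };
  }

  async pushDoc(docId: string, data: Uint8Array) {
    const serverClock = ++this.clock;
    const prev = this.docs.get(docId);
    this.docs.set(docId, {
      data: prev ? mergeUpdates([prev.data, data]) : data,
      serverClock,
    });
    this.cb?.({ docId, data, serverClock }); // broadcast to the subscriber
    return { serverClock };
  }

  async loadServerClock(after: number) {
    const map = new Map<string, number>();
    for (const [docId, { serverClock }] of this.docs) {
      if (serverClock > after) map.set(docId, serverClock);
    }
    return map;
  }

  async subscribeAllDocs(cb: (u: { docId: string; data: Uint8Array; serverClock: number }) => void) {
    this.cb = cb;
    return () => {
      this.cb = null;
    };
  }

  async waitForConnectingServer() {} // always "connected"
  disconnectServer() {}
  onInterrupted() {}
}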
364
packages/common/infra/src/workspace/engine/doc/storage.ts
Normal file
@ -0,0 +1,364 @@
import {
  type ByteKV,
  type Memento,
  MemoryMemento,
  ReadonlyByteKV,
  wrapMemento,
} from '../../../storage';
import { AsyncLock, mergeUpdates, throwIfAborted } from '../../../utils';
import type { DocEventBus } from '.';
import { DocEventBusInner, MemoryDocEventBus } from './event';
import { isEmptyUpdate } from './utils';

export interface DocStorage {
  eventBus: DocEventBus;
  doc: ByteKV;
  syncMetadata: ByteKV;
  serverClock: ByteKV;
}

const Keys = {
  SeqNum: (docId: string) => `${docId}:seqNum`,
  SeqNumPushed: (docId: string) => `${docId}:seqNumPushed`,
  ServerClockPulled: (docId: string) => `${docId}:serverClockPulled`,
  UpdatedTime: (docId: string) => `${docId}:updateTime`,
};

const Values = {
  UInt64: {
    parse: (buffer: Uint8Array) => {
      const view = new DataView(buffer.buffer);
      return Number(view.getBigUint64(0, false));
    },
    serialize: (value: number) => {
      const buffer = new ArrayBuffer(8);
      const view = new DataView(buffer);
      view.setBigUint64(0, BigInt(value), false);
      return new Uint8Array(buffer);
    },
  },
};

export class DocStorageInner {
  public readonly eventBus = new DocEventBusInner(this.behavior.eventBus);
  constructor(public readonly behavior: DocStorage) {}

  async loadServerClock(signal?: AbortSignal): Promise<Map<string, number>> {
    throwIfAborted(signal);
    const list = await this.behavior.serverClock.keys();

    const map = new Map<string, number>();
    for (const key of list) {
      const docId = key;
      const value = await this.behavior.serverClock.get(key);
      if (value) {
        map.set(docId, Values.UInt64.parse(value));
      }
    }

    return map;
  }

  async saveServerClock(map: Map<string, number>, signal?: AbortSignal) {
    throwIfAborted(signal);
    await this.behavior.serverClock.transaction(async transaction => {
      for (const [docId, value] of map) {
        const key = docId;
        const oldBuffer = await transaction.get(key);
        const old = oldBuffer ? Values.UInt64.parse(oldBuffer) : 0;
        if (old < value) {
          await transaction.set(key, Values.UInt64.serialize(value));
        }
      }
    });
  }

  async loadDocSeqNum(docId: string, signal?: AbortSignal) {
    throwIfAborted(signal);
    const bytes = await this.behavior.syncMetadata.get(Keys.SeqNum(docId));
    if (bytes === null) {
      return 0;
    }
    return Values.UInt64.parse(bytes);
  }

  async saveDocSeqNum(
    docId: string,
    seqNum: number | true,
    signal?: AbortSignal
  ) {
    throwIfAborted(signal);
    return await this.behavior.syncMetadata.transaction(async transaction => {
      const key = Keys.SeqNum(docId);
      const oldBytes = await transaction.get(key);
      const old = oldBytes ? Values.UInt64.parse(oldBytes) : 0;
      if (seqNum === true) {
        await transaction.set(key, Values.UInt64.serialize(old + 1));
        return old + 1;
      }
      if (old < seqNum) {
        await transaction.set(key, Values.UInt64.serialize(seqNum));
        return seqNum;
      }
      return old;
    });
  }

  async loadDocSeqNumPushed(docId: string, signal?: AbortSignal) {
    throwIfAborted(signal);
    const bytes = await this.behavior.syncMetadata.get(
      Keys.SeqNumPushed(docId)
    );
    if (bytes === null) {
      return null;
    }
    return Values.UInt64.parse(bytes);
  }

  async saveDocPushedSeqNum(
    docId: string,
    seqNum: number | { add: number },
    signal?: AbortSignal
  ) {
    throwIfAborted(signal);
    await this.behavior.syncMetadata.transaction(async transaction => {
      const key = Keys.SeqNumPushed(docId);
      const oldBytes = await transaction.get(key);
      const old = oldBytes ? Values.UInt64.parse(oldBytes) : null;
      if (typeof seqNum === 'object') {
        return transaction.set(
          key,
          Values.UInt64.serialize((old ?? 0) + seqNum.add)
        );
      }
      if (old === null || old < seqNum) {
        return transaction.set(key, Values.UInt64.serialize(seqNum));
      }
    });
  }

  async loadDocServerClockPulled(docId: string, signal?: AbortSignal) {
    throwIfAborted(signal);
    const bytes = await this.behavior.syncMetadata.get(
      Keys.ServerClockPulled(docId)
    );
    if (bytes === null) {
      return null;
    }
    return Values.UInt64.parse(bytes);
  }

  async saveDocServerClockPulled(
    docId: string,
    serverClock: number,
    signal?: AbortSignal
  ) {
    throwIfAborted(signal);
    await this.behavior.syncMetadata.transaction(async transaction => {
      const oldBytes = await transaction.get(Keys.ServerClockPulled(docId));
      const old = oldBytes ? Values.UInt64.parse(oldBytes) : null;
      if (old === null || old < serverClock) {
        await transaction.set(
          Keys.ServerClockPulled(docId),
          Values.UInt64.serialize(serverClock)
        );
      }
    });
  }

  async loadDocFromLocal(docId: string, signal?: AbortSignal) {
    throwIfAborted(signal);
    return await this.behavior.doc.get(docId);
  }

  /**
   * Confirm that server updates are applied in the order they occur!!!
   */
  async commitDocAsServerUpdate(
    docId: string,
    update: Uint8Array,
    serverClock: number,
    signal?: AbortSignal
  ) {
    throwIfAborted(signal);
    await this.behavior.doc.transaction(async tx => {
      const data = await tx.get(docId);
      await tx.set(
        docId,
        data && !isEmptyUpdate(data)
          ? !isEmptyUpdate(update)
            ? mergeUpdates([data, update])
            : data
          : update
      );
    });
    await this.saveDocServerClockPulled(docId, serverClock);
  }

  async commitDocAsClientUpdate(
    docId: string,
    update: Uint8Array,
    signal?: AbortSignal
  ) {
    throwIfAborted(signal);

    await this.behavior.doc.transaction(async tx => {
      const data = await tx.get(docId);
      await tx.set(
        docId,
        data && !isEmptyUpdate(data)
          ? !isEmptyUpdate(update)
            ? mergeUpdates([data, update])
            : data
          : update
      );
    });

    return await this.saveDocSeqNum(docId, true);
  }

  clearSyncMetadata() {
    return this.behavior.syncMetadata.clear();
  }

  async clearServerClock() {
    return this.behavior.serverClock.clear();
  }
}

export class ReadonlyStorage implements DocStorage {
  constructor(
    private readonly map: {
      [key: string]: Uint8Array;
    }
  ) {}

  eventBus = new MemoryDocEventBus();
  doc = new ReadonlyByteKV(new Map(Object.entries(this.map)));
  serverClock = new ReadonlyByteKV();
  syncMetadata = new ReadonlyByteKV();
}

export class MemoryStorage implements DocStorage {
  constructor(private readonly memo: Memento = new MemoryMemento()) {}

  eventBus = new MemoryDocEventBus();
  lock = new AsyncLock();
  readonly docDb = wrapMemento(this.memo, 'doc:');
  readonly syncMetadataDb = wrapMemento(this.memo, 'syncMetadata:');
  readonly serverClockDb = wrapMemento(this.memo, 'serverClock:');

  readonly doc = {
    transaction: async cb => {
      using _lock = await this.lock.acquire();
      return await cb({
        get: async key => {
          return this.docDb.get(key) ?? null;
        },
        set: async (key, value) => {
          this.docDb.set(key, value);
        },
        keys: async () => {
          return Array.from(this.docDb.keys());
        },
        clear: () => {
          this.docDb.clear();
        },
        del: key => {
          this.docDb.del(key);
        },
      });
    },
    get(key) {
      return this.transaction(async tx => tx.get(key));
    },
    set(key, value) {
      return this.transaction(async tx => tx.set(key, value));
    },
    keys() {
      return this.transaction(async tx => tx.keys());
    },
    clear() {
      return this.transaction(async tx => tx.clear());
    },
    del(key) {
      return this.transaction(async tx => tx.del(key));
    },
  } satisfies ByteKV;

  readonly syncMetadata = {
    transaction: async cb => {
      using _lock = await this.lock.acquire();
      return await cb({
        get: async key => {
          return this.syncMetadataDb.get(key) ?? null;
        },
        set: async (key, value) => {
          this.syncMetadataDb.set(key, value);
        },
        keys: async () => {
          return Array.from(this.syncMetadataDb.keys());
        },
        clear: () => {
          this.syncMetadataDb.clear();
        },
        del: key => {
          this.syncMetadataDb.del(key);
        },
      });
    },
    get(key) {
      return this.transaction(async tx => tx.get(key));
    },
    set(key, value) {
      return this.transaction(async tx => tx.set(key, value));
    },
    keys() {
      return this.transaction(async tx => tx.keys());
    },
    clear() {
      return this.transaction(async tx => tx.clear());
    },
    del(key) {
      return this.transaction(async tx => tx.del(key));
    },
  } satisfies ByteKV;

  readonly serverClock = {
    transaction: async cb => {
      using _lock = await this.lock.acquire();
      return await cb({
        get: async key => {
          return this.serverClockDb.get(key) ?? null;
        },
        set: async (key, value) => {
          this.serverClockDb.set(key, value);
        },
        keys: async () => {
          return Array.from(this.serverClockDb.keys());
        },
        clear: () => {
          this.serverClockDb.clear();
        },
        del: key => {
          this.serverClockDb.del(key);
        },
      });
    },
    get(key) {
      return this.transaction(async tx => tx.get(key));
    },
    set(key, value) {
      return this.transaction(async tx => tx.set(key, value));
    },
    keys() {
      return this.transaction(async tx => tx.keys());
    },
    clear() {
      return this.transaction(async tx => tx.clear());
    },
    del(key) {
      return this.transaction(async tx => tx.del(key));
    },
  } satisfies ByteKV;
}
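To make the seq-num bookkeeping above concrete, here is a small sketch of how the monotonic counters behave, using only this file's exports (the doc id is arbitrary):

const storage = new DocStorageInner(new MemoryStorage());

await storage.saveDocSeqNum('doc1', true); // `true` increments: 0 -> 1
await storage.saveDocSeqNum('doc1', true); // 1 -> 2
await storage.saveDocSeqNum('doc1', 1);    // ignored: the counter only moves forward
await storage.loadDocSeqNum('doc1');       // 2

// the pushed counter trails behind until updates actually reach the server
await storage.saveDocPushedSeqNum('doc1', { add: 2 }); // null -> 2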
6
packages/common/infra/src/workspace/engine/doc/utils.ts
Normal file
@ -0,0 +1,6 @@
export function isEmptyUpdate(binary: Uint8Array) {
  return (
    binary.byteLength === 0 ||
    (binary.byteLength === 2 && binary[0] === 0 && binary[1] === 0)
  );
}
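The two-byte case matters because an empty yjs document encodes to Uint8Array [0, 0] (zero structs plus an empty delete set), so both forms below count as "nothing to sync" (a small sketch):

import { Doc, encodeStateAsUpdate } from 'yjs';

isEmptyUpdate(new Uint8Array());               // true
isEmptyUpdate(encodeStateAsUpdate(new Doc())); // true: encodes to [0, 0]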
@ -1,13 +1,12 @@
import { Slot } from '@blocksuite/global/utils';
import type { Doc as YDoc } from 'yjs';

import { throwIfAborted } from '../../utils/throw-if-aborted';
import type { AwarenessEngine } from './awareness';
import type { BlobEngine, BlobStatus } from './blob';
import type { SyncEngine } from './sync';
import { type SyncEngineStatus } from './sync';
import type { DocEngine } from './doc';

export interface WorkspaceEngineStatus {
  sync: SyncEngineStatus;
  blob: BlobStatus;
}

@ -31,51 +30,57 @@ export class WorkspaceEngine {

  constructor(
    public blob: BlobEngine,
    public sync: SyncEngine,
    public awareness: AwarenessEngine
    public doc: DocEngine,
    public awareness: AwarenessEngine,
    private readonly yDoc: YDoc
  ) {
    this._status = {
      sync: sync.status,
      blob: blob.status,
    };
    sync.onStatusChange.on(status => {
      this.status = {
        sync: status,
        blob: blob.status,
      };
    });
    blob.onStatusChange.on(status => {
      this.status = {
        sync: sync.status,
        blob: status,
      };
    });
    this.doc.addDoc(yDoc);
  }

  start() {
    this.sync.start();
    this.doc.start();
    this.awareness.connect();
    this.blob.start();
  }

  canGracefulStop() {
    return this.sync.canGracefulStop();
    return this.doc.engineState.value.saving === 0;
  }

  async waitForGracefulStop(abort?: AbortSignal) {
    await this.sync.waitForGracefulStop(abort);
    await this.doc.waitForSaved();
    throwIfAborted(abort);
    this.forceStop();
  }

  forceStop() {
    this.sync.forceStop();
    this.doc.stop();
    this.awareness.disconnect();
    this.blob.stop();
  }

  docEngineState = this.doc.engineState;

  rootDocState = this.doc.docState(this.yDoc.guid);

  waitForSynced() {
    return this.doc.waitForSynced();
  }

  waitForRootDocReady() {
    return this.doc.waitForReady(this.yDoc.guid);
  }
}

export * from './awareness';
export * from './blob';
export * from './doc';
export * from './error';
export * from './sync';
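A rough start/stop sketch for the reworked WorkspaceEngine above (construction of the blob, doc, and awareness engines is elided; `engine` is hypothetical):

engine.start();                       // starts doc sync, awareness, and blob engines
await engine.waitForRootDocReady();   // root yDoc loaded from local or remote
await engine.waitForSynced();         // all pending doc jobs drained
if (engine.canGracefulStop()) {       // nothing left to save
  engine.forceStop();
} else {
  await engine.waitForGracefulStop(); // waits for saving === 0, then stops
}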
@ -1,167 +0,0 @@
import { WorkspaceFlavour } from '@affine/env/workspace';
import { DocCollection } from '@blocksuite/store';
import { beforeEach, describe, expect, test, vi } from 'vitest';
import { Doc } from 'yjs';

import { MemoryMemento } from '../../../../storage';
import { globalBlockSuiteSchema } from '../../../global-schema';
import { TestingSyncStorage } from '../../../testing';
import { SyncEngineStep, SyncPeerStep } from '../consts';
import { SyncEngine } from '../engine';
import { createTestStorage } from './test-storage';

beforeEach(() => {
  vi.useFakeTimers({ toFake: ['requestIdleCallback'] });
});

const testMeta = {
  id: 'test',
  flavour: WorkspaceFlavour.LOCAL,
};

describe('SyncEngine', () => {
  test('basic - indexeddb', async () => {
    const storage = new MemoryMemento();
    const storage1 = new MemoryMemento();
    const storage2 = new MemoryMemento();
    let prev: any;
    {
      const docCollection = new DocCollection({
        id: 'test',

        schema: globalBlockSuiteSchema,
      });

      const syncEngine = new SyncEngine(
        docCollection.doc,
        new TestingSyncStorage(testMeta, storage),
        [
          new TestingSyncStorage(testMeta, storage1),
          new TestingSyncStorage(testMeta, storage2),
        ]
      );
      syncEngine.start();

      const page = docCollection.createDoc({
        id: 'page0',
      });
      page.load();
      const pageBlockId = page.addBlock(
        'affine:page' as keyof BlockSuite.BlockModels,
        {
          title: new page.Text(''),
        }
      );
      page.addBlock(
        'affine:surface' as keyof BlockSuite.BlockModels,
        {},
        pageBlockId
      );
      const frameId = page.addBlock(
        'affine:note' as keyof BlockSuite.BlockModels,
        {},
        pageBlockId
      );
      page.addBlock(
        'affine:paragraph' as keyof BlockSuite.BlockModels,
        {},
        frameId
      );
      await syncEngine.waitForSynced();
      syncEngine.forceStop();
      prev = docCollection.doc.toJSON();
    }

    for (const current of [storage, storage1, storage2]) {
      const docCollection = new DocCollection({
        id: 'test',

        schema: globalBlockSuiteSchema,
      });
      const syncEngine = new SyncEngine(
        docCollection.doc,
        new TestingSyncStorage(testMeta, current),
        []
      );
      syncEngine.start();
      await syncEngine.waitForSynced();
      expect(docCollection.doc.toJSON()).toEqual({
        ...prev,
      });
      syncEngine.forceStop();
    }
  });

  test('status', async () => {
    const ydoc = new Doc({ guid: 'test' });

    const storage1 = new MemoryMemento();
    const storage2 = new MemoryMemento();

    const localStorage = createTestStorage(
      new TestingSyncStorage(testMeta, storage1)
    );
    const remoteStorage = createTestStorage(
      new TestingSyncStorage(testMeta, storage2)
    );

    localStorage.pausePull();
    localStorage.pausePush();
    remoteStorage.pausePull();
    remoteStorage.pausePush();

    const syncEngine = new SyncEngine(ydoc, localStorage, [remoteStorage]);
    expect(syncEngine.status.step).toEqual(SyncEngineStep.Stopped);

    syncEngine.start();

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(
        SyncPeerStep.LoadingRootDoc
      );
    });

    localStorage.resumePull();

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(
        SyncPeerStep.LoadingRootDoc
      );
    });

    remoteStorage.resumePull();

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
    });

    ydoc.getArray('test').insert(0, [1, 2, 3]);

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Syncing);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Syncing);
    });

    localStorage.resumePush();

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Syncing);
    });

    remoteStorage.resumePush();

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Synced);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Synced);
    });
  });
});
@ -1,115 +0,0 @@
import { WorkspaceFlavour } from '@affine/env/workspace';
import { DocCollection } from '@blocksuite/store';
import { beforeEach, describe, expect, test, vi } from 'vitest';

import { MemoryMemento } from '../../../../storage';
import { globalBlockSuiteSchema } from '../../../global-schema';
import { TestingSyncStorage } from '../../../testing';
import { SyncPeerStep } from '../consts';
import { SyncPeer } from '../peer';

beforeEach(() => {
  vi.useFakeTimers({ toFake: ['requestIdleCallback'] });
});

const testMeta = {
  id: 'test',
  flavour: WorkspaceFlavour.LOCAL,
};

describe('SyncPeer', () => {
  test('basic - indexeddb', async () => {
    const storage = new MemoryMemento();

    let prev: any;
    {
      const docCollection = new DocCollection({
        id: 'test',

        schema: globalBlockSuiteSchema,
      });

      const syncPeer = new SyncPeer(
        docCollection.doc,
        new TestingSyncStorage(testMeta, storage)
      );
      await syncPeer.waitForLoaded();

      const page = docCollection.createDoc({
        id: 'page0',
      });
      page.load();
      const pageBlockId = page.addBlock(
        'affine:page' as keyof BlockSuite.BlockModels,
        {
          title: new page.Text(''),
        }
      );
      page.addBlock(
        'affine:surface' as keyof BlockSuite.BlockModels,
        {},
        pageBlockId
      );
      const frameId = page.addBlock(
        'affine:note' as keyof BlockSuite.BlockModels,
        {},
        pageBlockId
      );
      page.addBlock(
        'affine:paragraph' as keyof BlockSuite.BlockModels,
        {},
        frameId
      );
      await syncPeer.waitForSynced();
      syncPeer.stop();
      prev = docCollection.doc.toJSON();
    }

    {
      const docCollection = new DocCollection({
        id: 'test',

        schema: globalBlockSuiteSchema,
      });
      const syncPeer = new SyncPeer(
        docCollection.doc,
        new TestingSyncStorage(testMeta, storage)
      );
      await syncPeer.waitForSynced();
      expect(docCollection.doc.toJSON()).toEqual({
        ...prev,
      });
      syncPeer.stop();
    }
  });

  test('status', async () => {
    const storage = new MemoryMemento();

    const docCollection = new DocCollection({
      id: 'test',

      schema: globalBlockSuiteSchema,
    });

    const syncPeer = new SyncPeer(
      docCollection.doc,
      new TestingSyncStorage(testMeta, storage)
    );
    expect(syncPeer.status.step).toBe(SyncPeerStep.LoadingRootDoc);
    await syncPeer.waitForSynced();
    expect(syncPeer.status.step).toBe(SyncPeerStep.Synced);

    const page = docCollection.createDoc({
      id: 'page0',
    });
    expect(syncPeer.status.step).toBe(SyncPeerStep.LoadingSubDoc);
    page.load();
    await syncPeer.waitForSynced();
    page.addBlock('affine:page' as keyof BlockSuite.BlockModels, {
      title: new page.Text(''),
    });
    expect(syncPeer.status.step).toBe(SyncPeerStep.Syncing);
    syncPeer.stop();
  });
});
@ -1,42 +0,0 @@
import type { SyncStorage } from '../storage';

export function createTestStorage(origin: SyncStorage) {
  const controler = {
    pausedPull: Promise.resolve(),
    resumePull: () => {},
    pausedPush: Promise.resolve(),
    resumePush: () => {},
  };

  return {
    name: `${origin.name}(testing)`,
    pull(docId: string, state: Uint8Array) {
      return controler.pausedPull.then(() => origin.pull(docId, state));
    },
    push(docId: string, data: Uint8Array) {
      return controler.pausedPush.then(() => origin.push(docId, data));
    },
    subscribe(
      cb: (docId: string, data: Uint8Array) => void,
      disconnect: (reason: string) => void
    ) {
      return origin.subscribe(cb, disconnect);
    },
    pausePull() {
      controler.pausedPull = new Promise(resolve => {
        controler.resumePull = resolve;
      });
    },
    resumePull() {
      controler.resumePull?.();
    },
    pausePush() {
      controler.pausedPush = new Promise(resolve => {
        controler.resumePush = resolve;
      });
    },
    resumePush() {
      controler.resumePush?.();
    },
  };
}
@ -1,23 +0,0 @@
export enum SyncEngineStep {
  // error
  Rejected = -1,
  // in progress
  Stopped = 0,
  Syncing = 1,
  // finished
  Synced = 2,
}

export enum SyncPeerStep {
  // error
  VersionRejected = -1,
  // in progress
  Stopped = 0,
  Retrying = 1,
  LoadingRootDoc = 2,
  LoadingSubDoc = 3,
  Loaded = 4.5,
  Syncing = 5,
  // finished
  Synced = 6,
}
@ -1,316 +0,0 @@
import { DebugLogger } from '@affine/debug';
import { Slot } from '@blocksuite/global/utils';
import { Observable } from 'rxjs';
import type { Doc } from 'yjs';

import { createIdentifier } from '../../../di';
import { LiveData } from '../../../livedata';
import { SharedPriorityTarget } from '../../../utils/async-queue';
import { MANUALLY_STOP, throwIfAborted } from '../../../utils/throw-if-aborted';
import { SyncEngineStep, SyncPeerStep } from './consts';
import { SyncPeer, type SyncPeerStatus } from './peer';
import { type SyncStorage } from './storage';

export interface SyncEngineStatus {
  step: SyncEngineStep;
  local: SyncPeerStatus | null;
  remotes: (SyncPeerStatus | null)[];
  error: string | null;
  retrying: boolean;
}

export const LocalSyncStorage =
  createIdentifier<SyncStorage>('LocalSyncStorage');

export const RemoteSyncStorage =
  createIdentifier<SyncStorage>('RemoteSyncStorage');

/**
 * # SyncEngine
 *
 * ```
 *                    ┌────────────┐
 *                    │ SyncEngine │
 *                    └─────┬──────┘
 *                          │
 *                          ▼
 *                    ┌────────────┐
 *                    │  SyncPeer  │
 *          ┌─────────┤   local    ├─────────┐
 *          │         └─────┬──────┘         │
 *          │               │                │
 *          ▼               ▼                ▼
 *   ┌────────────┐  ┌────────────┐  ┌────────────┐
 *   │  SyncPeer  │  │  SyncPeer  │  │  SyncPeer  │
 *   │   Remote   │  │   Remote   │  │   Remote   │
 *   └────────────┘  └────────────┘  └────────────┘
 * ```
 *
 * Sync engine manage sync peers
 *
 * Sync steps:
 * 1. start local sync
 * 2. wait for local sync complete
 * 3. start remote sync
 * 4. continuously sync local and remote
 */
export class SyncEngine {
  get rootDocId() {
    return this.rootDoc.guid;
  }

  logger = new DebugLogger('affine:sync-engine:' + this.rootDocId);
  private _status: SyncEngineStatus;
  onStatusChange = new Slot<SyncEngineStatus>();
  private set status(s: SyncEngineStatus) {
    this.logger.debug('status change', s);
    this._status = s;
    this.onStatusChange.emit(s);
  }
  isRootDocLoaded = LiveData.from(
    new Observable<boolean>(observer => {
      observer.next(
        [this.status?.local, ...(this.status?.remotes ?? [])].some(
          p => p?.rootDocLoaded === true
        )
      );
      this.onStatusChange.on(status => {
        observer.next(
          [status?.local, ...(status?.remotes ?? [])].some(
            p => p?.rootDocLoaded === true
          )
        );
      });
    }),
    false
  );

  priorityTarget = new SharedPriorityTarget();

  get status() {
    return this._status;
  }

  private abort = new AbortController();

  constructor(
    private readonly rootDoc: Doc,
    private readonly local: SyncStorage,
    private readonly remotes: SyncStorage[]
  ) {
    this._status = {
      step: SyncEngineStep.Stopped,
      local: null,
      remotes: remotes.map(() => null),
      error: null,
      retrying: false,
    };
  }

  start() {
    if (this.status.step !== SyncEngineStep.Stopped) {
      this.forceStop();
    }
    this.abort = new AbortController();

    this.sync(this.abort.signal).catch(err => {
      // should never reach here
      this.logger.error(err);
    });
  }

  canGracefulStop() {
    return !!this.status.local && this.status.local.pendingPushUpdates === 0;
  }

  async waitForGracefulStop(abort?: AbortSignal) {
    await Promise.race([
      new Promise((_, reject) => {
        if (abort?.aborted) {
          reject(abort?.reason);
        }
        abort?.addEventListener('abort', () => {
          reject(abort.reason);
        });
      }),
      new Promise<void>(resolve => {
        this.onStatusChange.on(() => {
          if (this.canGracefulStop()) {
            resolve();
          }
        });
      }),
    ]);
    throwIfAborted(abort);
    this.forceStop();
  }

  forceStop() {
    this.abort.abort(MANUALLY_STOP);
    this._status = {
      step: SyncEngineStep.Stopped,
      local: null,
      remotes: this.remotes.map(() => null),
      error: 'Sync progress manually stopped',
      retrying: false,
    };
  }

  // main sync process, should never return until abort
  async sync(signal: AbortSignal) {
    const state: {
      localPeer: SyncPeer | null;
      remotePeers: (SyncPeer | null)[];
    } = {
      localPeer: null,
      remotePeers: this.remotes.map(() => null),
    };

    const cleanUp: (() => void)[] = [];
    try {
      // Step 1: start local sync peer
      state.localPeer = new SyncPeer(
        this.rootDoc,
        this.local,
        this.priorityTarget
      );

      cleanUp.push(
        state.localPeer.onStatusChange.on(() => {
          if (!signal.aborted)
            this.updateSyncingState(state.localPeer, state.remotePeers);
        }).dispose
      );

      this.updateSyncingState(state.localPeer, state.remotePeers);

      // Step 2: wait for local sync complete
      await state.localPeer.waitForLoaded(signal);

      // Step 3: start remote sync peer
      state.remotePeers = this.remotes.map(remote => {
        const peer = new SyncPeer(this.rootDoc, remote, this.priorityTarget);
        cleanUp.push(
          peer.onStatusChange.on(() => {
            if (!signal.aborted)
              this.updateSyncingState(state.localPeer, state.remotePeers);
          }).dispose
        );
        return peer;
      });

      this.updateSyncingState(state.localPeer, state.remotePeers);

      // Step 4: continuously sync local and remote

      // wait for abort
      await new Promise((_, reject) => {
        if (signal.aborted) {
          reject(signal.reason);
        }
        signal.addEventListener('abort', () => {
          reject(signal.reason);
        });
      });
    } catch (error) {
      if (error === MANUALLY_STOP || signal.aborted) {
        return;
      }
      throw error;
    } finally {
      // stop peers
      state.localPeer?.stop();
      for (const remotePeer of state.remotePeers) {
        remotePeer?.stop();
      }
      for (const clean of cleanUp) {
        clean();
      }
    }
  }

  updateSyncingState(local: SyncPeer | null, remotes: (SyncPeer | null)[]) {
    let step = SyncEngineStep.Synced;
    let error = null;
    const allPeer = [local, ...remotes];
    for (const peer of allPeer) {
      if (!peer || peer.status.step !== SyncPeerStep.Synced) {
        if (peer && peer.status.step <= 0) {
          // step < 0 means reject connection by server with some reason
          // so the data may be out of date
          step = SyncEngineStep.Rejected;
          error = peer.status.lastError;
        } else {
          step = SyncEngineStep.Syncing;
        }
        break;
      }
    }
    this.status = {
      step,
      local: local?.status ?? null,
      remotes: remotes.map(peer => peer?.status ?? null),
      error,
      retrying: allPeer.some(
        peer => peer?.status.step === SyncPeerStep.Retrying
      ),
    };
  }

  async waitForSynced(abort?: AbortSignal) {
    if (this.status.step === SyncEngineStep.Synced) {
      return;
    } else {
      return Promise.race([
        new Promise<void>(resolve => {
          this.onStatusChange.on(status => {
            if (status.step === SyncEngineStep.Synced) {
              resolve();
            }
          });
        }),
        new Promise((_, reject) => {
          if (abort?.aborted) {
            reject(abort?.reason);
          }
          abort?.addEventListener('abort', () => {
            reject(abort.reason);
          });
        }),
      ]);
    }
  }

  async waitForLoadedRootDoc(abort?: AbortSignal) {
    function isLoadedRootDoc(status: SyncEngineStatus) {
      return ![status.local, ...status.remotes].some(
        peer => !peer || peer.step <= SyncPeerStep.LoadingRootDoc
      );
    }
    if (isLoadedRootDoc(this.status)) {
      return;
    } else {
      return Promise.race([
        new Promise<void>(resolve => {
          this.onStatusChange.on(status => {
            if (isLoadedRootDoc(status)) {
              resolve();
            }
          });
        }),
        new Promise((_, reject) => {
          if (abort?.aborted) {
            reject(abort?.reason);
          }
          abort?.addEventListener('abort', () => {
            reject(abort.reason);
          });
        }),
      ]);
    }
  }

  setPriorityRule(target: ((id: string) => boolean) | null) {
    this.priorityTarget.priorityRule = target;
  }
}
@ -1,20 +0,0 @@
/**
 *
 * **SyncEngine**
 *
 * Manages one local storage and multiple remote storages.
 *
 * Responsible for creating SyncPeers for synchronization, following the local-first strategy.
 *
 * **SyncPeer**
 *
 * Responsible for synchronizing a single storage with Y.Doc.
 *
 * Carries the main synchronization logic.
 *
 */

export * from './consts';
export * from './engine';
export * from './peer';
export * from './storage';
@ -1,464 +0,0 @@
import { DebugLogger } from '@affine/debug';
import { Slot } from '@blocksuite/global/utils';
import { isEqual } from '@blocksuite/global/utils';
import type { Doc } from 'yjs';
import { applyUpdate, encodeStateAsUpdate, encodeStateVector } from 'yjs';

import {
  PriorityAsyncQueue,
  SharedPriorityTarget,
} from '../../../utils/async-queue';
import { mergeUpdates } from '../../../utils/merge-updates';
import { MANUALLY_STOP, throwIfAborted } from '../../../utils/throw-if-aborted';
import { SyncPeerStep } from './consts';
import type { SyncStorage } from './storage';

export interface SyncPeerStatus {
  step: SyncPeerStep;
  totalDocs: number;
  loadedDocs: number;
  pendingPullUpdates: number;
  pendingPushUpdates: number;
  lastError: string | null;
  rootDocLoaded: boolean;
}

/**
 * # SyncPeer
 * A SyncPeer is responsible for syncing one Storage with one Y.Doc and its subdocs.
 *
 * ```
 *                        ┌─────┐
 *                        │Start│
 *                        └──┬──┘
 *                           │
 *        ┌──────┐     ┌─────▼──────┐        ┌────┐
 *        │listen◄─────┤pull rootdoc│        │peer│
 *        └──┬───┘     └─────┬──────┘        └──┬─┘
 *           │               │ onLoad()         │
 *        ┌──▼───┐     ┌─────▼──────┐      ┌────▼────┐
 *        │listen◄─────┤pull subdocs│      │subscribe│
 *        └──┬───┘     └─────┬──────┘      └────┬────┘
 *           │               │ onReady()        │
 *        ┌──▼──┐      ┌─────▼───────┐       ┌──▼──┐
 *        │queue├──────►apply updates◄───────┤queue│
 *        └─────┘      └─────────────┘       └─────┘
 * ```
 *
 * listen: listen for updates from ydoc, typically from user modifications.
 * subscribe: listen for updates from storage, typically from other users.
 *
 */
export class SyncPeer {
  private _status: SyncPeerStatus = {
    step: SyncPeerStep.LoadingRootDoc,
    totalDocs: 1,
    loadedDocs: 0,
    pendingPullUpdates: 0,
    pendingPushUpdates: 0,
    lastError: null,
    rootDocLoaded: false,
  };
  onStatusChange = new Slot<SyncPeerStatus>();
  readonly abort = new AbortController();
  get name() {
    return this.storage.name;
  }
  logger = new DebugLogger('affine:sync-peer:' + this.name);

  constructor(
    private readonly rootDoc: Doc,
    private readonly storage: SyncStorage,
    private readonly priorityTarget = new SharedPriorityTarget()
  ) {
    this.logger.debug('peer start');

    this.syncRetryLoop(this.abort.signal).catch(err => {
      // should not reach here
      console.error(err);
    });
  }

  private set status(s: SyncPeerStatus) {
    if (!isEqual(s, this._status)) {
      this.logger.debug('status change', s);
      this._status = s;
      this.onStatusChange.emit(s);
    }
  }

  get status() {
    return this._status;
  }

  /**
   * stop sync
   *
   * SyncPeer is one-time use, this peer should be discarded after call stop().
   */
  stop() {
    this.logger.debug('peer stop');
    this.abort.abort(MANUALLY_STOP);
  }

  /**
   * auto retry after 5 seconds if sync failed
   */
  async syncRetryLoop(abort: AbortSignal) {
    while (abort.aborted === false) {
      try {
        await this.sync(abort);
      } catch (err) {
        if (err === MANUALLY_STOP || abort.aborted) {
          return;
        }

        this.logger.error('sync error', err);
      }
      try {
        this.logger.error('retry after 5 seconds');
        this.status = {
          step: SyncPeerStep.Retrying,
          totalDocs: 1,
          loadedDocs: 0,
          pendingPullUpdates: 0,
          pendingPushUpdates: 0,
          lastError: 'Retrying sync after 5 seconds',
          rootDocLoaded: this.status.rootDocLoaded,
        };
        await Promise.race([
          new Promise<void>(resolve => {
            setTimeout(resolve, 5 * 1000);
          }),
          new Promise((_, reject) => {
            // exit if manually stopped
            if (abort.aborted) {
              reject(abort.reason);
            }
            abort.addEventListener('abort', () => {
              reject(abort.reason);
            });
          }),
        ]);
      } catch (err) {
        if (err === MANUALLY_STOP || abort.aborted) {
          return;
        }

        // should never reach here
        throw err;
      }
    }
  }

  private readonly state: {
    connectedDocs: Map<string, Doc>;
    pushUpdatesQueue: PriorityAsyncQueue<{
      id: string;
      data: Uint8Array[];
    }>;
    pushingUpdate: boolean;
    pullUpdatesQueue: PriorityAsyncQueue<{
      id: string;
      data: Uint8Array;
    }>;
    subdocLoading: boolean;
    subdocsLoadQueue: PriorityAsyncQueue<{ id: string; doc: Doc }>;
  } = {
    connectedDocs: new Map(),
    pushUpdatesQueue: new PriorityAsyncQueue([], this.priorityTarget),
    pushingUpdate: false,
    pullUpdatesQueue: new PriorityAsyncQueue([], this.priorityTarget),
    subdocLoading: false,
    subdocsLoadQueue: new PriorityAsyncQueue([], this.priorityTarget),
  };

  initState() {
    this.state.connectedDocs.clear();
    this.state.pushUpdatesQueue.clear();
    this.state.pullUpdatesQueue.clear();
    this.state.subdocsLoadQueue.clear();
    this.state.pushingUpdate = false;
    this.state.subdocLoading = false;
  }

  /**
   * main synchronization logic
   */
  async sync(abortOuter: AbortSignal) {
    this.initState();
    const abortInner = new AbortController();

    abortOuter.addEventListener('abort', reason => {
      abortInner.abort(reason);
    });

    let dispose: (() => void) | null = null;
    try {
      this.reportSyncStatus();

      // start listen storage updates
      dispose = await this.storage.subscribe(
        this.handleStorageUpdates,
        reason => {
          // abort if storage disconnect, should trigger retry loop
          abortInner.abort('subscribe disconnect:' + reason);
        }
      );

      throwIfAborted(abortInner.signal);

      // Step 1: load root doc
      await this.connectDoc(this.rootDoc, abortInner.signal);

      // Step 2: load subdocs
      this.state.subdocsLoadQueue.push(
        ...Array.from(this.rootDoc.getSubdocs()).map(doc => ({
          id: doc.guid,
          doc,
        }))
      );
      this.reportSyncStatus();

      this.rootDoc.on('subdocs', this.handleSubdocsUpdate);

      // Finally: start sync
      await Promise.all([
        // load subdocs
        (async () => {
          while (throwIfAborted(abortInner.signal)) {
            const subdoc = await this.state.subdocsLoadQueue.next(
              abortInner.signal
            );
            this.state.subdocLoading = true;
            this.reportSyncStatus();
            await this.connectDoc(subdoc.doc, abortInner.signal);
            this.state.subdocLoading = false;
            this.reportSyncStatus();
          }
        })(),
        // pull updates
        (async () => {
          while (throwIfAborted(abortInner.signal)) {
            const { id, data } = await this.state.pullUpdatesQueue.next(
              abortInner.signal
            );
            // don't apply empty data or Uint8Array([0, 0])
            if (
              !(
                data.byteLength === 0 ||
                (data.byteLength === 2 && data[0] === 0 && data[1] === 0)
              )
            ) {
              const subdoc = this.state.connectedDocs.get(id);
              if (subdoc) {
                applyUpdate(subdoc, data, this.name);
              }
            }
            this.reportSyncStatus();
          }
        })(),
        // push updates
        (async () => {
          while (throwIfAborted(abortInner.signal)) {
            const { id, data } = await this.state.pushUpdatesQueue.next(
              abortInner.signal
            );
            this.state.pushingUpdate = true;
            this.reportSyncStatus();

            const merged = mergeUpdates(data);

            // don't push empty data or Uint8Array([0, 0])
            if (
              !(
                merged.byteLength === 0 ||
                (merged.byteLength === 2 && merged[0] === 0 && merged[1] === 0)
              )
            ) {
              await this.storage.push(id, merged);
            }

            this.state.pushingUpdate = false;
            this.reportSyncStatus();
          }
        })(),
      ]);
    } finally {
      dispose?.();
      for (const docs of this.state.connectedDocs.values()) {
        this.disconnectDoc(docs);
      }
      this.rootDoc.off('subdocs', this.handleSubdocsUpdate);
    }
  }

  async connectDoc(doc: Doc, abort: AbortSignal) {
    const { data: docData, state: inStorageState } =
      (await this.storage.pull(doc.guid, encodeStateVector(doc))) ?? {};
    throwIfAborted(abort);

    if (docData !== undefined && doc.guid === this.rootDoc.guid) {
      this.status = {
        ...this.status,
        rootDocLoaded: true,
      };
    }

    if (docData) {
      applyUpdate(doc, docData, 'load');
    }

    // diff root doc and in-storage, save updates to pendingUpdates
    this.state.pushUpdatesQueue.push({
      id: doc.guid,
      data: [encodeStateAsUpdate(doc, inStorageState)],
    });

    this.state.connectedDocs.set(doc.guid, doc);

    // start listen root doc changes
    doc.on('update', this.handleYDocUpdates);

    // mark rootDoc as loaded
    doc.emit('sync', [true]);

    this.reportSyncStatus();
  }

  disconnectDoc(doc: Doc) {
    doc.off('update', this.handleYDocUpdates);
    this.state.connectedDocs.delete(doc.guid);
    this.reportSyncStatus();
  }

  // handle updates from ydoc
  handleYDocUpdates = (update: Uint8Array, origin: string, doc: Doc) => {
    // don't push updates from storage
    if (origin === this.name) {
      return;
    }

    const exist = this.state.pushUpdatesQueue.find(({ id }) => id === doc.guid);
    if (exist) {
      exist.data.push(update);
    } else {
      this.state.pushUpdatesQueue.push({
        id: doc.guid,
        data: [update],
      });
    }

    this.reportSyncStatus();
  };

  // handle subdocs changes, append new subdocs to queue, remove subdocs from queue
  handleSubdocsUpdate = ({
    added,
    removed,
  }: {
    added: Set<Doc>;
    removed: Set<Doc>;
  }) => {
    for (const subdoc of added) {
      this.state.subdocsLoadQueue.push({ id: subdoc.guid, doc: subdoc });
    }

    for (const subdoc of removed) {
      this.disconnectDoc(subdoc);
      this.state.subdocsLoadQueue.remove(doc => doc.doc === subdoc);
    }
    this.reportSyncStatus();
  };

  // handle updates from storage
  handleStorageUpdates = (id: string, data: Uint8Array) => {
    this.state.pullUpdatesQueue.push({
      id,
      data,
    });
    this.reportSyncStatus();
  };

  reportSyncStatus() {
    let step;
    let lastError = null;
    if (this.storage.errorMessage?.type === 'outdated') {
      step = SyncPeerStep.VersionRejected;
      lastError = this.storage.errorMessage.message.reason;
    } else if (this.state.connectedDocs.size === 0) {
      step = SyncPeerStep.LoadingRootDoc;
    } else if (this.state.subdocsLoadQueue.length || this.state.subdocLoading) {
      step = SyncPeerStep.LoadingSubDoc;
    } else if (
      this.state.pullUpdatesQueue.length ||
|
||||
this.state.pushingUpdate
|
||||
) {
|
||||
step = SyncPeerStep.Syncing;
|
||||
} else {
|
||||
step = SyncPeerStep.Synced;
|
||||
}
|
||||
|
||||
this.status = {
|
||||
step: step,
|
||||
totalDocs:
|
||||
this.state.connectedDocs.size + this.state.subdocsLoadQueue.length,
|
||||
loadedDocs: this.state.connectedDocs.size,
|
||||
pendingPullUpdates:
|
||||
this.state.pullUpdatesQueue.length + (this.state.subdocLoading ? 1 : 0),
|
||||
pendingPushUpdates:
|
||||
this.state.pushUpdatesQueue.length + (this.state.pushingUpdate ? 1 : 0),
|
||||
lastError,
|
||||
rootDocLoaded: this.status.rootDocLoaded,
|
||||
};
|
||||
}
|
||||
|
||||
async waitForSynced(abort?: AbortSignal) {
|
||||
if (this.status.step >= SyncPeerStep.Synced) {
|
||||
return;
|
||||
} else {
|
||||
return Promise.race([
|
||||
new Promise<void>(resolve => {
|
||||
this.onStatusChange.on(status => {
|
||||
if (status.step >= SyncPeerStep.Synced) {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
}),
|
||||
new Promise((_, reject) => {
|
||||
if (abort?.aborted) {
|
||||
reject(abort?.reason);
|
||||
}
|
||||
abort?.addEventListener('abort', () => {
|
||||
reject(abort.reason);
|
||||
});
|
||||
}),
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
||||
async waitForLoaded(abort?: AbortSignal) {
|
||||
if (this.status.step > SyncPeerStep.Loaded) {
|
||||
return;
|
||||
} else {
|
||||
return Promise.race([
|
||||
new Promise<void>(resolve => {
|
||||
this.onStatusChange.on(status => {
|
||||
if (status.step > SyncPeerStep.Loaded) {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
}),
|
||||
new Promise((_, reject) => {
|
||||
if (abort?.aborted) {
|
||||
reject(abort?.reason);
|
||||
}
|
||||
abort?.addEventListener('abort', () => {
|
||||
reject(abort.reason);
|
||||
});
|
||||
}),
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
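For context, the sync() method above is designed to be re-entered by an outer retry loop whenever the inner abort fires (for example on a storage disconnect). A minimal sketch of such a driver, assuming only the sync(abort) contract shown above; the delay value and logging are illustrative, not part of this commit:

// hypothetical driver; retryDelay is an assumed parameter
async function runWithRetry(
  peer: { sync(abort: AbortSignal): Promise<void> },
  stop: AbortSignal,
  retryDelay = 1000
) {
  while (!stop.aborted) {
    try {
      await peer.sync(stop); // resolves only when aborted or on error
    } catch (err) {
      console.error('sync interrupted, retrying', err);
    }
    // back off briefly before re-entering the loop
    await new Promise(resolve => setTimeout(resolve, retryDelay));
  }
}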
@@ -1,57 +0,0 @@
export type RejectByVersion = {
  currVersion: string;
  requiredVersion: string;
  reason: string;
};

export type SyncErrorMessage = {
  type: 'outdated';
  message: RejectByVersion;
};

export interface SyncStorage {
  /**
   * for debugging
   */
  name: string;

  errorMessage?: SyncErrorMessage;

  pull(
    docId: string,
    state: Uint8Array
  ): Promise<{ data: Uint8Array; state?: Uint8Array } | null>;
  push(docId: string, data: Uint8Array): Promise<void>;

  /**
   * Subscribe to updates from the peer.
   *
   * @param cb callback to handle updates
   * @param disconnect callback to handle disconnects; the reason can be something like 'network-error'
   *
   * @returns an unsubscribe function
   */
  subscribe(
    cb: (docId: string, data: Uint8Array) => void,
    disconnect: (reason: string) => void
  ): Promise<() => void>;
}

export const EmptySyncStorage: SyncStorage = {
  name: 'empty',
  pull: async () => null,
  push: async () => {},
  subscribe: async () => () => {},
};

export const ReadonlyMappingSyncStorage = (map: {
  [key: string]: Uint8Array;
}): SyncStorage => ({
  name: 'map',
  pull: async (id: string) => {
    const data = map[id];
    return data ? { data } : null;
  },
  push: async () => {},
  subscribe: async () => () => {},
});
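To make the contract concrete, here is a minimal read-write in-memory implementation of the SyncStorage interface above. It is an illustrative sketch, not part of this commit:

const InMemorySyncStorage = (): SyncStorage => {
  // backing store: doc id -> latest merged update
  const docs = new Map<string, Uint8Array>();
  return {
    name: 'in-memory',
    pull: async id => {
      const data = docs.get(id);
      return data ? { data } : null;
    },
    push: async (id, data) => {
      docs.set(id, data);
    },
    // nothing ever arrives from a purely local peer
    subscribe: async () => () => {},
  };
};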
@@ -23,11 +23,11 @@ import {
  AwarenessEngine,
  AwarenessProvider,
  BlobEngine,
  DocEngine,
  DocServerImpl,
  DocStorageImpl,
  LocalBlobStorage,
  LocalSyncStorage,
  RemoteBlobStorage,
  RemoteSyncStorage,
  SyncEngine,
  WorkspaceEngine,
} from './engine';
import { WorkspaceFactory } from './factory';
@@ -63,13 +63,23 @@ export function configureWorkspaceServices(services: ServiceCollection) {
      WorkspaceUpgradeController,
      ServiceProvider,
    ])
    .add(WorkspaceEngine, [BlobEngine, SyncEngine, AwarenessEngine])
    .add(WorkspaceEngine, [
      BlobEngine,
      DocEngine,
      AwarenessEngine,
      RootYDocContext,
    ])
    .add(AwarenessEngine, [[AwarenessProvider]])
    .add(BlobEngine, [LocalBlobStorage, [RemoteBlobStorage]])
    .add(SyncEngine, [RootYDocContext, LocalSyncStorage, [RemoteSyncStorage]])
    .addImpl(DocEngine, services => {
      return new DocEngine(
        services.get(DocStorageImpl),
        services.getOptional(DocServerImpl)
      );
    })
    .add(WorkspaceUpgradeController, [
      BlockSuiteWorkspaceContext,
      SyncEngine,
      DocEngine,
      WorkspaceMetadataContext,
    ]);
}
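With the registrations above, DocEngine is constructed lazily from DocStorageImpl plus an optional DocServerImpl (absent for purely local workspaces). A hypothetical consumer would resolve it through a scoped provider; the exact provider API below is assumed for illustration and does not appear in this diff:

// illustrative only; provider creation API is an assumption
const provider = services.provider(WorkspaceScope);
const docEngine = provider.get(DocEngine); // storage-only if no DocServerImpl is registered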
@@ -126,7 +126,7 @@ export class WorkspaceManager {
  async transformLocalToCloud(local: Workspace): Promise<WorkspaceMetadata> {
    assertEquals(local.flavour, WorkspaceFlavour.LOCAL);

    await local.engine.sync.waitForSynced();
    await local.engine.waitForSynced();

    const newId = await this.list.create(
      WorkspaceFlavour.AFFINE_CLOUD,
@@ -6,15 +6,15 @@ import { applyUpdate, encodeStateAsUpdate } from 'yjs';

import { type ServiceCollection } from '../di';
import { GlobalState, type Memento } from '../storage';
import { mergeUpdates } from '../utils/merge-updates';
import { WorkspaceMetadataContext } from './context';
import {
  AwarenessProvider,
  type BlobStorage,
  DocStorageImpl,
  LocalBlobStorage,
  LocalSyncStorage,
  type SyncStorage,
  MemoryDocStorage,
} from './engine';
import { MemoryStorage } from './engine/doc/storage';
import type { WorkspaceFactory } from './factory';
import { globalBlockSuiteSchema } from './global-schema';
import type { WorkspaceListProvider } from './list';
@@ -28,6 +28,7 @@ export class TestingLocalWorkspaceListProvider
  implements WorkspaceListProvider
{
  name = WorkspaceFlavour.LOCAL;
  docStorage = new MemoryDocStorage(this.state);

  constructor(private readonly state: Memento) {}

@@ -51,7 +52,6 @@ export class TestingLocalWorkspaceListProvider
    const meta = { id, flavour: WorkspaceFlavour.LOCAL };

    const blobStorage = new TestingBlobStorage(meta, this.state);
    const syncStorage = new TestingSyncStorage(meta, this.state);

    const docCollection = new DocCollection({
      id: id,
@@ -63,9 +63,9 @@ export class TestingLocalWorkspaceListProvider
    await initial(docCollection, blobStorage);

    // save workspace to storage
    await syncStorage.push(id, encodeStateAsUpdate(docCollection.doc));
    await this.docStorage.doc.set(id, encodeStateAsUpdate(docCollection.doc));
    for (const subdocs of docCollection.doc.getSubdocs()) {
      await syncStorage.push(subdocs.guid, encodeStateAsUpdate(subdocs));
      await this.docStorage.doc.set(subdocs.guid, encodeStateAsUpdate(subdocs));
    }

    const list = this.state.get<WorkspaceMetadata[]>(LIST_STORE_KEY) ?? [];
@@ -104,14 +104,7 @@ export class TestingLocalWorkspaceListProvider
  }
  async getInformation(id: string): Promise<WorkspaceInfo | undefined> {
    // get information from root doc
    const storage = new TestingSyncStorage(
      {
        flavour: WorkspaceFlavour.LOCAL,
        id,
      },
      this.state
    );
    const data = await storage.pull(id, new Uint8Array([]));
    const data = await this.docStorage.doc.get(id);

    if (!data) {
      return;
@@ -122,7 +115,7 @@ export class TestingLocalWorkspaceListProvider
      schema: globalBlockSuiteSchema,
    });

    applyUpdate(bs.doc, data.data);
    applyUpdate(bs.doc, data);

    return {
      name: bs.meta.name,
@@ -143,10 +136,7 @@ export class TestingLocalWorkspaceFactory implements WorkspaceFactory {
        WorkspaceMetadataContext,
        GlobalState,
      ])
      .addImpl(LocalSyncStorage, TestingSyncStorage, [
        WorkspaceMetadataContext,
        GlobalState,
      ])
      .addImpl(DocStorageImpl, MemoryStorage, [GlobalState])
      .addImpl(AwarenessProvider, TestingAwarenessProvider);
  }

@@ -161,38 +151,6 @@ export class TestingLocalWorkspaceFactory implements WorkspaceFactory {
  }
}

export class TestingSyncStorage implements SyncStorage {
  constructor(
    private readonly metadata: WorkspaceMetadata,
    private readonly state: Memento
  ) {}
  name: string = 'testing';
  async pull(
    docId: string,
    _: Uint8Array
  ): Promise<{ data: Uint8Array; state?: Uint8Array | undefined } | null> {
    const key = 'testing-sync/' + this.metadata.id + '/' + docId;
    const data = this.state.get<Uint8Array>(key);
    if (data) {
      return { data };
    } else {
      return null;
    }
  }
  async push(docId: string, data: Uint8Array): Promise<void> {
    const key = 'testing-sync/' + this.metadata.id + '/' + docId;
    const oldData = this.state.get<Uint8Array>(key);
    const update = mergeUpdates(oldData ? [oldData, data] : [data]);
    this.state.set(key, update);
  }
  async subscribe(
    _cb: (docId: string, data: Uint8Array) => void,
    _disconnect: (reason: string) => void
  ): Promise<() => void> {
    return () => {};
  }
}

export class TestingBlobStorage implements BlobStorage {
  name = 'testing';
  readonly = false;
@@ -7,7 +7,7 @@ import { applyUpdate, Doc as YDoc, encodeStateAsUpdate } from 'yjs';
import { checkWorkspaceCompatibility, MigrationPoint } from '../blocksuite';
import { forceUpgradePages, upgradeV1ToV2 } from '../blocksuite';
import { migrateGuidCompatibility } from '../blocksuite';
import type { SyncEngine } from './engine/sync';
import type { DocEngine } from './engine';
import type { WorkspaceManager } from './manager';
import { type WorkspaceMetadata } from './metadata';

@@ -39,7 +39,7 @@ export class WorkspaceUpgradeController {

  constructor(
    private readonly docCollection: DocCollection,
    private readonly sync: SyncEngine,
    private readonly docEngine: DocEngine,
    private readonly workspaceMetadata: WorkspaceMetadata
  ) {
    docCollection.doc.on('update', () => {
@@ -69,7 +69,7 @@ export class WorkspaceUpgradeController {
    this.status = { ...this.status, upgrading: true };

    try {
      await this.sync.waitForSynced();
      await this.docEngine.waitForSynced();

      const step = checkWorkspaceCompatibility(
        this.docCollection,
@@ -109,12 +109,12 @@ export class WorkspaceUpgradeController {
        migrateGuidCompatibility(clonedDoc);
        await forceUpgradePages(clonedDoc, this.docCollection.schema);
        applyDoc(this.docCollection.doc, clonedDoc);
        await this.sync.waitForSynced();
        await this.docEngine.waitForSynced();
        return null;
      } else if (step === MigrationPoint.BlockVersion) {
        await forceUpgradePages(clonedDoc, this.docCollection.schema);
        applyDoc(this.docCollection.doc, clonedDoc);
        await this.sync.waitForSynced();
        await this.docEngine.waitForSynced();
        return null;
      } else {
        throw new Unreachable();
@@ -126,8 +126,7 @@ export class Workspace {
    this.services.get(CleanupService).cleanup();
  }

  // same as `WorkspaceEngine.sync.setPriorityRule`
  setPriorityRule(target: ((id: string) => boolean) | null) {
    this.engine.sync.setPriorityRule(target);
  setPriorityLoad(docId: string, priority: number) {
    this.engine.doc.setPriority(docId, priority);
  }
}

@@ -1,4 +0,0 @@
import type { SyncEngineStatus } from '@toeverything/infra';
import { atom } from 'jotai';

export const syncEngineStatusAtom = atom<SyncEngineStatus | null>(null);
@@ -31,7 +31,7 @@ export const ExportPanel = ({
    setSaving(true);
    try {
      if (isOnline) {
        await workspace.engine.sync.waitForSynced();
        await workspace.engine.waitForSynced();
        await workspace.engine.blob.sync();
      }

@@ -8,6 +8,8 @@ import { useWorkspace } from '@affine/core/hooks/use-workspace';
import { useWorkspaceInfo } from '@affine/core/hooks/use-workspace-info';
import { UNTITLED_WORKSPACE_NAME } from '@affine/env/constant';
import { useAFFiNEI18N } from '@affine/i18n/hooks';
import { ArrowRightSmallIcon } from '@blocksuite/icons';
import { useCallback } from 'react';

import { DeleteLeaveWorkspace } from './delete-leave-workspace';
import { EnableCloudPanel } from './enable-cloud';
@@ -29,6 +31,17 @@ export const WorkspaceSettingDetail = (props: WorkspaceSettingDetailProps) => {

  const workspaceInfo = useWorkspaceInfo(workspaceMetadata);

  const handleResetSyncStatus = useCallback(() => {
    workspace?.engine.doc
      .resetSyncStatus()
      .then(() => {
        window.location.reload();
      })
      .catch(err => {
        console.error(err);
      });
  }, [workspace]);

  return (
    <>
      <SettingHeader
@@ -64,6 +77,19 @@ export const WorkspaceSettingDetail = (props: WorkspaceSettingDetailProps) => {
      )}
      <SettingWrapper>
        <DeleteLeaveWorkspace {...props} />
        <SettingRow
          name={
            <span style={{ color: 'var(--affine-text-secondary-color)' }}>
              {t['com.affine.resetSyncStatus.button']()}
            </span>
          }
          desc={t['com.affine.resetSyncStatus.description']()}
          style={{ cursor: 'pointer' }}
          onClick={handleResetSyncStatus}
          data-testid="reset-sync-status"
        >
          <ArrowRightSmallIcon />
        </SettingRow>
      </SettingWrapper>
    </>
  );
@@ -5,13 +5,12 @@ import { Button } from '@affine/component/ui/button';
import { Upload } from '@affine/core/components/pure/file-upload';
import { useAsyncCallback } from '@affine/core/hooks/affine-async-hooks';
import { useWorkspaceBlobObjectUrl } from '@affine/core/hooks/use-workspace-blob';
import { useWorkspaceStatus } from '@affine/core/hooks/use-workspace-status';
import { validateAndReduceImage } from '@affine/core/utils/reduce-image';
import { UNTITLED_WORKSPACE_NAME } from '@affine/env/constant';
import { useAFFiNEI18N } from '@affine/i18n/hooks';
import { CameraIcon } from '@blocksuite/icons';
import type { Workspace } from '@toeverything/infra';
import { SyncPeerStep } from '@toeverything/infra';
import { useLiveData } from '@toeverything/infra';
import { useSetAtom } from 'jotai';
import {
  type KeyboardEvent,
@@ -32,13 +31,7 @@ export const ProfilePanel = ({ isOwner, workspace }: ProfilePanelProps) => {
  const t = useAFFiNEI18N();
  const pushNotification = useSetAtom(pushNotificationAtom);

  const workspaceIsLoading =
    useWorkspaceStatus(
      workspace,
      status =>
        !status.engine.sync.local ||
        status.engine.sync.local?.step <= SyncPeerStep.LoadingRootDoc
    ) ?? true;
  const workspaceIsReady = useLiveData(workspace?.engine.rootDocState)?.ready;

  const [avatarBlob, setAvatarBlob] = useState<string | null>(null);
  const [name, setName] = useState('');
@@ -158,7 +151,7 @@ export const ProfilePanel = ({ isOwner, workspace }: ProfilePanelProps) => {
    [pushNotification, setWorkspaceAvatar]
  );

  const canAdjustAvatar = !workspaceIsLoading && avatarUrl && isOwner;
  const canAdjustAvatar = workspaceIsReady && avatarUrl && isOwner;

  return (
    <div className={style.profileWrapper}>
@@ -194,7 +187,7 @@ export const ProfilePanel = ({ isOwner, workspace }: ProfilePanelProps) => {
      <div className={style.label}>{t['Workspace Name']()}</div>
      <FlexWrapper alignItems="center" flexGrow="1">
        <Input
          disabled={workspaceIsLoading || !isOwner}
          disabled={!workspaceIsReady || !isOwner}
          value={input}
          style={{ width: 280, height: 32 }}
          data-testid="workspace-name-input"
@@ -1,10 +1,9 @@
import { Loading } from '@affine/component/ui/loading';
import { formatDate } from '@affine/core/components/page-list';
import { useSyncEngineStatus } from '@affine/core/hooks/affine/use-sync-engine-status';
import { useDocEngineStatus } from '@affine/core/hooks/affine/use-doc-engine-status';
import { useAsyncCallback } from '@affine/core/hooks/affine-async-hooks';
import { useAFFiNEI18N } from '@affine/i18n/hooks';
import type { DocMeta } from '@blocksuite/store';
import { SyncEngineStep } from '@toeverything/infra';
import type { CommandCategory } from '@toeverything/infra/command';
import clsx from 'clsx';
import { Command } from 'cmdk';
@@ -163,7 +162,7 @@ export const CMDKContainer = ({
  const [value, setValue] = useAtom(cmdkValueAtom);
  const isInEditor = pageMeta !== undefined;
  const [opening, setOpening] = useState(open);
  const { syncEngineStatus, progress } = useSyncEngineStatus();
  const { syncing, progress } = useDocEngineStatus();
  const inputRef = useRef<HTMLInputElement>(null);

  // fix list height animation on opening
@@ -205,8 +204,7 @@ export const CMDKContainer = ({
        inEditor: isInEditor,
      })}
    >
      {!syncEngineStatus ||
      syncEngineStatus.step === SyncEngineStep.Syncing ? (
      {syncing ? (
        <Loading
          size={24}
          progress={progress ? Math.max(progress, 0.2) : undefined}
@@ -3,8 +3,8 @@ import { Avatar } from '@affine/component/ui/avatar';
import { Loading } from '@affine/component/ui/loading';
import { Tooltip } from '@affine/component/ui/tooltip';
import { openSettingModalAtom } from '@affine/core/atoms';
import { useDocEngineStatus } from '@affine/core/hooks/affine/use-doc-engine-status';
import { useIsWorkspaceOwner } from '@affine/core/hooks/affine/use-is-workspace-owner';
import { useSyncEngineStatus } from '@affine/core/hooks/affine/use-sync-engine-status';
import { useWorkspaceBlobObjectUrl } from '@affine/core/hooks/use-workspace-blob';
import { useWorkspaceInfo } from '@affine/core/hooks/use-workspace-info';
import { UNTITLED_WORKSPACE_NAME } from '@affine/env/constant';
@@ -17,7 +17,7 @@ import {
  NoNetworkIcon,
  UnsyncIcon,
} from '@blocksuite/icons';
import { SyncEngineStep, Workspace } from '@toeverything/infra';
import { Workspace } from '@toeverything/infra';
import { useService } from '@toeverything/infra/di';
import { useSetAtom } from 'jotai';
import { debounce } from 'lodash-es';
@@ -94,8 +94,7 @@ const useSyncEngineSyncProgress = () => {
  const t = useAFFiNEI18N();
  const isOnline = useSystemOnline();
  const pushNotification = useSetAtom(pushNotificationAtom);
  const { syncEngineStatus, setSyncEngineStatus, progress } =
    useSyncEngineStatus();
  const { syncing, progress, retrying, errorMessage } = useDocEngineStatus();
  const [isOverCapacity, setIsOverCapacity] = useState(false);

  const currentWorkspace = useService(Workspace);
@@ -111,19 +110,6 @@ const useSyncEngineSyncProgress = () => {

  // debounce sync engine status
  useEffect(() => {
    setSyncEngineStatus(currentWorkspace.engine.sync.status);
    const disposable = currentWorkspace.engine.sync.onStatusChange.on(
      debounce(
        status => {
          setSyncEngineStatus(status);
        },
        300,
        {
          maxWait: 500,
          trailing: true,
        }
      )
    );
    const disposableOverCapacity =
      currentWorkspace.engine.blob.onStatusChange.on(
        debounce(status => {
@@ -153,17 +139,9 @@ const useSyncEngineSyncProgress = () => {
      })
    );
    return () => {
      disposable?.dispose();
      disposableOverCapacity?.dispose();
    };
  }, [
    currentWorkspace,
    isOwner,
    jumpToPricePlan,
    pushNotification,
    setSyncEngineStatus,
    t,
  ]);
  }, [currentWorkspace, isOwner, jumpToPricePlan, pushNotification, t]);

  const content = useMemo(() => {
    // TODO: add i18n
@@ -176,21 +154,15 @@ const useSyncEngineSyncProgress = () => {
    if (!isOnline) {
      return 'Disconnected, please check your network connection';
    }
    if (!syncEngineStatus || syncEngineStatus.step === SyncEngineStep.Syncing) {
    if (syncing) {
      return (
        `Syncing with AFFiNE Cloud` +
        (progress ? ` (${Math.floor(progress * 100)}%)` : '')
      );
    } else if (
      syncEngineStatus &&
      syncEngineStatus.step < SyncEngineStep.Syncing
    ) {
      return (
        syncEngineStatus.error ||
        'Disconnected, please check your network connection'
      );
    } else if (retrying && errorMessage) {
      return `${errorMessage}, reconnecting.`;
    }
    if (syncEngineStatus.retrying) {
    if (retrying) {
      return 'Sync disconnected due to unexpected issues, reconnecting.';
    }
    if (isOverCapacity) {
@@ -199,29 +171,31 @@ const useSyncEngineSyncProgress = () => {
    return 'Synced with AFFiNE Cloud';
  }, [
    currentWorkspace.flavour,
    errorMessage,
    isOnline,
    isOverCapacity,
    progress,
    syncEngineStatus,
    retrying,
    syncing,
  ]);

  const CloudWorkspaceSyncStatus = useCallback(() => {
    if (!syncEngineStatus || syncEngineStatus.step === SyncEngineStep.Syncing) {
    if (syncing) {
      return SyncingWorkspaceStatus({
        progress: progress ? Math.max(progress, 0.2) : undefined,
      });
    } else if (syncEngineStatus.retrying || isOverCapacity) {
    } else if (retrying) {
      return UnSyncWorkspaceStatus();
    } else {
      return CloudWorkspaceStatus();
    }
  }, [isOverCapacity, progress, syncEngineStatus]);
  }, [progress, retrying, syncing]);

  return {
    message: content,
    icon:
      currentWorkspace.flavour === WorkspaceFlavour.AFFINE_CLOUD ? (
        !isOnline || syncEngineStatus?.error ? (
        !isOnline ? (
          <OfflineStatus />
        ) : (
          <CloudWorkspaceSyncStatus />
@@ -0,0 +1,20 @@
import { useLiveData, useService, Workspace } from '@toeverything/infra';
import { useMemo } from 'react';

export function useDocEngineStatus() {
  const workspace = useService(Workspace);

  const engineState = useLiveData(workspace.engine.docEngineState);

  const progress =
    (engineState.total - engineState.syncing) / engineState.total;

  return useMemo(
    () => ({
      ...engineState,
      progress,
      syncing: engineState.syncing > 0,
    }),
    [engineState, progress]
  );
}
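A hypothetical consumer of the hook above, showing how the derived progress and syncing flag would typically be read (the helper name is illustrative, not part of this commit):

function useSyncLabel() {
  const { syncing, progress } = useDocEngineStatus();
  // progress is a 0..1 fraction of docs already synced
  return syncing ? `Syncing ${Math.floor(progress * 100)}%` : 'Synced';
}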
@@ -1,35 +0,0 @@
import { syncEngineStatusAtom } from '@affine/core/atoms/sync-engine-status';
import { useAtom } from 'jotai';
import { mean } from 'lodash-es';
import { useMemo } from 'react';

export function useSyncEngineStatus() {
  const [syncEngineStatus, setSyncEngineStatus] = useAtom(syncEngineStatusAtom);

  const progress = useMemo(() => {
    if (!syncEngineStatus?.remotes || syncEngineStatus?.remotes.length === 0) {
      return null;
    }
    return mean(
      syncEngineStatus.remotes.map(peer => {
        if (!peer) {
          return 0;
        }
        const totalTask =
          peer.totalDocs + peer.pendingPullUpdates + peer.pendingPushUpdates;
        const doneTask = peer.loadedDocs;

        return doneTask / totalTask;
      })
    );
  }, [syncEngineStatus?.remotes]);

  return useMemo(
    () => ({
      syncEngineStatus,
      setSyncEngineStatus,
      progress,
    }),
    [progress, setSyncEngineStatus, syncEngineStatus]
  );
}
@@ -4,6 +4,17 @@ import { Observable } from 'rxjs';
export class LocalStorageMemento implements Memento {
  constructor(private readonly prefix: string) {}

  keys(): string[] {
    const keys: string[] = [];
    for (let i = 0; i < localStorage.length; i++) {
      const key = localStorage.key(i);
      if (key && key.startsWith(this.prefix)) {
        keys.push(key.slice(this.prefix.length));
      }
    }
    return keys;
  }

  get<T>(key: string): T | null {
    const json = localStorage.getItem(this.prefix + key);
    return json ? JSON.parse(json) : null;
@@ -29,6 +40,16 @@ export class LocalStorageMemento implements Memento {
    channel.postMessage(value);
    channel.close();
  }

  del(key: string): void {
    localStorage.removeItem(this.prefix + key);
  }

  clear(): void {
    for (const key of this.keys()) {
      this.del(key);
    }
  }
}

export class LocalStorageGlobalCache
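An illustrative sketch of the prefixing contract implemented above (values and the prefix string are placeholders, not from the commit):

const memento = new LocalStorageMemento('workspace:');
memento.set('a', 1);   // stored under localStorage key 'workspace:a'
memento.keys();        // ['a'] — the prefix is stripped on the way out
memento.clear();       // deletes only the 'workspace:*' entries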
@@ -15,12 +15,11 @@ import type { AffineEditorContainer } from '@blocksuite/presets';
import type { Doc as BlockSuiteDoc } from '@blocksuite/store';
import type { Doc } from '@toeverything/infra';
import {
  DocStorageImpl,
  EmptyBlobStorage,
  LocalBlobStorage,
  LocalSyncStorage,
  PageManager,
  type PageMode,
  ReadonlyMappingSyncStorage,
  RemoteBlobStorage,
  ServiceProviderContext,
  useLiveData,
@@ -29,6 +28,7 @@ import {
  WorkspaceManager,
  WorkspaceScope,
} from '@toeverything/infra';
import { ReadonlyDocStorage } from '@toeverything/infra';
import { useCallback, useEffect, useState } from 'react';
import type { LoaderFunction } from 'react-router-dom';
import {
@@ -152,8 +152,8 @@ export const Component = () => {
          ])
          .addImpl(RemoteBlobStorage('static'), StaticBlobStorage)
          .addImpl(
            LocalSyncStorage,
            ReadonlyMappingSyncStorage({
            DocStorageImpl,
            new ReadonlyDocStorage({
              [workspaceId]: new Uint8Array(workspaceArrayBuffer),
              [pageId]: new Uint8Array(pageArrayBuffer),
            })
@@ -161,8 +161,8 @@ export const Component = () => {
      }
    );

    workspace.engine.sync
      .waitForSynced()
    workspace.engine
      .waitForRootDocReady()
      .then(() => {
        const { page } = workspace.services.get(PageManager).open(pageId);

@@ -304,7 +304,10 @@ export const DetailPage = ({ pageId }: { pageId: string }): ReactElement => {

  // set sync engine priority target
  useEffect(() => {
    currentWorkspace.setPriorityRule(id => id.endsWith(pageId));
    currentWorkspace.setPriorityLoad(pageId, 10);
    return () => {
      currentWorkspace.setPriorityLoad(pageId, 5);
    };
  }, [currentWorkspace, pageId]);

  const jumpOnce = useLiveData(pageRecord?.meta.map(meta => meta.jumpOnce));
@@ -70,7 +70,8 @@ export const Component = (): ReactElement => {
  }, [meta, workspaceManager, workspace, currentWorkspaceService]);

  // avoid doing operations before the workspace is loaded
  const isRootDocLoaded = useLiveData(workspace?.engine.sync.isRootDocLoaded);
  const isRootDocReady =
    useLiveData(workspace?.engine.rootDocState)?.ready ?? false;

  // if listLoading is false, we can show the 404 page; otherwise we should show the loading page.
  if (listLoading === false && meta === undefined) {
@@ -81,7 +82,7 @@ export const Component = (): ReactElement => {
    return <WorkspaceFallback key="workspaceLoading" />;
  }

  if (!isRootDocLoaded) {
  if (!isRootDocReady) {
    return (
      <ServiceProviderContext.Provider value={workspace.services}>
        <WorkspaceFallback key="workspaceLoading" />
@@ -34,7 +34,7 @@ export async function configureTestingEnvironment() {
    })
  );

  await workspace.engine.sync.waitForSynced();
  await workspace.engine.waitForSynced();

  const { page } = workspace.services.get(PageManager).open('page0');

@@ -47,6 +47,7 @@
    "@types/mixpanel-browser": "^2.49.0",
    "@types/uuid": "^9.0.8",
    "builder-util-runtime": "^9.2.4",
    "core-js": "^3.36.1",
    "cross-env": "^7.0.3",
    "electron": "^29.0.1",
    "electron-log": "^5.1.1",

@@ -1,3 +1,4 @@
import './polyfill/dispose';
// Side effect import, "declare global"
import '@affine/env/constant';

packages/frontend/electron/renderer/polyfill/dispose.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
import 'core-js/modules/esnext.symbol.async-dispose';
import 'core-js/modules/esnext.symbol.dispose';
@@ -1167,5 +1167,7 @@
  "com.affine.delete-tags.count_other": "{{count}} tags deleted",
  "com.affine.workbench.split-view-menu.keep-this-one": "Solo View",
  "com.affine.workbench.split-view.page-menu-open": "Open in split view",
  "com.affine.search-tags.placeholder": "Type here ..."
  "com.affine.search-tags.placeholder": "Type here ...",
  "com.affine.resetSyncStatus.button": "Reset Sync",
  "com.affine.resetSyncStatus.description": "This operation may fix some synchronization issues."
}

@@ -14,6 +14,7 @@
    "@affine/core": "workspace:*",
    "@affine/env": "workspace:*",
    "@juggle/resize-observer": "^3.4.0",
    "core-js": "^3.36.1",
    "intl-segmenter-polyfill-rs": "^0.1.7",
    "mixpanel-browser": "^2.49.0",
    "react": "^18.2.0",
@@ -1,3 +1,4 @@
import './polyfill/dispose';
import './polyfill/intl-segmenter';
import './polyfill/request-idle-callback';
import './polyfill/resize-observer';

packages/frontend/web/src/polyfill/dispose.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
import 'core-js/modules/esnext.symbol.async-dispose';
import 'core-js/modules/esnext.symbol.dispose';

@@ -1,5 +1,5 @@
import { DebugLogger } from '@affine/debug';
import type { AwarenessProvider, RejectByVersion } from '@toeverything/infra';
import type { AwarenessProvider } from '@toeverything/infra';
import {
  applyAwarenessUpdate,
  type Awareness,
@@ -135,7 +135,7 @@ export class AffineCloudAwarenessProvider implements AwarenessProvider {
    );
  };

  handleReject = (_msg: RejectByVersion) => {
  handleReject = () => {
    this.socket.off('server-version-rejected', this.handleReject);
    this.disconnect();
    this.socket.disconnect();
packages/frontend/workspace-impl/src/cloud/doc-static.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
import { fetchWithTraceReport } from '@affine/graphql';

export class AffineStaticDocStorage {
  name = 'affine-cloud-static';
  constructor(private readonly workspaceId: string) {}

  async pull(
    docId: string
  ): Promise<{ data: Uint8Array; state?: Uint8Array | undefined } | null> {
    const response = await fetchWithTraceReport(
      `/api/workspaces/${this.workspaceId}/docs/${docId}`,
      {
        priority: 'high',
      }
    );
    if (response.ok) {
      const arrayBuffer = await response.arrayBuffer();

      return { data: new Uint8Array(arrayBuffer) };
    }

    return null;
  }
}
packages/frontend/workspace-impl/src/cloud/doc.ts (new file, 183 lines)
@@ -0,0 +1,183 @@
import { DebugLogger } from '@affine/debug';
import { type DocServer, throwIfAborted } from '@toeverything/infra';
import type { Socket } from 'socket.io-client';

import { getIoManager } from '../utils/affine-io';
import { base64ToUint8Array, uint8ArrayToBase64 } from '../utils/base64';

(window as any)._TEST_SIMULATE_SYNC_LAG = Promise.resolve();

const logger = new DebugLogger('affine-cloud-doc-engine-server');

export class AffineCloudDocEngineServer implements DocServer {
  socket = null as unknown as Socket;
  interruptCb: ((reason: string) => void) | null = null;
  SEND_TIMEOUT = 30000;

  constructor(private readonly workspaceId: string) {}

  private async clientHandShake() {
    await this.socket.emitWithAck('client-handshake-sync', {
      workspaceId: this.workspaceId,
      version: runtimeConfig.appVersion,
    });
  }

  async pullDoc(docId: string, state: Uint8Array) {
    // for testing
    await (window as any)._TEST_SIMULATE_SYNC_LAG;

    const stateVector = state ? await uint8ArrayToBase64(state) : undefined;

    const response:
      | { error: any }
      | { data: { missing: string; state: string; timestamp: number } } =
      await this.socket.timeout(this.SEND_TIMEOUT).emitWithAck('doc-load-v2', {
        workspaceId: this.workspaceId,
        guid: docId,
        stateVector,
      });

    if ('error' in response) {
      // TODO: reuse `EventError` with server
      if (response.error.code === 'DOC_NOT_FOUND') {
        return null;
      } else {
        throw new Error(response.error.message);
      }
    } else {
      return {
        data: base64ToUint8Array(response.data.missing),
        stateVector: response.data.state
          ? base64ToUint8Array(response.data.state)
          : undefined,
        serverClock: response.data.timestamp,
      };
    }
  }
  async pushDoc(docId: string, data: Uint8Array) {
    const payload = await uint8ArrayToBase64(data);

    const response: {
      // TODO: reuse `EventError` with server
      error?: any;
      data: { timestamp: number };
    } = await this.socket
      .timeout(this.SEND_TIMEOUT)
      .emitWithAck('client-update-v2', {
        workspaceId: this.workspaceId,
        guid: docId,
        updates: [payload],
      });

    // TODO: raise error with different code to users
    if (response.error) {
      logger.error('client-update-v2 error', {
        workspaceId: this.workspaceId,
        guid: docId,
        response,
      });

      throw new Error(response.error);
    }

    return { serverClock: response.data.timestamp };
  }
  async loadServerClock(after: number): Promise<Map<string, number>> {
    const response: {
      // TODO: reuse `EventError` with server
      error?: any;
      data: Record<string, number>;
    } = await this.socket
      .timeout(this.SEND_TIMEOUT)
      .emitWithAck('client-pre-sync', {
        workspaceId: this.workspaceId,
        timestamp: after,
      });

    if (response.error) {
      logger.error('client-pre-sync error', {
        workspaceId: this.workspaceId,
        response,
      });

      throw new Error(response.error);
    }

    return new Map(Object.entries(response.data));
  }
  async subscribeAllDocs(
    cb: (updates: {
      docId: string;
      data: Uint8Array;
      serverClock: number;
    }) => void
  ): Promise<() => void> {
    const handleUpdate = async (message: {
      workspaceId: string;
      guid: string;
      updates: string[];
      timestamp: number;
    }) => {
      if (message.workspaceId === this.workspaceId) {
        message.updates.forEach(update => {
          cb({
            docId: message.guid,
            data: base64ToUint8Array(update),
            serverClock: message.timestamp,
          });
        });
      }
    };
    this.socket.on('server-updates', handleUpdate);

    return () => {
      this.socket.off('server-updates', handleUpdate);
    };
  }
  async waitForConnectingServer(signal: AbortSignal): Promise<void> {
    const socket = getIoManager().socket('/');
    this.socket = socket;
    this.socket.on('server-version-rejected', this.handleVersionRejected);
    this.socket.on('disconnect', this.handleDisconnect);

    throwIfAborted(signal);
    if (this.socket.connected) {
      await this.clientHandShake();
    } else {
      this.socket.connect();
      await new Promise<void>((resolve, reject) => {
        this.socket.on('connect', () => {
          resolve();
        });
        signal.addEventListener('abort', () => {
          reject('aborted');
        });
      });
      throwIfAborted(signal);
      await this.clientHandShake();
    }
  }
  disconnectServer(): void {
    if (!this.socket) {
      return;
    }

    this.socket.emit('client-leave-sync', this.workspaceId);
    this.socket.off('server-version-rejected', this.handleVersionRejected);
    this.socket.off('disconnect', this.handleDisconnect);
    this.socket = null as unknown as Socket;
  }
  onInterrupted = (cb: (reason: string) => void) => {
    this.interruptCb = cb;
  };
  handleInterrupted = (reason: string) => {
    this.interruptCb?.(reason);
  };
  handleDisconnect = (reason: Socket.DisconnectReason) => {
    this.interruptCb?.(reason);
  };
  handleVersionRejected = () => {
    this.interruptCb?.('Client version rejected');
  };
}
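A hypothetical sketch of how a doc engine might exercise the DocServer contract implemented above; the workspace id and doc guid are placeholders, and pushing the pulled data back is done only to show both calls:

const server = new AffineCloudDocEngineServer('workspace-id');
await server.waitForConnectingServer(new AbortController().signal);
const pulled = await server.pullDoc('doc-guid', new Uint8Array());
if (pulled) {
  // pulled.data is the missing update; serverClock orders server-side changes
  const { serverClock } = await server.pushDoc('doc-guid', pulled.data);
  console.log('acknowledged at server clock', serverClock);
}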
@@ -1,6 +1,4 @@
export * from './awareness';
export * from './blob';
export * from './consts';
export { AffineCloudBlobStorage } from './blob';
export { CLOUD_WORKSPACE_CHANGED_BROADCAST_CHANNEL_KEY } from './consts';
export * from './list';
export * from './sync';
export * from './workspace-factory';

@@ -10,7 +10,6 @@ import { DocCollection } from '@blocksuite/store';
import type { WorkspaceListProvider } from '@toeverything/infra';
import {
  type BlobStorage,
  type SyncStorage,
  type WorkspaceInfo,
  type WorkspaceMetadata,
} from '@toeverything/infra';
@@ -21,10 +20,10 @@ import { applyUpdate, encodeStateAsUpdate } from 'yjs';

import { IndexedDBBlobStorage } from '../local/blob-indexeddb';
import { SQLiteBlobStorage } from '../local/blob-sqlite';
import { IndexedDBSyncStorage } from '../local/sync-indexeddb';
import { SQLiteSyncStorage } from '../local/sync-sqlite';
import { IndexedDBDocStorage } from '../local/doc-indexeddb';
import { SqliteDocStorage } from '../local/doc-sqlite';
import { CLOUD_WORKSPACE_CHANGED_BROADCAST_CHANNEL_KEY } from './consts';
import { AffineStaticSyncStorage } from './sync';
import { AffineStaticDocStorage } from './doc-static';

async function getCloudWorkspaceList() {
  try {
@@ -94,17 +93,20 @@ export class CloudWorkspaceListProvider implements WorkspaceListProvider {
    const blobStorage = environment.isDesktop
      ? new SQLiteBlobStorage(workspaceId)
      : new IndexedDBBlobStorage(workspaceId);
    const syncStorage = environment.isDesktop
      ? new SQLiteSyncStorage(workspaceId)
      : new IndexedDBSyncStorage(workspaceId);
    const docStorage = environment.isDesktop
      ? new SqliteDocStorage(workspaceId)
      : new IndexedDBDocStorage(workspaceId);

    // apply initial state
    await initial(docCollection, blobStorage);

    // save workspace to local storage, should be very fast
    await syncStorage.push(workspaceId, encodeStateAsUpdate(docCollection.doc));
    await docStorage.doc.set(
      workspaceId,
      encodeStateAsUpdate(docCollection.doc)
    );
    for (const subdocs of docCollection.doc.getSubdocs()) {
      await syncStorage.push(subdocs.guid, encodeStateAsUpdate(subdocs));
      await docStorage.doc.set(subdocs.guid, encodeStateAsUpdate(subdocs));
    }

    // notify all browser tabs, so they can update their workspace list
@@ -155,13 +157,13 @@ export class CloudWorkspaceListProvider implements WorkspaceListProvider {
    // get information from both cloud and local storage

    // we use the affine 'static' storage here, which uses the HTTP protocol, so no websocket is needed.
    const cloudStorage: SyncStorage = new AffineStaticSyncStorage(id);
    const localStorage = environment.isDesktop
      ? new SQLiteSyncStorage(id)
      : new IndexedDBSyncStorage(id);
    const cloudStorage = new AffineStaticDocStorage(id);
    const docStorage = environment.isDesktop
      ? new SqliteDocStorage(id)
      : new IndexedDBDocStorage(id);
    // download root doc
    const localData = await localStorage.pull(id, new Uint8Array([]));
    const cloudData = await cloudStorage.pull(id, new Uint8Array([]));
    const localData = await docStorage.doc.get(id);
    const cloudData = await cloudStorage.pull(id);

    if (!cloudData && !localData) {
      return;
@@ -172,7 +174,7 @@ export class CloudWorkspaceListProvider implements WorkspaceListProvider {
      schema: globalBlockSuiteSchema,
    });

    if (localData) applyUpdate(bs.doc, localData.data);
    if (localData) applyUpdate(bs.doc, localData);
    if (cloudData) applyUpdate(bs.doc, cloudData.data);

    return {
@@ -1,208 +0,0 @@
import { DebugLogger } from '@affine/debug';
import { fetchWithTraceReport } from '@affine/graphql';
import {
  type RejectByVersion,
  type SyncErrorMessage,
  type SyncStorage,
} from '@toeverything/infra';
import type { CleanupService } from '@toeverything/infra/lifecycle';

import { getIoManager } from '../utils/affine-io';
import { base64ToUint8Array, uint8ArrayToBase64 } from '../utils/base64';

const logger = new DebugLogger('affine:storage:socketio');

(window as any)._TEST_SIMULATE_SYNC_LAG = Promise.resolve();

export class AffineSyncStorage implements SyncStorage {
  name = 'affine-cloud';

  SEND_TIMEOUT = 30000;

  socket = getIoManager().socket('/');

  errorMessage?: SyncErrorMessage;

  constructor(
    private readonly workspaceId: string,
    cleanupService: CleanupService
  ) {
    this.socket.on('connect', this.handleConnect);
    this.socket.on('server-version-rejected', this.handleReject);

    if (this.socket.connected) {
      this.handleConnect();
    } else {
      this.socket.connect();
    }

    cleanupService.add(() => {
      this.cleanup();
    });
  }

  handleConnect = () => {
    this.socket.emit(
      'client-handshake-sync',
      {
        workspaceId: this.workspaceId,
        version: runtimeConfig.appVersion,
      },
      (res: any) => {
        logger.debug('client handshake finished', res);
      }
    );
  };

  handleReject = (message: RejectByVersion) => {
    this.socket.off('server-version-rejected', this.handleReject);
    this.cleanup();
    this.socket.disconnect();
    this.errorMessage = { type: 'outdated', message };
  };

  async pull(
    docId: string,
    state: Uint8Array
  ): Promise<{ data: Uint8Array; state?: Uint8Array } | null> {
    // for testing
    await (window as any)._TEST_SIMULATE_SYNC_LAG;

    const stateVector = state ? await uint8ArrayToBase64(state) : undefined;

    logger.debug('doc-load-v2', {
      workspaceId: this.workspaceId,
      guid: docId,
      stateVector,
    });

    const response:
      | { error: any }
      | { data: { missing: string; state: string } } = await this.socket
      .timeout(this.SEND_TIMEOUT)
      .emitWithAck('doc-load-v2', {
        workspaceId: this.workspaceId,
        guid: docId,
        stateVector,
      });

    logger.debug('doc-load callback', {
      workspaceId: this.workspaceId,
      guid: docId,
      stateVector,
      response,
    });

    if ('error' in response) {
      // TODO: reuse `EventError` with server
      if (response.error.code === 'DOC_NOT_FOUND') {
        return null;
      } else {
        throw new Error(response.error.message);
      }
    } else {
      return {
        data: base64ToUint8Array(response.data.missing),
        state: response.data.state
          ? base64ToUint8Array(response.data.state)
          : undefined,
      };
    }
  }

  async push(docId: string, update: Uint8Array) {
    logger.debug('client-update-v2', {
      workspaceId: this.workspaceId,
      guid: docId,
      update,
    });

    const payload = await uint8ArrayToBase64(update);

    const response: {
      // TODO: reuse `EventError` with server
      error?: any;
      data: any;
    } = await this.socket
      .timeout(this.SEND_TIMEOUT)
      .emitWithAck('client-update-v2', {
        workspaceId: this.workspaceId,
        guid: docId,
        updates: [payload],
      });

    // TODO: raise error with different code to users
    if (response.error) {
      logger.error('client-update-v2 error', {
        workspaceId: this.workspaceId,
        guid: docId,
        response,
      });

      throw new Error(response.error);
    }
  }

  async subscribe(
    cb: (docId: string, data: Uint8Array) => void,
    disconnect: (reason: string) => void
  ) {
    const handleUpdate = async (message: {
      workspaceId: string;
      guid: string;
      updates: string[];
    }) => {
      if (message.workspaceId === this.workspaceId) {
        message.updates.forEach(update => {
          cb(message.guid, base64ToUint8Array(update));
        });
      }
    };
    const handleDisconnect = (reason: string) => {
      this.socket.off('server-updates', handleUpdate);
      disconnect(reason);
    };
    this.socket.on('server-updates', handleUpdate);

    this.socket.on('disconnect', handleDisconnect);

    return () => {
      this.socket.off('server-updates', handleUpdate);
      this.socket.off('disconnect', handleDisconnect);
    };
  }

  cleanup() {
    this.socket.emit('client-leave-sync', this.workspaceId);
    this.socket.off('connect', this.handleConnect);
  }
}

export class AffineStaticSyncStorage implements SyncStorage {
  name = 'affine-cloud-static';
  constructor(private readonly workspaceId: string) {}

  async pull(
    docId: string
  ): Promise<{ data: Uint8Array; state?: Uint8Array | undefined } | null> {
    const response = await fetchWithTraceReport(
      `/api/workspaces/${this.workspaceId}/docs/${docId}`,
      {
        priority: 'high',
      }
    );
    if (response.ok) {
      const arrayBuffer = await response.arrayBuffer();

      return { data: new Uint8Array(arrayBuffer) };
    }

    return null;
  }
  push(): Promise<void> {
    throw new Error('Method not implemented.');
  }
  subscribe(): Promise<() => void> {
    throw new Error('Method not implemented.');
  }
}
@@ -3,19 +3,19 @@ import type { WorkspaceFactory } from '@toeverything/infra';
import {
  AwarenessContext,
  AwarenessProvider,
  DocServerImpl,
  RemoteBlobStorage,
  RemoteSyncStorage,
  WorkspaceIdContext,
  WorkspaceScope,
} from '@toeverything/infra';
import type { ServiceCollection } from '@toeverything/infra/di';
import { CleanupService } from '@toeverything/infra/lifecycle';

import { LocalWorkspaceFactory } from '../local';
import { IndexedDBBlobStorage, SQLiteBlobStorage } from '../local';
import { IndexedDBBlobStorage } from '../local/blob-indexeddb';
import { SQLiteBlobStorage } from '../local/blob-sqlite';
import { AffineCloudAwarenessProvider } from './awareness';
import { AffineCloudBlobStorage } from './blob';
import { AffineSyncStorage } from './sync';
import { AffineCloudDocEngineServer } from './doc';

export class CloudWorkspaceFactory implements WorkspaceFactory {
  name = WorkspaceFlavour.AFFINE_CLOUD;
@@ -28,10 +28,7 @@ export class CloudWorkspaceFactory implements WorkspaceFactory {
      .addImpl(RemoteBlobStorage('affine-cloud'), AffineCloudBlobStorage, [
        WorkspaceIdContext,
      ])
      .addImpl(RemoteSyncStorage('affine-cloud'), AffineSyncStorage, [
        WorkspaceIdContext,
        CleanupService,
      ])
      .addImpl(DocServerImpl, AffineCloudDocEngineServer, [WorkspaceIdContext])
      .addImpl(
        AwarenessProvider('affine-cloud'),
        AffineCloudAwarenessProvider,

@@ -9,11 +9,8 @@ import {
} from '@toeverything/infra';

import { CloudWorkspaceFactory, CloudWorkspaceListProvider } from './cloud';
import {
  LOCAL_WORKSPACE_LOCAL_STORAGE_KEY,
  LocalWorkspaceFactory,
  LocalWorkspaceListProvider,
} from './local';
import { LocalWorkspaceFactory, LocalWorkspaceListProvider } from './local';
import { LOCAL_WORKSPACE_LOCAL_STORAGE_KEY } from './local/consts';
import { WorkspaceLocalStateImpl } from './local-state';

export * from './cloud';
@@ -1,31 +1,38 @@
import type {
  GlobalState,
  Workspace,
  WorkspaceLocalState,
import {
  type GlobalState,
  type Memento,
  type Workspace,
  type WorkspaceLocalState,
  wrapMemento,
} from '@toeverything/infra';

export class WorkspaceLocalStateImpl implements WorkspaceLocalState {
  constructor(
    private readonly workspace: Workspace,
    private readonly globalState: GlobalState
  ) {}
  wrapped: Memento;
  constructor(workspace: Workspace, globalState: GlobalState) {
    this.wrapped = wrapMemento(globalState, `workspace-state:${workspace.id}:`);
  }

  keys(): string[] {
    return this.wrapped.keys();
  }

  get<T>(key: string): T | null {
    return this.globalState.get<T>(
      `workspace-state:${this.workspace.id}:${key}`
    );
    return this.wrapped.get<T>(key);
  }

  watch<T>(key: string) {
    return this.globalState.watch<T>(
      `workspace-state:${this.workspace.id}:${key}`
    );
    return this.wrapped.watch<T>(key);
  }

  set<T>(key: string, value: T | null): void {
    return this.globalState.set<T>(
      `workspace-state:${this.workspace.id}:${key}`,
      value
    );
    return this.wrapped.set<T>(key, value);
  }

  del(key: string): void {
    return this.wrapped.del(key);
  }

  clear(): void {
    return this.wrapped.clear();
  }
}
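An illustrative sketch of what wrapMemento provides here, assuming only the Memento interface used above; globalState, workspace, and pageId are placeholders:

const state = wrapMemento(globalState, `workspace-state:${workspace.id}:`);
state.set('lastOpenedPage', pageId); // writes key 'workspace-state:<id>:lastOpenedPage'
const last = state.get<string>('lastOpenedPage');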
@ -1,195 +0,0 @@
import 'fake-indexeddb/auto';

import { AffineSchemas } from '@blocksuite/blocks/schemas';
import { DocCollection, Schema } from '@blocksuite/store';
import { SyncEngine, SyncEngineStep, SyncPeerStep } from '@toeverything/infra';
import { beforeEach, describe, expect, test, vi } from 'vitest';
import { Doc } from 'yjs';

import { IndexedDBSyncStorage } from '..';
import { createTestStorage } from './test-storage';

const schema = new Schema();

schema.register(AffineSchemas);

beforeEach(() => {
  vi.useFakeTimers({ toFake: ['requestIdleCallback'] });
});

describe('SyncEngine', () => {
  test('basic - indexeddb', async () => {
    let prev: any;
    {
      const docCollection = new DocCollection({
        id: 'test - syncengine - indexeddb',

        schema,
      });

      const syncEngine = new SyncEngine(
        docCollection.doc,
        new IndexedDBSyncStorage(docCollection.doc.guid),
        [
          new IndexedDBSyncStorage(docCollection.doc.guid + '1'),
          new IndexedDBSyncStorage(docCollection.doc.guid + '2'),
        ]
      );
      syncEngine.start();

      const page = docCollection.createDoc({
        id: 'page0',
      });
      page.load();
      const pageBlockId = page.addBlock(
        'affine:page' as keyof BlockSuite.BlockModels,
        {
          title: new page.Text(''),
        }
      );
      page.addBlock(
        'affine:surface' as keyof BlockSuite.BlockModels,
        {},
        pageBlockId
      );
      const frameId = page.addBlock(
        'affine:note' as keyof BlockSuite.BlockModels,
        {},
        pageBlockId
      );
      page.addBlock(
        'affine:paragraph' as keyof BlockSuite.BlockModels,
        {},
        frameId
      );
      await syncEngine.waitForSynced();
      syncEngine.forceStop();
      prev = docCollection.doc.toJSON();
    }

    {
      const docCollection = new DocCollection({
        id: 'test - syncengine - indexeddb',

        schema,
      });
      const syncEngine = new SyncEngine(
        docCollection.doc,
        new IndexedDBSyncStorage(docCollection.doc.guid),
        []
      );
      syncEngine.start();
      await syncEngine.waitForSynced();
      expect(docCollection.doc.toJSON()).toEqual({
        ...prev,
      });
      syncEngine.forceStop();
    }

    {
      const docCollection = new DocCollection({
        id: 'test - syncengine - indexeddb',

        schema,
      });
      const syncEngine = new SyncEngine(
        docCollection.doc,
        new IndexedDBSyncStorage(docCollection.doc.guid + '1'),
        []
      );
      syncEngine.start();
      await syncEngine.waitForSynced();
      expect(docCollection.doc.toJSON()).toEqual({
        ...prev,
      });
      syncEngine.forceStop();
    }

    {
      const docCollection = new DocCollection({
        id: 'test - syncengine - indexeddb',

        schema,
      });
      const syncEngine = new SyncEngine(
        docCollection.doc,
        new IndexedDBSyncStorage(docCollection.doc.guid + '2'),
        []
      );
      syncEngine.start();
      await syncEngine.waitForSynced();
      expect(docCollection.doc.toJSON()).toEqual({
        ...prev,
      });
      syncEngine.forceStop();
    }
  });

  test('status', async () => {
    const ydoc = new Doc({ guid: 'test - syncengine - status' });

    const localStorage = createTestStorage(new IndexedDBSyncStorage(ydoc.guid));
    const remoteStorage = createTestStorage(
      new IndexedDBSyncStorage(ydoc.guid + '1')
    );

    localStorage.pausePull();
    localStorage.pausePush();
    remoteStorage.pausePull();
    remoteStorage.pausePush();

    const syncEngine = new SyncEngine(ydoc, localStorage, [remoteStorage]);
    expect(syncEngine.status.step).toEqual(SyncEngineStep.Stopped);

    syncEngine.start();

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(
        SyncPeerStep.LoadingRootDoc
      );
    });

    localStorage.resumePull();

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(
        SyncPeerStep.LoadingRootDoc
      );
    });

    remoteStorage.resumePull();

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
    });

    ydoc.getArray('test').insert(0, [1, 2, 3]);

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Syncing);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Syncing);
    });

    localStorage.resumePush();

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Syncing);
    });

    remoteStorage.resumePush();

    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Synced);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Synced);
    });
  });
});
@ -1,109 +0,0 @@
import 'fake-indexeddb/auto';

import { AffineSchemas } from '@blocksuite/blocks/schemas';
import { DocCollection, Schema } from '@blocksuite/store';
import { SyncPeer, SyncPeerStep } from '@toeverything/infra';
import { beforeEach, describe, expect, test, vi } from 'vitest';

import { IndexedDBSyncStorage } from '..';

const schema = new Schema();

schema.register(AffineSchemas);

beforeEach(() => {
  vi.useFakeTimers({ toFake: ['requestIdleCallback'] });
});

describe('SyncPeer', () => {
  test('basic - indexeddb', async () => {
    let prev: any;
    {
      const docCollection = new DocCollection({
        id: 'test - syncpeer - indexeddb',

        schema,
      });

      const syncPeer = new SyncPeer(
        docCollection.doc,
        new IndexedDBSyncStorage(docCollection.doc.guid)
      );
      await syncPeer.waitForLoaded();

      const page = docCollection.createDoc({
        id: 'page0',
      });
      page.load();
      const pageBlockId = page.addBlock(
        'affine:page' as keyof BlockSuite.BlockModels,
        {
          title: new page.Text(''),
        }
      );
      page.addBlock(
        'affine:surface' as keyof BlockSuite.BlockModels,
        {},
        pageBlockId
      );
      const frameId = page.addBlock(
        'affine:note' as keyof BlockSuite.BlockModels,
        {},
        pageBlockId
      );
      page.addBlock(
        'affine:paragraph' as keyof BlockSuite.BlockModels,
        {},
        frameId
      );
      await syncPeer.waitForSynced();
      syncPeer.stop();
      prev = docCollection.doc.toJSON();
    }

    {
      const docCollection = new DocCollection({
        id: 'test - syncpeer - indexeddb',

        schema,
      });
      const syncPeer = new SyncPeer(
        docCollection.doc,
        new IndexedDBSyncStorage(docCollection.doc.guid)
      );
      await syncPeer.waitForSynced();
      expect(docCollection.doc.toJSON()).toEqual({
        ...prev,
      });
      syncPeer.stop();
    }
  });

  test('status', async () => {
    const docCollection = new DocCollection({
      id: 'test - syncpeer - status',

      schema,
    });

    const syncPeer = new SyncPeer(
      docCollection.doc,
      new IndexedDBSyncStorage(docCollection.doc.guid)
    );
    expect(syncPeer.status.step).toBe(SyncPeerStep.LoadingRootDoc);
    await syncPeer.waitForSynced();
    expect(syncPeer.status.step).toBe(SyncPeerStep.Synced);

    const page = docCollection.createDoc({
      id: 'page0',
    });
    expect(syncPeer.status.step).toBe(SyncPeerStep.LoadingSubDoc);
    page.load();
    await syncPeer.waitForSynced();
    page.addBlock('affine:page' as keyof BlockSuite.BlockModels, {
      title: new page.Text(''),
    });
    expect(syncPeer.status.step).toBe(SyncPeerStep.Syncing);
    syncPeer.stop();
  });
});
@ -1,42 +0,0 @@
import type { SyncStorage } from '@toeverything/infra';

export function createTestStorage(origin: SyncStorage) {
  const controler = {
    pausedPull: Promise.resolve(),
    resumePull: () => {},
    pausedPush: Promise.resolve(),
    resumePush: () => {},
  };

  return {
    name: `${origin.name}(testing)`,
    pull(docId: string, state: Uint8Array) {
      return controler.pausedPull.then(() => origin.pull(docId, state));
    },
    push(docId: string, data: Uint8Array) {
      return controler.pausedPush.then(() => origin.push(docId, data));
    },
    subscribe(
      cb: (docId: string, data: Uint8Array) => void,
      disconnect: (reason: string) => void
    ) {
      return origin.subscribe(cb, disconnect);
    },
    pausePull() {
      controler.pausedPull = new Promise(resolve => {
        controler.resumePull = resolve;
      });
    },
    resumePull() {
      controler.resumePull?.();
    },
    pausePush() {
      controler.pausedPush = new Promise(resolve => {
        controler.resumePush = resolve;
      });
    },
    resumePush() {
      controler.resumePush?.();
    },
  };
}
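The deleted helper above gates `pull`/`push` behind swappable promises so a test can freeze a storage mid-sync and resume it later. The same pattern in isolation, as a generic sketch:

```ts
// A paused gate holds callers on a pending promise; resume() resolves it.
function createGate() {
  let resume = () => {};
  let paused = Promise.resolve();
  return {
    pause() {
      paused = new Promise<void>(resolve => (resume = resolve));
    },
    resume() {
      resume();
    },
    run<T>(fn: () => Promise<T>): Promise<T> {
      return paused.then(fn); // only proceeds once the gate is open
    },
  };
}
```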
@ -0,0 +1,60 @@
import type { DocEvent, DocEventBus } from '@toeverything/infra';

type LegacyChannelMessage = {
  type: 'db-updated';
  payload: {
    docId: string;
    update: Uint8Array;
  };
  __from_new_doc_engine?: boolean;
};

export class BroadcastChannelDocEventBus implements DocEventBus {
  legacyChannel = new BroadcastChannel('indexeddb:' + this.workspaceId);
  senderChannel = new BroadcastChannel('doc:' + this.workspaceId);
  constructor(private readonly workspaceId: string) {
    this.legacyChannel.addEventListener(
      'message',
      (event: MessageEvent<LegacyChannelMessage>) => {
        if (event.data.__from_new_doc_engine) {
          return;
        }
        if (event.data.type === 'db-updated') {
          this.emit({
            type: 'LegacyClientUpdateCommitted',
            docId: event.data.payload.docId,
            update: event.data.payload.update,
          });
        }
      }
    );
  }
  emit(event: DocEvent): void {
    if (
      event.type === 'ClientUpdateCommitted' ||
      event.type === 'ServerUpdateCommitted'
    ) {
      this.legacyChannel.postMessage({
        type: 'db-updated',
        payload: {
          docId: event.docId,
          update: event.update,
        },
        __from_new_doc_engine: true,
      } satisfies LegacyChannelMessage);
    }
    this.senderChannel.postMessage(event);
  }

  on(cb: (event: DocEvent) => void): () => void {
    const listener = (event: MessageEvent<DocEvent>) => {
      cb(event.data);
    };
    const channel = new BroadcastChannel('doc:' + this.workspaceId);
    channel.addEventListener('message', listener);
    return () => {
      channel.removeEventListener('message', listener);
      channel.close();
    };
  }
}
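A hedged usage sketch of the bus above: one tab emits a committed update and other tabs of the same workspace observe it. The event shape follows the `DocEvent` fields the class itself reads; the ids and payload are illustrative:

```ts
import { BroadcastChannelDocEventBus } from './doc-broadcast-channel';

const bus = new BroadcastChannelDocEventBus('workspace-1');

// on() opens its own 'doc:<workspaceId>' channel and returns an unsubscriber.
const unsubscribe = bus.on(event => {
  console.log('doc event:', event.type);
});

// 'ClientUpdateCommitted' is also mirrored onto the legacy
// 'indexeddb:<workspaceId>' channel, tagged __from_new_doc_engine
// so the listener above does not echo it back as a legacy event.
bus.emit({
  type: 'ClientUpdateCommitted',
  docId: 'page0',
  update: new Uint8Array([0, 0]), // an empty yjs update, for illustration
});

unsubscribe();
```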
246
packages/frontend/workspace-impl/src/local/doc-indexeddb.ts
Normal file
246
packages/frontend/workspace-impl/src/local/doc-indexeddb.ts
Normal file
@ -0,0 +1,246 @@
import type { ByteKV, ByteKVBehavior, DocStorage } from '@toeverything/infra';
import {
  type DBSchema,
  type IDBPDatabase,
  type IDBPObjectStore,
  openDB,
} from 'idb';
import { mergeUpdates } from 'yjs';

import { BroadcastChannelDocEventBus } from './doc-broadcast-channel';

function isEmptyUpdate(binary: Uint8Array) {
  return (
    binary.byteLength === 0 ||
    (binary.byteLength === 2 && binary[0] === 0 && binary[1] === 0)
  );
}

export class IndexedDBDocStorage implements DocStorage {
  constructor(private readonly workspaceId: string) {}
  eventBus = new BroadcastChannelDocEventBus(this.workspaceId);
  readonly doc = new Doc();
  readonly syncMetadata = new KV(`${this.workspaceId}:sync-metadata`);
  readonly serverClock = new KV(`${this.workspaceId}:server-clock`);
}

interface DocDBSchema extends DBSchema {
  workspace: {
    key: string;
    value: {
      id: string;
      updates: {
        timestamp: number;
        update: Uint8Array;
      }[];
    };
  };
}

type DocType = DocStorage['doc'];
class Doc implements DocType {
  dbName = 'affine-local';
  dbPromise: Promise<IDBPDatabase<DocDBSchema>> | null = null;
  dbVersion = 1;

  constructor() {}

  upgradeDB(db: IDBPDatabase<DocDBSchema>) {
    db.createObjectStore('workspace', { keyPath: 'id' });
  }

  getDb() {
    if (this.dbPromise === null) {
      this.dbPromise = openDB<DocDBSchema>(this.dbName, this.dbVersion, {
        upgrade: db => this.upgradeDB(db),
      });
    }
    return this.dbPromise;
  }

  async get(docId: string): Promise<Uint8Array | null> {
    const db = await this.getDb();
    const store = db
      .transaction('workspace', 'readonly')
      .objectStore('workspace');
    const data = await store.get(docId);

    if (!data) {
      return null;
    }

    const updates = data.updates
      .map(({ update }) => update)
      .filter(update => !isEmptyUpdate(update));
    const update = updates.length > 0 ? mergeUpdates(updates) : null;

    return update;
  }

  async set(docId: string, data: Uint8Array) {
    const db = await this.getDb();
    const store = db
      .transaction('workspace', 'readwrite')
      .objectStore('workspace');

    const rows = [{ timestamp: Date.now(), update: data }];
    await store.put({
      id: docId,
      updates: rows,
    });
  }

  async keys() {
    const db = await this.getDb();
    const store = db
      .transaction('workspace', 'readonly')
      .objectStore('workspace');

    return store.getAllKeys();
  }

  clear(): void | Promise<void> {
    return;
  }

  del(_key: string): void | Promise<void> {
    return;
  }

  async transaction<T>(
    cb: (transaction: ByteKVBehavior) => Promise<T>
  ): Promise<T> {
    const db = await this.getDb();
    const store = db
      .transaction('workspace', 'readwrite')
      .objectStore('workspace');
    return await cb({
      async get(docId) {
        const data = await store.get(docId);

        if (!data) {
          return null;
        }

        const { updates } = data;
        const update = mergeUpdates(updates.map(({ update }) => update));

        return update;
      },
      keys() {
        return store.getAllKeys();
      },
      async set(docId, data) {
        const rows = [{ timestamp: Date.now(), update: data }];
        await store.put({
          id: docId,
          updates: rows,
        });
      },
      async clear() {
        return await store.clear();
      },
      async del(key) {
        return store.delete(key);
      },
    });
  }
}

interface KvDBSchema extends DBSchema {
  kv: {
    key: string;
    value: { key: string; val: Uint8Array };
  };
}

class KV implements ByteKV {
  constructor(private readonly dbName: string) {}

  dbPromise: Promise<IDBPDatabase<KvDBSchema>> | null = null;
  dbVersion = 1;

  upgradeDB(db: IDBPDatabase<KvDBSchema>) {
    db.createObjectStore('kv', { keyPath: 'key' });
  }

  getDb() {
    if (this.dbPromise === null) {
      this.dbPromise = openDB<KvDBSchema>(this.dbName, this.dbVersion, {
        upgrade: db => this.upgradeDB(db),
      });
    }
    return this.dbPromise;
  }

  async transaction<T>(
    cb: (transaction: ByteKVBehavior) => Promise<T>
  ): Promise<T> {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readwrite').objectStore('kv');

    const behavior = new KVBehavior(store);
    return await cb(behavior);
  }

  async get(key: string): Promise<Uint8Array | null> {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readonly').objectStore('kv');
    return new KVBehavior(store).get(key);
  }
  async set(key: string, value: Uint8Array): Promise<void> {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readwrite').objectStore('kv');
    return new KVBehavior(store).set(key, value);
  }
  async keys(): Promise<string[]> {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readwrite').objectStore('kv');
    return new KVBehavior(store).keys();
  }
  async clear() {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readwrite').objectStore('kv');
    return new KVBehavior(store).clear();
  }
  async del(key: string) {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readwrite').objectStore('kv');
    return new KVBehavior(store).del(key);
  }
}

class KVBehavior implements ByteKVBehavior {
  constructor(
    private readonly store: IDBPObjectStore<KvDBSchema, ['kv'], 'kv', any>
  ) {}
  async get(key: string): Promise<Uint8Array | null> {
    const value = await this.store.get(key);
    return value?.val ?? null;
  }
  async set(key: string, value: Uint8Array): Promise<void> {
    if (this.store.put === undefined) {
      throw new Error('Cannot set in a readonly transaction');
    }
    await this.store.put({
      key: key,
      val: value,
    });
  }
  async keys(): Promise<string[]> {
    return await this.store.getAllKeys();
  }
  async del(key: string) {
    if (this.store.delete === undefined) {
      throw new Error('Cannot set in a readonly transaction');
    }
    return await this.store.delete(key);
  }

  async clear() {
    if (this.store.clear === undefined) {
      throw new Error('Cannot set in a readonly transaction');
    }
    return await this.store.clear();
  }
}
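A short usage sketch for the storage above, assuming only the `doc.set`/`doc.get` behavior visible in the file: a `set` replaces the stored rows with a single row, and a `get` merges whatever non-empty rows exist via yjs `mergeUpdates`. The workspace id is illustrative:

```ts
import { Doc as YDoc, encodeStateAsUpdate } from 'yjs';

import { IndexedDBDocStorage } from './doc-indexeddb';

async function demo() {
  const storage = new IndexedDBDocStorage('workspace-1');

  const ydoc = new YDoc({ guid: 'page0' });
  ydoc.getText('title').insert(0, 'hello');

  // persist the full doc state; each set() replaces the previous rows
  await storage.doc.set(ydoc.guid, encodeStateAsUpdate(ydoc));
  return storage.doc.get(ydoc.guid); // merged update as Uint8Array, or null
}
```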
186
packages/frontend/workspace-impl/src/local/doc-sqlite.ts
Normal file
186
packages/frontend/workspace-impl/src/local/doc-sqlite.ts
Normal file
@ -0,0 +1,186 @@
import { apis } from '@affine/electron-api';
import {
  AsyncLock,
  type ByteKV,
  type ByteKVBehavior,
  type DocStorage,
  MemoryDocEventBus,
} from '@toeverything/infra';
import {
  type DBSchema,
  type IDBPDatabase,
  type IDBPObjectStore,
  openDB,
} from 'idb';

export class SqliteDocStorage implements DocStorage {
  constructor(private readonly workspaceId: string) {}
  eventBus = new MemoryDocEventBus();
  readonly doc = new Doc(this.workspaceId);
  readonly syncMetadata = new KV(`${this.workspaceId}:sync-metadata`);
  readonly serverClock = new KV(`${this.workspaceId}:server-clock`);
}

type DocType = DocStorage['doc'];

class Doc implements DocType {
  lock = new AsyncLock();
  constructor(private readonly workspaceId: string) {
    if (!apis?.db) {
      throw new Error('sqlite datasource is not available');
    }
  }

  async transaction<T>(
    cb: (transaction: ByteKVBehavior) => Promise<T>
  ): Promise<T> {
    using _lock = await this.lock.acquire();
    return await cb(this);
  }

  keys(): string[] | Promise<string[]> {
    return [];
  }

  async get(docId: string) {
    if (!apis?.db) {
      throw new Error('sqlite datasource is not available');
    }
    const update = await apis.db.getDocAsUpdates(
      this.workspaceId,
      this.workspaceId === docId ? undefined : docId
    );

    if (update) {
      if (
        update.byteLength === 0 ||
        (update.byteLength === 2 && update[0] === 0 && update[1] === 0)
      ) {
        return null;
      }

      return update;
    }

    return null;
  }

  async set(docId: string, data: Uint8Array) {
    if (!apis?.db) {
      throw new Error('sqlite datasource is not available');
    }
    await apis.db.applyDocUpdate(
      this.workspaceId,
      data,
      this.workspaceId === docId ? undefined : docId
    );
  }

  clear(): void | Promise<void> {
    return;
  }

  del(): void | Promise<void> {
    return;
  }
}

interface KvDBSchema extends DBSchema {
  kv: {
    key: string;
    value: { key: string; val: Uint8Array };
  };
}

class KV implements ByteKV {
  constructor(private readonly dbName: string) {}

  dbPromise: Promise<IDBPDatabase<KvDBSchema>> | null = null;
  dbVersion = 1;

  upgradeDB(db: IDBPDatabase<KvDBSchema>) {
    db.createObjectStore('kv', { keyPath: 'key' });
  }

  getDb() {
    if (this.dbPromise === null) {
      this.dbPromise = openDB<KvDBSchema>(this.dbName, this.dbVersion, {
        upgrade: db => this.upgradeDB(db),
      });
    }
    return this.dbPromise;
  }

  async transaction<T>(
    cb: (transaction: ByteKVBehavior) => Promise<T>
  ): Promise<T> {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readwrite').objectStore('kv');

    const behavior = new KVBehavior(store);
    return await cb(behavior);
  }

  async get(key: string): Promise<Uint8Array | null> {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readonly').objectStore('kv');
    return new KVBehavior(store).get(key);
  }
  async set(key: string, value: Uint8Array): Promise<void> {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readwrite').objectStore('kv');
    return new KVBehavior(store).set(key, value);
  }
  async keys(): Promise<string[]> {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readwrite').objectStore('kv');
    return new KVBehavior(store).keys();
  }
  async clear() {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readwrite').objectStore('kv');
    return new KVBehavior(store).clear();
  }
  async del(key: string) {
    const db = await this.getDb();
    const store = db.transaction('kv', 'readwrite').objectStore('kv');
    return new KVBehavior(store).del(key);
  }
}

class KVBehavior implements ByteKVBehavior {
  constructor(
    private readonly store: IDBPObjectStore<KvDBSchema, ['kv'], 'kv', any>
  ) {}

  async get(key: string): Promise<Uint8Array | null> {
    const value = await this.store.get(key);
    return value?.val ?? null;
  }
  async set(key: string, value: Uint8Array): Promise<void> {
    if (this.store.put === undefined) {
      throw new Error('Cannot set in a readonly transaction');
    }
    await this.store.put({
      key: key,
      val: value,
    });
  }
  async keys(): Promise<string[]> {
    return await this.store.getAllKeys();
  }

  async del(key: string) {
    if (this.store.delete === undefined) {
      throw new Error('Cannot set in a readonly transaction');
    }
    return await this.store.delete(key);
  }

  async clear() {
    if (this.store.clear === undefined) {
      throw new Error('Cannot set in a readonly transaction');
    }
    return await this.store.clear();
  }
}
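`Doc.transaction` above serializes writers with `AsyncLock` plus a `using` declaration (TC39 explicit resource management), so the lock is released however the callback exits. A generic sketch of that pattern, assuming `acquire()` resolves to a handle exposing `[Symbol.dispose]`; this is not the real `AsyncLock` from `@toeverything/infra`:

```ts
// Requires TS 5.2+ with the 'esnext.disposable' lib for Disposable.
class SketchAsyncLock {
  private tail: Promise<void> = Promise.resolve();

  async acquire(): Promise<Disposable> {
    let release!: () => void;
    const prev = this.tail;
    this.tail = new Promise<void>(resolve => (release = resolve));
    await prev; // queue behind earlier holders
    return { [Symbol.dispose]: release }; // disposal admits the next waiter
  }
}

async function withLock<T>(lock: SketchAsyncLock, fn: () => Promise<T>) {
  using _guard = await lock.acquire(); // released on scope exit, even on throw
  return await fn();
}
```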
@ -1,9 +1,3 @@
export * from './awareness';
export * from './blob-indexeddb';
export * from './blob-sqlite';
export * from './blob-static';
export * from './consts';
export { StaticBlobStorage } from './blob-static';
export * from './list';
export * from './sync-indexeddb';
export * from './sync-sqlite';
export * from './workspace-factory';
@ -18,8 +18,8 @@ import {
  LOCAL_WORKSPACE_CREATED_BROADCAST_CHANNEL_KEY,
  LOCAL_WORKSPACE_LOCAL_STORAGE_KEY,
} from './consts';
import { IndexedDBSyncStorage } from './sync-indexeddb';
import { SQLiteSyncStorage } from './sync-sqlite';
import { IndexedDBDocStorage } from './doc-indexeddb';
import { SqliteDocStorage } from './doc-sqlite';

export class LocalWorkspaceListProvider implements WorkspaceListProvider {
  name = WorkspaceFlavour.LOCAL;
@ -62,9 +62,9 @@ export class LocalWorkspaceListProvider implements WorkspaceListProvider {
    const blobStorage = environment.isDesktop
      ? new SQLiteBlobStorage(id)
      : new IndexedDBBlobStorage(id);
    const syncStorage = environment.isDesktop
      ? new SQLiteSyncStorage(id)
      : new IndexedDBSyncStorage(id);
    const docStorage = environment.isDesktop
      ? new SqliteDocStorage(id)
      : new IndexedDBDocStorage(id);

    const workspace = new DocCollection({
      id: id,
@ -76,9 +76,9 @@ export class LocalWorkspaceListProvider implements WorkspaceListProvider {
    await initial(workspace, blobStorage);

    // save workspace to local storage
    await syncStorage.push(id, encodeStateAsUpdate(workspace.doc));
    await docStorage.doc.set(id, encodeStateAsUpdate(workspace.doc));
    for (const subdocs of workspace.doc.getSubdocs()) {
      await syncStorage.push(subdocs.guid, encodeStateAsUpdate(subdocs));
      await docStorage.doc.set(subdocs.guid, encodeStateAsUpdate(subdocs));
    }

    // save workspace id to local storage
@ -128,9 +128,9 @@ export class LocalWorkspaceListProvider implements WorkspaceListProvider {
  async getInformation(id: string): Promise<WorkspaceInfo | undefined> {
    // get information from root doc
    const storage = environment.isDesktop
      ? new SQLiteSyncStorage(id)
      : new IndexedDBSyncStorage(id);
    const data = await storage.pull(id, new Uint8Array([]));
      ? new SqliteDocStorage(id)
      : new IndexedDBDocStorage(id);
    const data = await storage.doc.get(id);

    if (!data) {
      return;
@ -141,7 +141,7 @@ export class LocalWorkspaceListProvider implements WorkspaceListProvider {
      schema: globalBlockSuiteSchema,
    });

    applyUpdate(bs.doc, data.data);
    applyUpdate(bs.doc, data);

    return {
      name: bs.meta.name,
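`getInformation` now hydrates a scratch `DocCollection` straight from `DocStorage.doc.get`, instead of the old `pull(id, state)` tuple. Condensed into a standalone sketch; the storage parameter is typed structurally for illustration, and `schema` stands in for `globalBlockSuiteSchema`:

```ts
import { DocCollection, type Schema } from '@blocksuite/store';
import { applyUpdate } from 'yjs';

async function readWorkspaceName(
  id: string,
  storage: { doc: { get(docId: string): Promise<Uint8Array | null> } },
  schema: Schema
) {
  const data = await storage.doc.get(id); // merged root-doc update, or null
  if (!data) return undefined;

  const bs = new DocCollection({ id, schema });
  applyUpdate(bs.doc, data); // hydrate the scratch collection
  return bs.meta.name;
}
```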
@ -1,118 +0,0 @@
import { mergeUpdates, type SyncStorage } from '@toeverything/infra';
import { type DBSchema, type IDBPDatabase, openDB } from 'idb';
import { diffUpdate, encodeStateVectorFromUpdate } from 'yjs';

export const dbVersion = 1;
export const DEFAULT_DB_NAME = 'affine-local';

type UpdateMessage = {
  timestamp: number;
  update: Uint8Array;
};

type WorkspacePersist = {
  id: string;
  updates: UpdateMessage[];
};

interface BlockSuiteBinaryDB extends DBSchema {
  workspace: {
    key: string;
    value: WorkspacePersist;
  };
  milestone: {
    key: string;
    value: unknown;
  };
}

export function upgradeDB(db: IDBPDatabase<BlockSuiteBinaryDB>) {
  db.createObjectStore('workspace', { keyPath: 'id' });
  db.createObjectStore('milestone', { keyPath: 'id' });
}

type ChannelMessage = {
  type: 'db-updated';
  payload: { docId: string; update: Uint8Array };
};

export class IndexedDBSyncStorage implements SyncStorage {
  name = 'indexeddb';
  dbName = DEFAULT_DB_NAME;
  mergeCount = 1;
  dbPromise: Promise<IDBPDatabase<BlockSuiteBinaryDB>> | null = null;
  // indexeddb could be shared between tabs, so we use broadcast channel to notify other tabs
  channel = new BroadcastChannel('indexeddb:' + this.workspaceId);

  constructor(private readonly workspaceId: string) {}

  getDb() {
    if (this.dbPromise === null) {
      this.dbPromise = openDB<BlockSuiteBinaryDB>(this.dbName, dbVersion, {
        upgrade: upgradeDB,
      });
    }
    return this.dbPromise;
  }

  async pull(
    docId: string,
    state: Uint8Array
  ): Promise<{ data: Uint8Array; state?: Uint8Array | undefined } | null> {
    const db = await this.getDb();
    const store = db
      .transaction('workspace', 'readonly')
      .objectStore('workspace');
    const data = await store.get(docId);

    if (!data) {
      return null;
    }

    const { updates } = data;
    const update = mergeUpdates(updates.map(({ update }) => update));

    const diff = state.length ? diffUpdate(update, state) : update;

    return { data: diff, state: encodeStateVectorFromUpdate(update) };
  }

  async push(docId: string, data: Uint8Array): Promise<void> {
    const db = await this.getDb();
    const store = db
      .transaction('workspace', 'readwrite')
      .objectStore('workspace');

    // TODO: maybe we do not need to get data every time
    const { updates } = (await store.get(docId)) ?? { updates: [] };
    let rows: UpdateMessage[] = [
      ...updates,
      { timestamp: Date.now(), update: data },
    ];
    if (this.mergeCount && rows.length >= this.mergeCount) {
      const merged = mergeUpdates(rows.map(({ update }) => update));
      rows = [{ timestamp: Date.now(), update: merged }];
    }
    await store.put({
      id: docId,
      updates: rows,
    });
    this.channel.postMessage({
      type: 'db-updated',
      payload: { docId, update: data },
    } satisfies ChannelMessage);
  }
  async subscribe(cb: (docId: string, data: Uint8Array) => void) {
    function onMessage(event: MessageEvent<ChannelMessage>) {
      const { type, payload } = event.data;
      if (type === 'db-updated') {
        const { docId, update } = payload;
        cb(docId, update);
      }
    }
    this.channel.addEventListener('message', onMessage);
    return () => {
      this.channel.removeEventListener('message', onMessage);
    };
  }
}
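The deleted `pull` above implemented delta sync with yjs state vectors: the caller sends what it already has, and the storage answers with only the missing ops plus its own state vector. The same handshake as a standalone sketch using public yjs APIs; the doc contents are illustrative:

```ts
import {
  Doc,
  diffUpdate,
  encodeStateAsUpdate,
  encodeStateVector,
  encodeStateVectorFromUpdate,
} from 'yjs';

const source = new Doc();
source.getText('t').insert(0, 'hello');
const fullUpdate = encodeStateAsUpdate(source); // what the storage holds

const requester = new Doc();
const requesterState = encodeStateVector(requester); // what the caller has

// Mirror of pull(): diff against the caller's state when one is supplied.
const diff = requesterState.length
  ? diffUpdate(fullUpdate, requesterState)
  : fullUpdate;
const state = encodeStateVectorFromUpdate(fullUpdate);
// => { data: diff, state }, the shape pull() returned above
```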
@ -1,53 +0,0 @@
import { apis } from '@affine/electron-api';
import { type SyncStorage } from '@toeverything/infra';
import { encodeStateVectorFromUpdate } from 'yjs';

export class SQLiteSyncStorage implements SyncStorage {
  name = 'sqlite';
  constructor(private readonly workspaceId: string) {
    if (!apis?.db) {
      throw new Error('sqlite datasource is not available');
    }
  }

  async pull(docId: string, _state: Uint8Array) {
    if (!apis?.db) {
      throw new Error('sqlite datasource is not available');
    }
    const update = await apis.db.getDocAsUpdates(
      this.workspaceId,
      this.workspaceId === docId ? undefined : docId
    );

    if (update) {
      if (
        update.byteLength === 0 ||
        (update.byteLength === 2 && update[0] === 0 && update[1] === 0)
      ) {
        return null;
      }

      return {
        data: update,
        state: encodeStateVectorFromUpdate(update),
      };
    }

    return null;
  }

  async push(docId: string, data: Uint8Array) {
    if (!apis?.db) {
      throw new Error('sqlite datasource is not available');
    }
    return apis.db.applyDocUpdate(
      this.workspaceId,
      data,
      this.workspaceId === docId ? undefined : docId
    );
  }

  async subscribe() {
    return () => {};
  }
}
@ -2,8 +2,8 @@ import type { ServiceCollection, WorkspaceFactory } from '@toeverything/infra';
import {
  AwarenessContext,
  AwarenessProvider,
  DocStorageImpl,
  LocalBlobStorage,
  LocalSyncStorage,
  RemoteBlobStorage,
  WorkspaceIdContext,
  WorkspaceScope,
@ -13,8 +13,8 @@ import { BroadcastChannelAwarenessProvider } from './awareness';
import { IndexedDBBlobStorage } from './blob-indexeddb';
import { SQLiteBlobStorage } from './blob-sqlite';
import { StaticBlobStorage } from './blob-static';
import { IndexedDBSyncStorage } from './sync-indexeddb';
import { SQLiteSyncStorage } from './sync-sqlite';
import { IndexedDBDocStorage } from './doc-indexeddb';
import { SqliteDocStorage } from './doc-sqlite';

export class LocalWorkspaceFactory implements WorkspaceFactory {
  name = 'local';
@ -23,12 +23,12 @@ export class LocalWorkspaceFactory implements WorkspaceFactory {
      services
        .scope(WorkspaceScope)
        .addImpl(LocalBlobStorage, SQLiteBlobStorage, [WorkspaceIdContext])
        .addImpl(LocalSyncStorage, SQLiteSyncStorage, [WorkspaceIdContext]);
        .addImpl(DocStorageImpl, SqliteDocStorage, [WorkspaceIdContext]);
    } else {
      services
        .scope(WorkspaceScope)
        .addImpl(LocalBlobStorage, IndexedDBBlobStorage, [WorkspaceIdContext])
        .addImpl(LocalSyncStorage, IndexedDBSyncStorage, [WorkspaceIdContext]);
        .addImpl(DocStorageImpl, IndexedDBDocStorage, [WorkspaceIdContext]);
    }

    services
@ -68,8 +68,8 @@ test('can enable share page', async ({ page, browser }) => {
    await page2.goto(url);
    await waitForEditorLoad(page2);
    const title = getBlockSuiteEditorTitle(page2);
    expect(await title.innerText()).toBe('TEST TITLE');
    expect(await page2.textContent('affine-paragraph')).toContain(
    await expect(title).toContainText('TEST TITLE');
    expect(page2.locator('affine-paragraph').first()).toContainText(
      'TEST CONTENT'
    );
  }
@ -112,7 +112,7 @@ test('share page with default edgeless', async ({ page, browser }) => {
  await expect(page.locator('affine-edgeless-root')).toBeVisible({
    timeout: 1000,
  });
  expect(await page2.textContent('affine-paragraph')).toContain(
  expect(page2.locator('affine-paragraph').first()).toContainText(
    'TEST CONTENT'
  );
  const editButton = page2.getByTestId('share-page-edit-button');
@ -155,7 +155,7 @@ test('can collaborate with other user and name should display when editing', asy
  await page2.waitForTimeout(200);
  {
    const title = getBlockSuiteEditorTitle(page2);
    expect(await title.innerText()).toBe('TEST TITLE');
    await expect(title).toHaveText('TEST TITLE');
    const typingPromise = (async () => {
      await page.keyboard.press('Enter', { delay: 50 });
      await page.keyboard.type('TEST CONTENT', { delay: 50 });
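The assertion changes above move from one-shot reads to Playwright's web-first assertions, which retry until the expectation holds or the timeout hits; note that web-first assertions return promises and should be awaited. A minimal contrast sketch with an illustrative selector:

```ts
import { expect, type Page } from '@playwright/test';

async function assertTitle(page: Page) {
  // before: reads once and fails immediately if the editor hasn't hydrated
  expect(await page.locator('doc-title').innerText()).toBe('TEST TITLE');

  // after: retries the locator until the text matches or the timeout hits
  await expect(page.locator('doc-title')).toHaveText('TEST TITLE');
}
```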
@ -1,6 +1,5 @@
/* eslint-disable unicorn/prefer-dom-node-dataset */
import { test } from '@affine-test/kit/playwright';
import { clickPageModeButton } from '@affine-test/kit/utils/editor';
import {
  openHomePage,
  openJournalsPage,
@ -28,7 +27,6 @@ import { expect } from '@playwright/test';

test.beforeEach(async ({ page }) => {
  await openHomePage(page);
  await clickPageModeButton(page);
  await clickNewPageButton(page);
  await waitForEmptyEditor(page);
  await ensurePagePropertiesVisible(page);
17
yarn.lock
17
yarn.lock
@ -470,6 +470,7 @@ __metadata:
    "@types/uuid": "npm:^9.0.8"
    async-call-rpc: "npm:^6.4.0"
    builder-util-runtime: "npm:^9.2.4"
    core-js: "npm:^3.36.1"
    cross-env: "npm:^7.0.3"
    electron: "npm:^29.0.1"
    electron-log: "npm:^5.1.1"
@ -826,6 +827,7 @@ __metadata:
    "@types/mixpanel-browser": "npm:^2.49.0"
    "@types/react": "npm:^18.2.60"
    "@types/react-dom": "npm:^18.2.19"
    core-js: "npm:^3.36.1"
    intl-segmenter-polyfill-rs: "npm:^0.1.7"
    mixpanel-browser: "npm:^2.49.0"
    react: "npm:^18.2.0"
@ -3921,6 +3923,13 @@ __metadata:
  languageName: node
  linkType: hard

"@datastructures-js/binary-search-tree@npm:^5.3.2":
  version: 5.3.2
  resolution: "@datastructures-js/binary-search-tree@npm:5.3.2"
  checksum: 10/05936b8710e7db5f6e748ffbcd857ca692ba5de8edb87988d5f093085df9e9a15af5caa1a9de020682703ba15f425e2fd513c540a1cb1da293bf0c67793cdae7
  languageName: node
  linkType: hard

"@develar/schema-utils@npm:~2.6.5":
  version: 2.6.5
  resolution: "@develar/schema-utils@npm:2.6.5"
@ -13492,6 +13501,7 @@ __metadata:
    "@blocksuite/lit": "npm:0.13.0-canary-202403140735-2367cd5"
    "@blocksuite/presets": "npm:0.13.0-canary-202403140735-2367cd5"
    "@blocksuite/store": "npm:0.13.0-canary-202403140735-2367cd5"
    "@datastructures-js/binary-search-tree": "npm:^5.3.2"
    "@testing-library/react": "npm:^14.2.1"
    async-call-rpc: "npm:^6.4.0"
    foxact: "npm:^0.2.31"
@ -18390,6 +18400,13 @@ __metadata:
  languageName: node
  linkType: hard

"core-js@npm:^3.36.1":
  version: 3.36.1
  resolution: "core-js@npm:3.36.1"
  checksum: 10/ce1e1bfc1034b6f2ff7c91077319e8abdd650ee606ffe6e80073e64ab9d8aad2d6a6d953461b01f331a6f796ad2fd766a3386b88aa371b45d44fa7c0b9913ce6
  languageName: node
  linkType: hard

"core-util-is@npm:~1.0.0":
  version: 1.0.3
  resolution: "core-util-is@npm:1.0.3"