chore: prohibit using mergeUpdates (#3701)

This commit is contained in:
Alex Yang 2023-08-11 11:55:17 -04:00 committed by GitHub
parent e9f4912665
commit 9639143df4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 142 additions and 94 deletions

View File

@ -31,6 +31,11 @@ const createPattern = packageName => [
message: 'Use `useNavigateHelper` instead',
importNames: ['useNavigate'],
},
{
group: ['yjs'],
message: 'Do not use this API because it has a bug',
importNames: ['mergeUpdates'],
},
];
const allPackages = [
@ -155,6 +160,11 @@ const config = {
message: 'Use `useNavigateHelper` instead',
importNames: ['useNavigate'],
},
{
group: ['yjs'],
message: 'Do not use this API because it has a bug',
importNames: ['mergeUpdates'],
},
],
},
],

View File

@ -24,7 +24,7 @@ import { currentPageIdAtom } from '@toeverything/infra/atom';
import { useAtom, useAtomValue, useSetAtom } from 'jotai';
import { useCallback, useState } from 'react';
import { useParams } from 'react-router-dom';
import * as Y from 'yjs';
import { applyUpdate, encodeStateAsUpdate } from 'yjs';
import { pageSettingFamily, setPageModeAtom } from '../../../../atoms';
import { useBlockSuiteMetaHelper } from '../../../../hooks/affine/use-block-suite-meta-helper';
@ -35,6 +35,7 @@ import { HeaderDropDownButton } from '../../../pure/header-drop-down-button';
import { usePageHelper } from '../../block-suite-page-list/utils';
import { LanguageMenu } from './language-menu';
import { MenuThemeModeSwitch } from './theme-mode-switch';
const CommonMenu = () => {
const content = (
<div
@ -117,8 +118,8 @@ export const PageMenu = ({ rename }: PageMenuProps) => {
const currentPageMeta = currentPage.meta;
const newPage = createPage();
await newPage.waitForLoaded();
const update = Y.encodeStateAsUpdate(currentPage.spaceDoc);
Y.applyUpdate(newPage.spaceDoc, update);
const update = encodeStateAsUpdate(currentPage.spaceDoc);
applyUpdate(newPage.spaceDoc, update);
setPageMeta(newPage.id, {
tags: currentPageMeta.tags,
favorite: currentPageMeta.favorite,

View File

@ -2,7 +2,7 @@ import path from 'node:path';
import { SqliteConnection } from '@affine/native';
import { afterEach, describe, expect, it, vi } from 'vitest';
import * as Y from 'yjs';
import { applyUpdate, Doc as YDoc } from 'yjs';
import { removeWithRetry } from '../../../../tests/utils';
import { copyToTemp, migrateToSubdocAndReplaceDatabase } from '../migration';
@ -41,14 +41,14 @@ describe('migrateToSubdocAndReplaceDatabase', () => {
expect(subdocUpdate).toBeDefined();
// apply updates
const rootDoc = new Y.Doc();
Y.applyUpdate(rootDoc, rootUpdate);
const rootDoc = new YDoc();
applyUpdate(rootDoc, rootUpdate);
// check if root doc has one subdoc
expect(rootDoc.subdocs.size).toBe(1);
// populates subdoc
Y.applyUpdate(rootDoc.subdocs.values().next().value, subdocUpdate);
applyUpdate(rootDoc.subdocs.values().next().value, subdocUpdate);
// check if root doc's meta is correct
const meta = rootDoc.getMap('meta').toJSON();
@ -59,9 +59,7 @@ describe('migrateToSubdocAndReplaceDatabase', () => {
expect(pageMeta.title).toBe('Welcome to AFFiNEd');
// get the subdoc through id
const subDoc = rootDoc
.getMap('spaces')
.get(`space:${pageMeta.id}`) as Y.Doc;
const subDoc = rootDoc.getMap('spaces').get(`space:${pageMeta.id}`) as YDoc;
expect(subDoc).toEqual(rootDoc.subdocs.values().next().value);
await db.close();

View File

@ -3,7 +3,7 @@ import path from 'node:path';
import fs from 'fs-extra';
import { v4 } from 'uuid';
import { afterEach, expect, test, vi } from 'vitest';
import * as Y from 'yjs';
import { Doc as YDoc, encodeStateAsUpdate } from 'yjs';
import { removeWithRetry } from '../../../../tests/utils';
import { dbSubjects } from '../subjects';
@ -21,18 +21,18 @@ afterEach(async () => {
await removeWithRetry(tmpDir);
});
let testYDoc: Y.Doc;
let testYSubDoc: Y.Doc;
let testYDoc: YDoc;
let testYSubDoc: YDoc;
function getTestUpdates() {
testYDoc = new Y.Doc();
testYDoc = new YDoc();
const yText = testYDoc.getText('test');
yText.insert(0, 'hello');
testYSubDoc = new Y.Doc();
testYSubDoc = new YDoc();
testYDoc.getMap('subdocs').set('test-subdoc', testYSubDoc);
const updates = Y.encodeStateAsUpdate(testYDoc);
const updates = encodeStateAsUpdate(testYDoc);
return updates;
}
@ -41,7 +41,7 @@ function getTestSubDocUpdates() {
const yText = testYSubDoc.getText('test');
yText.insert(0, 'hello');
const updates = Y.encodeStateAsUpdate(testYSubDoc);
const updates = encodeStateAsUpdate(testYSubDoc);
return updates;
}

View File

@ -1,11 +1,11 @@
import * as Y from 'yjs';
import { applyUpdate, Doc as YDoc, encodeStateAsUpdate, transact } from 'yjs';
export function mergeUpdate(updates: Uint8Array[]) {
const yDoc = new Y.Doc();
Y.transact(yDoc, () => {
const yDoc = new YDoc();
transact(yDoc, () => {
for (const update of updates) {
Y.applyUpdate(yDoc, update);
applyUpdate(yDoc, update);
}
});
return Y.encodeStateAsUpdate(yDoc);
return encodeStateAsUpdate(yDoc);
}

View File

@ -4,7 +4,7 @@ import { migrateToSubdoc } from '@affine/env/blocksuite';
import { SqliteConnection } from '@affine/native';
import fs from 'fs-extra';
import { nanoid } from 'nanoid';
import * as Y from 'yjs';
import { applyUpdate, Doc as YDoc, encodeStateAsUpdate } from 'yjs';
import { mainRPC } from '../main-rpc';
@ -13,11 +13,11 @@ export const migrateToSubdocAndReplaceDatabase = async (path: string) => {
await db.connect();
const rows = await db.getAllUpdates();
const originalDoc = new Y.Doc();
const originalDoc = new YDoc();
// 1. apply all updates to the root doc
rows.forEach(row => {
Y.applyUpdate(originalDoc, row.data);
applyUpdate(originalDoc, row.data);
});
// 2. migrate using migrateToSubdoc
@ -40,10 +40,10 @@ export const copyToTemp = async (path: string) => {
async function replaceRows(
db: SqliteConnection,
doc: Y.Doc,
doc: YDoc,
isRoot: boolean
): Promise<void> {
const migratedUpdates = Y.encodeStateAsUpdate(doc);
const migratedUpdates = encodeStateAsUpdate(doc);
const docId = isRoot ? undefined : doc.guid;
const rows = [{ data: migratedUpdates, docId: docId }];
await db.replaceUpdates(docId, rows);

View File

@ -2,7 +2,7 @@ import assert from 'node:assert';
import type { InsertRow } from '@affine/native';
import { debounce } from 'lodash-es';
import * as Y from 'yjs';
import { applyUpdate, Doc as YDoc } from 'yjs';
import { logger } from '../logger';
import type { YOrigin } from '../type';
@ -16,7 +16,7 @@ const FLUSH_MAX_WAIT_TIME = 10000;
// todo: trim db when it is too big
export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
role = 'secondary';
yDoc = new Y.Doc();
yDoc = new YDoc();
firstConnected = false;
destroyed = false;
@ -165,7 +165,7 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
}
};
const onSubdocs = ({ added }: { added: Set<Y.Doc> }) => {
const onSubdocs = ({ added }: { added: Set<YDoc> }) => {
added.forEach(subdoc => {
this.setupListener(subdoc.guid);
});
@ -214,7 +214,7 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
) => {
const doc = this.getDoc(docId);
if (doc) {
Y.applyUpdate(this.yDoc, data, origin);
applyUpdate(this.yDoc, data, origin);
} else {
logger.warn(
'[SecondaryWorkspaceSQLiteDB] applyUpdate: doc not found',

View File

@ -1,7 +1,7 @@
import type { InsertRow } from '@affine/native';
import { debounce } from 'lodash-es';
import { Subject } from 'rxjs';
import * as Y from 'yjs';
import { applyUpdate, Doc as YDoc, encodeStateAsUpdate } from 'yjs';
import { logger } from '../logger';
import type { YOrigin } from '../type';
@ -13,7 +13,7 @@ const TRIM_SIZE = 500;
export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
role = 'primary';
yDoc = new Y.Doc();
yDoc = new YDoc();
firstConnected = false;
update$ = new Subject<void>();
@ -78,7 +78,7 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
doc.subdocs.forEach(subdoc => {
this.setupListener(subdoc.guid);
});
const onSubdocs = ({ added }: { added: Set<Y.Doc> }) => {
const onSubdocs = ({ added }: { added: Set<YDoc> }) => {
logger.info('onSubdocs', this.workspaceId, docId, added);
added.forEach(subdoc => {
this.setupListener(subdoc.guid);
@ -126,7 +126,7 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
getDocAsUpdates = (docId?: string) => {
const doc = docId ? this.getDoc(docId) : this.yDoc;
if (doc) {
return Y.encodeStateAsUpdate(doc);
return encodeStateAsUpdate(doc);
}
return null;
};
@ -144,7 +144,7 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
// yjs-idb will always trim the db for the first time after DB is loaded
const doc = this.getDoc(docId);
if (doc) {
Y.applyUpdate(doc, data, origin);
applyUpdate(doc, data, origin);
} else {
logger.warn('[WorkspaceSQLiteDB] applyUpdate: doc not found', docId);
}

View File

@ -2,7 +2,8 @@ import { readFileSync } from 'fs';
import { dirname, resolve } from 'path';
import { fileURLToPath } from 'url';
import { describe, expect, test } from 'vitest';
import * as Y from 'yjs';
import type { Array as YArray, Map as YMap } from 'yjs';
import { applyUpdate, Doc } from 'yjs';
import { migrateToSubdoc } from '../blocksuite/index.js';
@ -11,8 +12,8 @@ const fixturePath = resolve(
'workspace.ydoc'
);
const yDocBuffer = readFileSync(fixturePath);
const doc = new Y.Doc();
Y.applyUpdate(doc, new Uint8Array(yDocBuffer));
const doc = new Doc();
applyUpdate(doc, new Uint8Array(yDocBuffer));
const migratedDoc = migrateToSubdoc(doc);
describe('subdoc', () => {
@ -23,8 +24,8 @@ describe('subdoc', () => {
for (let i = 0; i < length; i++) {
binary[i] = (json as any)[i];
}
const doc = new Y.Doc();
Y.applyUpdate(doc, binary);
const doc = new Doc();
applyUpdate(doc, binary);
{
// invoke data
doc.getMap('space:hello-world');
@ -32,7 +33,7 @@ describe('subdoc', () => {
}
const blocks = doc.getMap('space:hello-world').toJSON();
const newDoc = migrateToSubdoc(doc);
const subDoc = newDoc.getMap('spaces').get('space:hello-world') as Y.Doc;
const subDoc = newDoc.getMap('spaces').get('space:hello-world') as Doc;
const data = (subDoc.toJSON() as any).blocks;
Object.keys(data).forEach(id => {
if (id === 'xyWNqindHH') {
@ -50,23 +51,23 @@ describe('subdoc', () => {
test('Test fixture should be set correctly', () => {
const meta = doc.getMap('space:meta');
const versions = meta.get('versions') as Y.Map<unknown>;
const versions = meta.get('versions') as YMap<unknown>;
expect(versions.get('affine:code')).toBeTypeOf('number');
});
test('Meta data should be migrated correctly', () => {
const originalMeta = doc.getMap('space:meta');
const originalVersions = originalMeta.get('versions') as Y.Map<unknown>;
const originalVersions = originalMeta.get('versions') as YMap<unknown>;
const meta = migratedDoc.getMap('meta');
const blockVersions = meta.get('blockVersions') as Y.Map<unknown>;
const blockVersions = meta.get('blockVersions') as YMap<unknown>;
expect(meta.get('workspaceVersion')).toBe(1);
expect(blockVersions.get('affine:code')).toBe(
originalVersions.get('affine:code')
);
expect((meta.get('pages') as Y.Array<unknown>).length).toBe(
(originalMeta.get('pages') as Y.Array<unknown>).length
expect((meta.get('pages') as YArray<unknown>).length).toBe(
(originalMeta.get('pages') as YArray<unknown>).length
);
expect(blockVersions.get('affine:embed')).toBeUndefined();

View File

@ -1,5 +1,5 @@
import type { Schema } from '@blocksuite/store';
import * as Y from 'yjs';
import { Array as YArray, Doc as YDoc, Map as YMap } from 'yjs';
type XYWH = [number, number, number, number];
@ -7,10 +7,10 @@ function deserializeXYWH(xywh: string): XYWH {
return JSON.parse(xywh) as XYWH;
}
function migrateDatabase(data: Y.Map<unknown>) {
function migrateDatabase(data: YMap<unknown>) {
data.delete('prop:mode');
data.set('prop:views', new Y.Array());
const columns = (data.get('prop:columns') as Y.Array<unknown>).toJSON() as {
data.set('prop:views', new YArray());
const columns = (data.get('prop:columns') as YArray<unknown>).toJSON() as {
id: string;
name: string;
hide: boolean;
@ -31,7 +31,7 @@ function migrateDatabase(data: Y.Map<unknown>) {
mode: 'table',
},
];
const cells = (data.get('prop:cells') as Y.Map<unknown>).toJSON() as Record<
const cells = (data.get('prop:cells') as YMap<unknown>).toJSON() as Record<
string,
Record<
string,
@ -90,7 +90,7 @@ function migrateDatabase(data: Y.Map<unknown>) {
function runBlockMigration(
flavour: string,
data: Y.Map<unknown>,
data: YMap<unknown>,
version: number
) {
if (flavour === 'affine:frame') {
@ -99,12 +99,12 @@ function runBlockMigration(
}
if (flavour === 'affine:surface' && version <= 3) {
if (data.has('elements')) {
const elements = data.get('elements') as Y.Map<unknown>;
const elements = data.get('elements') as YMap<unknown>;
migrateSurface(elements);
data.set('prop:elements', elements.clone());
data.delete('elements');
} else {
data.set('prop:elements', new Y.Map());
data.set('prop:elements', new YMap());
}
}
if (flavour === 'affine:embed') {
@ -116,8 +116,8 @@ function runBlockMigration(
}
}
function migrateSurface(data: Y.Map<unknown>) {
for (const [, value] of <IterableIterator<[string, Y.Map<unknown>]>>(
function migrateSurface(data: YMap<unknown>) {
for (const [, value] of <IterableIterator<[string, YMap<unknown>]>>(
data.entries()
)) {
if (value.get('type') === 'connector') {
@ -126,7 +126,7 @@ function migrateSurface(data: Y.Map<unknown>) {
}
}
function migrateSurfaceConnector(data: Y.Map<any>) {
function migrateSurfaceConnector(data: YMap<any>) {
let id = data.get('startElement')?.id;
const controllers = data.get('controllers');
const length = controllers.length;
@ -164,7 +164,7 @@ function migrateSurfaceConnector(data: Y.Map<any>) {
data.delete('xywh');
}
function updateBlockVersions(versions: Y.Map<number>) {
function updateBlockVersions(versions: YMap<number>) {
const frameVersion = versions.get('affine:frame');
if (frameVersion !== undefined) {
versions.set('affine:note', frameVersion);
@ -181,12 +181,12 @@ function updateBlockVersions(versions: Y.Map<number>) {
}
}
function migrateMeta(oldDoc: Y.Doc, newDoc: Y.Doc) {
function migrateMeta(oldDoc: YDoc, newDoc: YDoc) {
const originalMeta = oldDoc.getMap('space:meta');
const originalVersions = originalMeta.get('versions') as Y.Map<number>;
const originalPages = originalMeta.get('pages') as Y.Array<Y.Map<unknown>>;
const originalVersions = originalMeta.get('versions') as YMap<number>;
const originalPages = originalMeta.get('pages') as YArray<YMap<unknown>>;
const meta = newDoc.getMap('meta');
const pages = new Y.Array();
const pages = new YArray();
const blockVersions = originalVersions.clone();
meta.set('workspaceVersion', 1);
@ -196,7 +196,7 @@ function migrateMeta(oldDoc: Y.Doc, newDoc: Y.Doc) {
updateBlockVersions(blockVersions);
const mapList = originalPages.map(page => {
const map = new Y.Map();
const map = new YMap();
Array.from(page.entries())
.filter(([key]) => key !== 'subpageIds')
.forEach(([key, value]) => {
@ -207,16 +207,16 @@ function migrateMeta(oldDoc: Y.Doc, newDoc: Y.Doc) {
pages.push(mapList);
}
function migrateBlocks(oldDoc: Y.Doc, newDoc: Y.Doc) {
function migrateBlocks(oldDoc: YDoc, newDoc: YDoc) {
const spaces = newDoc.getMap('spaces');
const originalMeta = oldDoc.getMap('space:meta');
const originalVersions = originalMeta.get('versions') as Y.Map<number>;
const originalPages = originalMeta.get('pages') as Y.Array<Y.Map<unknown>>;
const originalVersions = originalMeta.get('versions') as YMap<number>;
const originalPages = originalMeta.get('pages') as YArray<YMap<unknown>>;
originalPages.forEach(page => {
const id = page.get('id') as string;
const spaceId = id.startsWith('space:') ? id : `space:${id}`;
const originalBlocks = oldDoc.getMap(spaceId) as Y.Map<unknown>;
const subdoc = new Y.Doc();
const originalBlocks = oldDoc.getMap(spaceId) as YMap<unknown>;
const subdoc = new YDoc();
spaces.set(spaceId, subdoc);
const blocks = subdoc.getMap('blocks');
Array.from(originalBlocks.entries()).forEach(([key, value]) => {
@ -231,19 +231,19 @@ function migrateBlocks(oldDoc: Y.Doc, newDoc: Y.Doc) {
});
}
export function migrateToSubdoc(doc: Y.Doc): Y.Doc {
export function migrateToSubdoc(doc: YDoc): YDoc {
const needMigration = Array.from(doc.getMap('space:meta').keys()).length > 0;
if (!needMigration) {
return doc;
}
const output = new Y.Doc();
const output = new YDoc();
migrateMeta(doc, output);
migrateBlocks(doc, output);
return output;
}
export async function migrateDatabaseBlockTo3(rootDoc: Y.Doc, schema: Schema) {
const spaces = rootDoc.getMap('spaces') as Y.Map<any>;
export async function migrateDatabaseBlockTo3(rootDoc: YDoc, schema: Schema) {
const spaces = rootDoc.getMap('spaces') as YMap<any>;
spaces.forEach(space => {
schema.upgradePage(
{
@ -261,7 +261,7 @@ export async function migrateDatabaseBlockTo3(rootDoc: Y.Doc, schema: Schema) {
space
);
});
const meta = rootDoc.getMap('meta') as Y.Map<unknown>;
const versions = meta.get('blockVersions') as Y.Map<number>;
const meta = rootDoc.getMap('meta') as YMap<unknown>;
const versions = meta.get('blockVersions') as YMap<number>;
versions.set('affine:database', 3);
}

View File

@ -2,13 +2,44 @@ import assert from 'node:assert';
import { beforeEach, describe, test } from 'node:test';
import { encoding } from 'lib0';
import * as Y from 'yjs';
import { applyUpdate, Doc } from 'yjs';
import { Storage } from '../index.js';
// update binary by y.doc.text('content').insert('hello world')
// prettier-ignore
let init = Buffer.from([1,1,160,238,169,240,10,0,4,1,7,99,111,110,116,101,110,116,11,104,101,108,108,111,32,119,111,114,108,100,0])
let init = Buffer.from([
1,
1,
160,
238,
169,
240,
10,
0,
4,
1,
7,
99,
111,
110,
116,
101,
110,
116,
11,
104,
101,
108,
108,
111,
32,
119,
111,
114,
108,
100,
0])
describe('Test jwst storage binding', () => {
/** @type { Storage } */
let storage;
@ -48,8 +79,8 @@ describe('Test jwst storage binding', () => {
const update = await storage.load(workspace.doc.guid);
assert(update !== null);
const doc = new Y.Doc();
Y.applyUpdate(doc, update);
const doc = new Doc();
applyUpdate(doc, update);
let text = doc.getText('content');
assert.equal(text.toJSON(), 'hello world');
@ -67,8 +98,8 @@ describe('Test jwst storage binding', () => {
}
const update2 = await storage.load(workspace.doc.guid);
const doc2 = new Y.Doc();
Y.applyUpdate(doc2, update2);
const doc2 = new Doc();
applyUpdate(doc2, update2);
text = doc2.getText('content');
assert.equal(text.toJSON(), 'hello my world!');
@ -80,8 +111,8 @@ describe('Test jwst storage binding', () => {
const update = await storage.load(workspace.doc.guid);
assert(update !== null);
const doc = new Y.Doc();
Y.applyUpdate(doc, update);
const doc = new Doc();
applyUpdate(doc, update);
let text = doc.getText('content');
assert.equal(text.toJSON(), 'hello world');
@ -103,8 +134,8 @@ describe('Test jwst storage binding', () => {
}
const update2 = await storage.load(workspace.doc.guid);
const doc2 = new Y.Doc();
Y.applyUpdate(doc2, update2);
const doc2 = new Doc();
applyUpdate(doc2, update2);
text = doc2.getText('content');
assert.equal(text.toJSON(), 'hello my world!');

View File

@ -435,15 +435,13 @@ describe('utils', () => {
});
test('overwrite binary', async () => {
await overwriteBinary('test', new Uint8Array([1, 2, 3]));
const doc = new Doc();
const map = doc.getMap();
map.set('1', 1);
await overwriteBinary('test', new Uint8Array(encodeStateAsUpdate(doc)));
{
const binary = await downloadBinary('test');
expect(binary).toEqual(new Uint8Array([1, 2, 3]));
}
await overwriteBinary('test', new Uint8Array([0, 0]));
{
const binary = await downloadBinary('test');
expect(binary).toEqual(new Uint8Array([0, 0]));
expect(binary).toEqual(new Uint8Array(encodeStateAsUpdate(doc)));
}
});
});

View File

@ -6,7 +6,7 @@ import {
import { assertExists } from '@blocksuite/global/utils';
import { openDB } from 'idb';
import type { Doc } from 'yjs';
import { diffUpdate, mergeUpdates } from 'yjs';
import { diffUpdate } from 'yjs';
import {
type BlockSuiteBinaryDB,
@ -16,6 +16,7 @@ import {
type UpdateMessage,
upgradeDB,
} from './shared';
import { mergeUpdates } from './utils';
let mergeCount = 500;

View File

@ -1,12 +1,20 @@
import type { IDBPDatabase } from 'idb';
import { openDB } from 'idb';
import { mergeUpdates } from 'yjs';
import { applyUpdate, Doc, encodeStateAsUpdate } from 'yjs';
import type { BlockSuiteBinaryDB, OldYjsDB, UpdateMessage } from './shared';
import { dbVersion, DEFAULT_DB_NAME, upgradeDB } from './shared';
let allDb: IDBDatabaseInfo[];
/**
 * Merge a list of yjs binary updates into a single update by replaying
 * them, in order, onto a fresh `Doc` and re-encoding its full state.
 *
 * NOTE(review): this local helper exists as the sanctioned replacement for
 * yjs' own `mergeUpdates`, which this commit bans via an eslint
 * `no-restricted-imports` rule ("Do not use this API because it has a bug").
 *
 * @param updates - updates to merge; applied in array order.
 * @returns a single update encoding the merged document state.
 */
export function mergeUpdates(updates: Uint8Array[]) {
  const doc = new Doc();
  for (const update of updates) {
    applyUpdate(doc, update);
  }
  return encodeStateAsUpdate(doc);
}
async function databaseExists(name: string): Promise<boolean> {
return new Promise(resolve => {
const req = indexedDB.open(name);