feat: sqlite subdocument (#2816)

Co-authored-by: Alex Yang <himself65@outlook.com>
This commit is contained in:
Peng Xiao 2023-06-27 15:40:37 +08:00 committed by GitHub
parent 4307e1eb6b
commit 05452bb297
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
30 changed files with 842 additions and 426 deletions

View File

@ -12,6 +12,7 @@ import type { PlaywrightTestConfig } from '@playwright/test';
*/
const config: PlaywrightTestConfig = {
testDir: './tests',
testIgnore: '**/lib/**',
fullyParallel: true,
timeout: process.env.CI ? 50_000 : 30_000,
use: {

View File

@ -20,14 +20,31 @@ afterEach(async () => {
await fs.remove(tmpDir);
});
let testYDoc: Y.Doc;
let testYSubDoc: Y.Doc;
function getTestUpdates() {
const testYDoc = new Y.Doc();
testYDoc = new Y.Doc();
const yText = testYDoc.getText('test');
yText.insert(0, 'hello');
testYSubDoc = new Y.Doc();
testYDoc.getMap('subdocs').set('test-subdoc', testYSubDoc);
const updates = Y.encodeStateAsUpdate(testYDoc);
return updates;
}
// Build an encoded Yjs update for the shared subdoc fixture by prepending
// 'hello' to its 'test' text.
function getTestSubDocUpdates() {
  const text = testYSubDoc.getText('test');
  text.insert(0, 'hello');
  return Y.encodeStateAsUpdate(testYSubDoc);
}
test('can create new db file if not exists', async () => {
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
const workspaceId = v4();
@ -68,6 +85,31 @@ test('on applyUpdate (from renderer), will trigger update', async () => {
await db.destroy();
});
// Verifies that applying a subdoc update (origin 'renderer') both emits on
// update$ and persists the update rows keyed by the subdoc's guid.
test('on applyUpdate (from renderer, subdoc), will trigger update', async () => {
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
const workspaceId = v4();
const onUpdate = vi.fn();
const insertUpdates = vi.fn();
const db = await openWorkspaceDatabase(workspaceId);
// Seed the root doc first so the subdoc exists inside db's yDoc.
db.applyUpdate(getTestUpdates(), 'renderer');
// Spy on the low-level insert to observe exactly what gets persisted.
db.db!.insertUpdates = insertUpdates;
db.update$.subscribe(onUpdate);
const subdocUpdates = getTestSubDocUpdates();
// Target the subdoc by its guid.
db.applyUpdate(subdocUpdates, 'renderer', testYSubDoc.guid);
expect(onUpdate).toHaveBeenCalled();
// Persisted rows must carry the subdoc guid, not the root's (undefined) id.
expect(insertUpdates).toHaveBeenCalledWith([
{
docId: testYSubDoc.guid,
data: subdocUpdates,
},
]);
await db.destroy();
});
test('on applyUpdate (from external), will trigger update & send external update event', async () => {
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
const workspaceId = v4();

View File

@ -1,4 +1,4 @@
import { SqliteConnection } from '@affine/native';
import { type InsertRow, SqliteConnection } from '@affine/native';
import { logger } from '../logger';
@ -79,21 +79,34 @@ export abstract class BaseSQLiteAdapter {
}
}
async getUpdates() {
async getUpdates(docId?: string) {
try {
if (!this.db) {
logger.warn(`${this.path} is not connected`);
return [];
}
return await this.db.getUpdates();
return await this.db.getUpdates(docId);
} catch (error) {
logger.error('getUpdates', error);
return [];
}
}
// Fetch every persisted update row (root doc and all subdocs alike).
// Degrades to an empty list when the DB is not connected or the query fails.
async getAllUpdates() {
  if (!this.db) {
    logger.warn(`${this.path} is not connected`);
    return [];
  }
  try {
    return await this.db.getAllUpdates();
  } catch (error) {
    logger.error('getAllUpdates', error);
    return [];
  }
}
// add a single update to SQLite
async addUpdateToSQLite(updates: Uint8Array[]) {
async addUpdateToSQLite(updates: InsertRow[]) {
// batch write instead write per key stroke?
try {
if (!this.db) {

View File

@ -7,13 +7,17 @@ export * from './ensure-db';
export * from './subjects';
export const dbHandlers = {
getDocAsUpdates: async (id: string) => {
const workspaceDB = await ensureSQLiteDB(id);
return workspaceDB.getDocAsUpdates();
getDocAsUpdates: async (workspaceId: string, subdocId?: string) => {
const workspaceDB = await ensureSQLiteDB(workspaceId);
return workspaceDB.getDocAsUpdates(subdocId);
},
applyDocUpdate: async (id: string, update: Uint8Array) => {
const workspaceDB = await ensureSQLiteDB(id);
return workspaceDB.applyUpdate(update);
applyDocUpdate: async (
workspaceId: string,
update: Uint8Array,
subdocId?: string
) => {
const workspaceDB = await ensureSQLiteDB(workspaceId);
return workspaceDB.applyUpdate(update, 'renderer', subdocId);
},
addBlob: async (workspaceId: string, key: string, data: Uint8Array) => {
const workspaceDB = await ensureSQLiteDB(workspaceId);
@ -38,7 +42,11 @@ export const dbHandlers = {
export const dbEvents = {
onExternalUpdate: (
fn: (update: { workspaceId: string; update: Uint8Array }) => void
fn: (update: {
workspaceId: string;
update: Uint8Array;
docId?: string;
}) => void
) => {
const sub = dbSubjects.externalUpdate.subscribe(fn);
return () => {

View File

@ -1,6 +1,6 @@
import assert from 'node:assert';
import type { SqliteConnection } from '@affine/native';
import type { InsertRow } from '@affine/native';
import { debounce } from 'lodash-es';
import * as Y from 'yjs';
@ -8,19 +8,19 @@ import { logger } from '../logger';
import type { YOrigin } from '../type';
import { getWorkspaceMeta } from '../workspace';
import { BaseSQLiteAdapter } from './base-db-adapter';
import { mergeUpdate } from './merge-update';
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
const FLUSH_WAIT_TIME = 5000;
const FLUSH_MAX_WAIT_TIME = 10000;
// todo: trim db when it is too big
export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
role = 'secondary';
yDoc = new Y.Doc();
firstConnected = false;
destroyed = false;
updateQueue: Uint8Array[] = [];
updateQueue: { data: Uint8Array; docId?: string }[] = [];
unsubscribers = new Set<() => void>();
@ -29,10 +29,23 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
public upstream: WorkspaceSQLiteDB
) {
super(path);
this.setupAndListen();
this.init();
logger.debug('[SecondaryWorkspaceSQLiteDB] created', this.workspaceId);
}
// Resolve a doc by id: undefined/empty → the root doc; otherwise search the
// root's subdocs for a matching guid. Returns null when nothing matches.
getDoc(docId?: string) {
  if (!docId) {
    return this.yDoc;
  }
  // Linear scan; the subdoc set is expected to be small, so no caching.
  const match = [...this.yDoc.subdocs].find(subdoc => subdoc.guid === docId);
  return match ?? null;
}
override async destroy() {
await this.flushUpdateQueue();
this.unsubscribers.forEach(unsub => unsub());
@ -47,7 +60,7 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
// do not update db immediately, instead, push to a queue
// and flush the queue in a future time
async addUpdateToUpdateQueue(db: SqliteConnection, update: Uint8Array) {
async addUpdateToUpdateQueue(update: InsertRow) {
this.updateQueue.push(update);
await this.debouncedFlush();
}
@ -101,55 +114,82 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
}
}
setupAndListen() {
if (this.firstConnected) {
setupListener(docId?: string) {
const doc = this.getDoc(docId);
if (!doc) {
return;
}
this.firstConnected = true;
const onUpstreamUpdate = (update: Uint8Array, origin: YOrigin) => {
if (origin === 'renderer') {
// update to upstream yDoc should be replicated to self yDoc
this.applyUpdate(update, 'upstream');
this.applyUpdate(update, 'upstream', docId);
}
};
const onSelfUpdate = async (update: Uint8Array, origin: YOrigin) => {
// for self update from upstream, we need to push it to external DB
if (origin === 'upstream' && this.db) {
await this.addUpdateToUpdateQueue(this.db, update);
if (origin === 'upstream') {
await this.addUpdateToUpdateQueue({
data: update,
docId,
});
}
if (origin === 'self') {
this.upstream.applyUpdate(update, 'external');
this.upstream.applyUpdate(update, 'external', docId);
}
};
const onSubdocs = ({ added }: { added: Set<Y.Doc> }) => {
added.forEach(subdoc => {
this.setupListener(subdoc.guid);
});
};
// listen to upstream update
this.upstream.yDoc.on('update', onUpstreamUpdate);
this.yDoc.on('update', onSelfUpdate);
this.yDoc.on('subdocs', onSubdocs);
this.unsubscribers.add(() => {
this.upstream.yDoc.off('update', onUpstreamUpdate);
this.yDoc.off('update', onSelfUpdate);
});
this.run(() => {
// apply all updates from upstream
const upstreamUpdate = this.upstream.getDocAsUpdates();
// to initialize the yDoc, we need to apply all updates from the db
this.applyUpdate(upstreamUpdate, 'upstream');
})
.then(() => {
logger.debug('run success');
})
.catch(err => {
logger.error('run error', err);
this.yDoc.off('subdocs', onSubdocs);
});
}
applyUpdate = (data: Uint8Array, origin: YOrigin = 'upstream') => {
// One-time initialization: wire listeners, then replay the upstream
// (primary) yDoc state — root doc first, then every subdoc.
init() {
  if (this.firstConnected) {
    return;
  }
  this.firstConnected = true;
  this.setupListener();
  // apply all updates from upstream
  // we assume here that the upstream ydoc is already sync'ed
  const syncUpstreamDoc = (docId?: string) => {
    const update = this.upstream.getDocAsUpdates(docId);
    if (update) {
      // Fix: forward `docId` so a subdoc snapshot is applied to the matching
      // subdoc. Previously the id was dropped, so every subdoc's update was
      // merged into the root doc instead.
      this.applyUpdate(update, 'upstream', docId);
    }
  };
  // Root first — applying it materializes the subdocs that the per-guid
  // lookup in getDoc() relies on.
  syncUpstreamDoc();
  this.upstream.yDoc.subdocs.forEach(subdoc => {
    syncUpstreamDoc(subdoc.guid);
  });
}
// Apply a Yjs update to this secondary copy. `docId` selects a subdocument;
// when omitted, the root doc is the target.
applyUpdate = (
  data: Uint8Array,
  origin: YOrigin = 'upstream',
  docId?: string
) => {
  const doc = this.getDoc(docId);
  if (doc) {
    // Fix: apply to the resolved doc. The previous code always applied to
    // the root `this.yDoc`, so subdoc updates were merged into the wrong
    // document (compare the primary adapter's applyUpdate, which uses the
    // resolved doc).
    Y.applyUpdate(doc, data, origin);
  } else {
    logger.warn('applyUpdate: doc not found', docId);
  }
};
// TODO: have a better solution to handle blobs
@ -186,23 +226,33 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
async pull() {
const start = performance.now();
assert(this.upstream.db, 'upstream db should be connected');
const updates = await this.run(async () => {
const rows = await this.run(async () => {
// TODO: no need to get all updates, just get the latest ones (using a cursor, etc)?
await this.syncBlobs();
return (await this.getUpdates()).map(update => update.data);
return await this.getAllUpdates();
});
if (!updates || this.destroyed) {
if (!rows || this.destroyed) {
return;
}
const merged = mergeUpdate(updates);
this.applyUpdate(merged, 'self');
// apply root doc first
rows.forEach(row => {
if (!row.docId) {
this.applyUpdate(row.data, 'self');
}
});
rows.forEach(row => {
if (row.docId) {
this.applyUpdate(row.data, 'self', row.docId);
}
});
logger.debug(
'pull external updates',
this.path,
updates.length,
rows.length,
(performance.now() - start).toFixed(2),
'ms'
);

View File

@ -1,5 +1,9 @@
import { Subject } from 'rxjs';
export const dbSubjects = {
externalUpdate: new Subject<{ workspaceId: string; update: Uint8Array }>(),
externalUpdate: new Subject<{
workspaceId: string;
update: Uint8Array;
docId?: string;
}>(),
};

View File

@ -1,3 +1,5 @@
import type { InsertRow } from '@affine/native';
import { debounce } from 'lodash-es';
import { Subject } from 'rxjs';
import * as Y from 'yjs';
@ -5,9 +7,10 @@ import { logger } from '../logger';
import type { YOrigin } from '../type';
import { getWorkspaceMeta } from '../workspace';
import { BaseSQLiteAdapter } from './base-db-adapter';
import { mergeUpdate } from './merge-update';
import { dbSubjects } from './subjects';
const TRIM_SIZE = 500;
export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
role = 'primary';
yDoc = new Y.Doc();
@ -28,33 +31,76 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
this.firstConnected = false;
}
// Map an optional doc id to a Y.Doc: no id → root doc; an id → the subdoc
// with that guid, or null when it does not exist (yet).
getDoc(docId?: string) {
  if (!docId) {
    return this.yDoc;
  }
  // this should be pretty fast and we don't need to cache it
  for (const candidate of this.yDoc.subdocs.values()) {
    if (candidate.guid === docId) {
      return candidate;
    }
  }
  return null;
}
getWorkspaceName = () => {
return this.yDoc.getMap('space:meta').get('name') as string;
return this.yDoc.getMap('meta').get('name') as string;
};
// Attach Yjs listeners to the doc identified by `docId` (root doc when
// undefined): persist renderer-origin updates to SQLite, re-broadcast
// external-origin updates to renderer subscribers, and recursively wire up
// newly added subdocs.
setupListener(docId?: string) {
const doc = this.getDoc(docId);
if (doc) {
const onUpdate = async (update: Uint8Array, origin: YOrigin) => {
// Row is tagged with this doc's id so updates can be replayed per-doc.
const insertRows = [{ data: update, docId }];
if (origin === 'renderer') {
await this.addUpdateToSQLite(insertRows);
} else if (origin === 'external') {
// Notify renderer subscribers, then persist the same update.
dbSubjects.externalUpdate.next({
workspaceId: this.workspaceId,
update,
docId,
});
await this.addUpdateToSQLite(insertRows);
logger.debug('external update', this.workspaceId);
}
};
const onSubdocs = ({ added }: { added: Set<Y.Doc> }) => {
// Newly created subdocs need their own listeners (recursive).
added.forEach(subdoc => {
this.setupListener(subdoc.guid);
});
};
// NOTE(review): these listeners are never detached here — presumably the
// doc lives as long as this adapter; confirm there is no leak on destroy.
doc.on('update', onUpdate);
doc.on('subdocs', onSubdocs);
} else {
logger.error('setupListener: doc not found', docId);
}
}
async init() {
const db = await super.connectIfNeeded();
if (!this.firstConnected) {
this.yDoc.on('update', async (update: Uint8Array, origin: YOrigin) => {
if (origin === 'renderer') {
await this.addUpdateToSQLite([update]);
} else if (origin === 'external') {
dbSubjects.externalUpdate.next({
workspaceId: this.workspaceId,
update,
});
await this.addUpdateToSQLite([update]);
logger.debug('external update', this.workspaceId);
}
});
this.setupListener();
}
const updates = await this.getUpdates();
const merged = mergeUpdate(updates.map(update => update.data));
const updates = await this.getAllUpdates();
// to initialize the yDoc, we need to apply all updates from the db
this.applyUpdate(merged, 'self');
// apply root first (without ID).
// subdoc will be available after root is applied
updates.forEach(update => {
if (!update.docId) {
this.applyUpdate(update.data, 'self');
}
});
// then, for all subdocs, apply the updates
updates.forEach(update => {
if (update.docId) {
this.applyUpdate(update.data, 'self', update.docId);
}
});
this.firstConnected = true;
this.update$.next();
@ -62,18 +108,32 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
return db;
}
getDocAsUpdates = () => {
return Y.encodeStateAsUpdate(this.yDoc);
// Unlike getUpdates (which reads persisted rows from SQLite), this encodes
// the current in-memory state of the yDoc — or one of its subdocs — as a
// single Yjs update. Returns null when the requested subdoc does not exist.
getDocAsUpdates = (docId?: string) => {
  const target = docId ? this.getDoc(docId) : this.yDoc;
  return target ? Y.encodeStateAsUpdate(target) : null;
};
// non-blocking and use yDoc to validate the update
// after that, the update is added to the db
applyUpdate = (data: Uint8Array, origin: YOrigin = 'renderer') => {
applyUpdate = (
data: Uint8Array,
origin: YOrigin = 'renderer',
docId?: string
) => {
// todo: trim the updates when the number of records is too large
// 1. store the current ydoc state in the db
// 2. then delete the old updates
// yjs-idb will always trim the db for the first time after DB is loaded
Y.applyUpdate(this.yDoc, data, origin);
const doc = this.getDoc(docId);
if (doc) {
Y.applyUpdate(doc, data, origin);
} else {
logger.warn('applyUpdate: doc not found', docId);
}
};
override async addBlob(key: string, value: Uint8Array) {
@ -87,10 +147,30 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
await super.deleteBlob(key);
}
override async addUpdateToSQLite(data: Uint8Array[]) {
override async addUpdateToSQLite(data: InsertRow[]) {
this.update$.next();
data.forEach(row => {
this.trimWhenNecessary(row.docId)?.catch(err => {
logger.error('trimWhenNecessary failed', err);
});
});
await super.addUpdateToSQLite(data);
}
// Debounced (1s) compaction: once a doc accumulates more than TRIM_SIZE
// update rows, replace them all with a single snapshot of the current
// in-memory doc state to keep the updates table small.
trimWhenNecessary = debounce(async (docId?: string) => {
if (this.firstConnected) {
const count = (await this.db?.getUpdatesCount(docId)) ?? 0;
if (count > TRIM_SIZE) {
logger.debug(`trim ${this.workspaceId}:${docId} ${count}`);
// Snapshot of the in-memory doc; null when the doc cannot be resolved.
const update = this.getDocAsUpdates(docId);
if (update) {
const insertRows = [{ data: update, docId }];
// replaceUpdates deletes the doc's old rows and inserts the snapshot
// inside one transaction (see the Rust replace_updates implementation).
await this.db?.replaceUpdates(docId, insertRows);
logger.debug(`trim ${this.workspaceId}:${docId} successfully`);
}
}
}
}, 1000);
}
export async function openWorkspaceDatabase(workspaceId: string) {

View File

@ -34,7 +34,6 @@ export function registerProtocol() {
const url = request.url.replace(/^file:\/\//, '');
const realpath = toAbsolutePath(url);
callback(realpath);
console.log('interceptFileProtocol realpath', request.url, realpath);
return true;
});

View File

@ -71,7 +71,7 @@ beforeEach(async () => {
.register(AffineSchemas)
.register(__unstableSchemas);
const initPage = async (page: Page) => {
await page.waitForLoaded()
await page.waitForLoaded();
expect(page).not.toBeNull();
assertExists(page);
const pageBlockId = page.addBlock('affine:page', {

View File

@ -9,7 +9,7 @@ import { useAtom, useAtomValue } from 'jotai';
import Link from 'next/link';
import { useRouter } from 'next/router';
import type { ReactElement } from 'react';
import { Suspense, useEffect } from 'react';
import { Suspense, useCallback, useEffect } from 'react';
import {
publicPageBlockSuiteAtom,
@ -17,7 +17,10 @@ import {
publicWorkspacePageIdAtom,
} from '../../../atoms/public-workspace';
import { BlockSuiteEditorHeader } from '../../../components/blocksuite/workspace-header';
import { PageDetailEditor } from '../../../components/page-detail-editor';
import {
PageDetailEditor,
type PageDetailEditorProps,
} from '../../../components/page-detail-editor';
import { WorkspaceAvatar } from '../../../components/pure/footer';
import { PageLoading } from '../../../components/pure/loading';
import { useRouterHelper } from '../../../hooks/use-router-helper';
@ -68,6 +71,19 @@ const PublicWorkspaceDetailPageInner = (): ReactElement => {
const [name] = useBlockSuiteWorkspaceName(blockSuiteWorkspace);
const [avatar] = useBlockSuiteWorkspaceAvatarUrl(blockSuiteWorkspace);
const pageTitle = blockSuiteWorkspace.meta.getPageMeta(pageId)?.title;
const onLoad = useCallback<NonNullable<PageDetailEditorProps['onLoad']>>(
(_, editor) => {
const { page } = editor;
page.awarenessStore.setReadonly(page, true);
const dispose = editor.slots.pageLinkClicked.on(({ pageId }) => {
return openPage(blockSuiteWorkspace.id, pageId);
});
return () => {
dispose.dispose();
};
},
[blockSuiteWorkspace.id, openPage]
);
return (
<>
<PublicQuickSearch workspace={publicWorkspace} />
@ -97,16 +113,7 @@ const PublicWorkspaceDetailPageInner = (): ReactElement => {
isPublic={true}
pageId={pageId}
workspace={publicWorkspace}
onLoad={(_, editor) => {
const { page } = editor;
page.awarenessStore.setReadonly(page, true);
const dispose = editor.slots.pageLinkClicked.on(({ pageId }) => {
return openPage(blockSuiteWorkspace.id, pageId);
});
return () => {
dispose.dispose();
};
}}
onLoad={onLoad}
onInit={initEmptyPage}
/>
</>

View File

@ -33,7 +33,8 @@
"{workspaceRoot}/apps/web/.next",
"{workspaceRoot}/packages/storybook/storybook-static",
"{workspaceRoot}/packages/native/affine.*.node",
"{workspaceRoot}/affine.db"
"{workspaceRoot}/affine.db",
"{workspaceRoot}/apps/electron/dist"
],
"inputs": [
{

View File

@ -181,7 +181,6 @@ export const useZoomControls = ({
}
}, [imageRef]);
useEffect(() => {
const handleScroll = (event: WheelEvent) => {
const { deltaY } = event;

View File

@ -62,7 +62,6 @@ export const imagePreviewModalCloseButtonStyle = style({
zIndex: 1,
marginTop: '38px',
marginRight: '38px',
});
export const imagePreviewModalGoStyle = style({

View File

@ -5,7 +5,14 @@ import { useMediaQuery, useTheme } from '@mui/material';
import type React from 'react';
import { type CSSProperties } from 'react';
import { ScrollableContainer, Table, TableBody, TableCell, TableHead, TableHeadRow } from '../..';
import {
ScrollableContainer,
Table,
TableBody,
TableCell,
TableHead,
TableHeadRow,
} from '../..';
import { TableBodyRow } from '../../ui/table';
import { useHasScrollTop } from '../app-sidebar/sidebar-containers/use-has-scroll-top';
import { AllPagesBody } from './all-pages-body';
@ -141,12 +148,9 @@ export const PageList = ({
? DEFAULT_SORT_KEY
: undefined;
return (
sorter.data.length === 0 && fallback ?
<StyledTableContainer>
{fallback}
</StyledTableContainer>
:
return sorter.data.length === 0 && fallback ? (
<StyledTableContainer>{fallback}</StyledTableContainer>
) : (
<ScrollableContainer inTableView>
<StyledTableContainer ref={ref}>
<Table showBorder={hasScrollTop} style={{ maxHeight: '100%' }}>
@ -158,7 +162,6 @@ export const PageList = ({
importFile={onImportFile}
/>
<AllPagesBody
isPublicWorkspace={isPublicWorkspace}
groupKey={groupKey}
data={sorter.data}
@ -246,12 +249,9 @@ export const PageListTrashView: React.FC<{
}
);
return (
list.length === 0 && fallback ?
<StyledTableContainer>
{fallback}
</StyledTableContainer>
:
return list.length === 0 && fallback ? (
<StyledTableContainer>{fallback}</StyledTableContainer>
) : (
<ScrollableContainer inTableView>
<StyledTableContainer ref={ref}>
<Table showBorder={hasScrollTop}>

View File

@ -20,24 +20,22 @@ export const ScrollableContainer = ({
<ScrollArea.Root className={styles.scrollableContainerRoot}>
<div
data-has-scroll-top={hasScrollTop}
className={clsx({[styles.scrollTopBorder]:showScrollTopBorder})}
className={clsx({ [styles.scrollTopBorder]: showScrollTopBorder })}
/>
<ScrollArea.Viewport
className={clsx([styles.scrollableViewport])}
ref={ref}
>
<div className={styles.scrollableContainer}>
{children}
</div>
<div className={styles.scrollableContainer}>{children}</div>
</ScrollArea.Viewport>
<ScrollArea.Scrollbar
orientation="vertical"
className={clsx(styles.scrollbar,{[styles.TableScrollbar]:inTableView})}
className={clsx(styles.scrollbar, {
[styles.TableScrollbar]: inTableView,
})}
>
<ScrollArea.Thumb className={styles.scrollbarThumb} />
</ScrollArea.Scrollbar>
</ScrollArea.Root>
);
}
};

View File

@ -25,7 +25,7 @@ beforeEach(async () => {
.register(AffineSchemas)
.register(__unstableSchemas);
const initPage = async (page: Page) => {
await page.waitForLoaded()
await page.waitForLoaded();
expect(page).not.toBeNull();
assertExists(page);
const pageBlockId = page.addBlock('affine:page', {

View File

@ -51,5 +51,6 @@ export function useBlockSuiteWorkspacePage(
): Page | null {
const pageAtom = getAtom(blockSuiteWorkspace, pageId);
assertExists(pageAtom);
return useAtomValue(pageAtom);
const page = useAtomValue(pageAtom);
return page;
}

View File

@ -17,8 +17,15 @@ export abstract class HandlerManager<
}
type DBHandlers = {
getDocAsUpdates: (id: string) => Promise<Uint8Array>;
applyDocUpdate: (id: string, update: Uint8Array) => Promise<void>;
getDocAsUpdates: (
workspaceId: string,
subdocId?: string
) => Promise<Uint8Array>;
applyDocUpdate: (
id: string,
update: Uint8Array,
subdocId?: string
) => Promise<void>;
addBlob: (
workspaceId: string,
key: string,

View File

@ -1,12 +1,22 @@
use sqlx::sqlite::SqliteConnectOptions;
use std::fs;
#[tokio::main]
async fn main() -> Result<(), std::io::Error> {
dotenv::dotenv().ok();
// always start with a fresh database to have
// latest db schema
let db_path = "../../affine.db";
// check if db exists and then remove file
if fs::metadata(db_path).is_ok() {
fs::remove_file(db_path)?;
}
napi_build::setup();
let options = SqliteConnectOptions::new()
.filename("../../affine.db")
.filename(db_path)
.journal_mode(sqlx::sqlite::SqliteJournalMode::Off)
.locking_mode(sqlx::sqlite::SqliteLockingMode::Exclusive)
.create_if_missing(true);

View File

@ -32,6 +32,11 @@ export interface UpdateRow {
id: number;
timestamp: Date;
data: Buffer;
docId?: string;
}
export interface InsertRow {
docId?: string;
data: Uint8Array;
}
export class Subscription {
toString(): string;
@ -56,8 +61,14 @@ export class SqliteConnection {
getBlob(key: string): Promise<BlobRow | null>;
deleteBlob(key: string): Promise<void>;
getBlobKeys(): Promise<Array<string>>;
getUpdates(): Promise<Array<UpdateRow>>;
insertUpdates(updates: Array<Uint8Array>): Promise<void>;
getUpdates(docId?: string | undefined | null): Promise<Array<UpdateRow>>;
getUpdatesCount(docId?: string | undefined | null): Promise<number>;
getAllUpdates(): Promise<Array<UpdateRow>>;
insertUpdates(updates: Array<InsertRow>): Promise<void>;
replaceUpdates(
docId: string | undefined | null,
updates: Array<InsertRow>
): Promise<void>;
close(): Promise<void>;
get isClose(): boolean;
static validate(path: string): Promise<boolean>;

View File

@ -4,7 +4,8 @@
pub const SCHEMA: &str = r#"CREATE TABLE IF NOT EXISTS "updates" (
id INTEGER PRIMARY KEY AUTOINCREMENT,
data BLOB NOT NULL,
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
doc_id TEXT
);
CREATE TABLE IF NOT EXISTS "blobs" (
key TEXT PRIMARY KEY NOT NULL,

View File

@ -19,6 +19,13 @@ pub struct UpdateRow {
pub id: i64,
pub timestamp: NaiveDateTime,
pub data: Buffer,
pub doc_id: Option<String>,
}
#[napi(object)]
pub struct InsertRow {
pub doc_id: Option<String>,
pub data: Uint8Array,
}
#[napi]
@ -53,6 +60,7 @@ impl SqliteConnection {
.execute(connection.as_mut())
.await
.map_err(anyhow::Error::from)?;
self.migrate_add_doc_id().await?;
connection.detach();
Ok(())
}
@ -74,7 +82,11 @@ impl SqliteConnection {
#[napi]
pub async fn get_blob(&self, key: String) -> Option<BlobRow> {
sqlx::query_as!(BlobRow, "SELECT * FROM blobs WHERE key = ?", key)
sqlx::query_as!(
BlobRow,
"SELECT key, data, timestamp FROM blobs WHERE key = ?",
key
)
.fetch_one(&self.pool)
.await
.ok()
@ -100,8 +112,54 @@ impl SqliteConnection {
}
#[napi]
pub async fn get_updates(&self) -> napi::Result<Vec<UpdateRow>> {
let updates = sqlx::query_as!(UpdateRow, "SELECT * FROM updates")
// Fetch persisted update rows for a single document.
// `doc_id == None` targets the root doc, whose rows are stored with a NULL
// doc_id; `Some(id)` targets that subdocument. Two separate queries are
// needed because `doc_id = ?` never matches NULL rows in SQLite.
pub async fn get_updates(&self, doc_id: Option<String>) -> napi::Result<Vec<UpdateRow>> {
let updates = match doc_id {
Some(doc_id) => sqlx::query_as!(
UpdateRow,
"SELECT id, timestamp, data, doc_id FROM updates WHERE doc_id = ?",
doc_id
)
.fetch_all(&self.pool)
.await
.map_err(anyhow::Error::from)?,
None => sqlx::query_as!(
UpdateRow,
"SELECT id, timestamp, data, doc_id FROM updates WHERE doc_id is NULL",
)
.fetch_all(&self.pool)
.await
.map_err(anyhow::Error::from)?,
};
Ok(updates)
}
#[napi]
// Count update rows for one document (NULL doc_id == root doc).
// Used by the TypeScript side (trimWhenNecessary) to decide when a doc's
// rows should be compacted into a single snapshot.
pub async fn get_updates_count(&self, doc_id: Option<String>) -> napi::Result<i32> {
let count = match doc_id {
Some(doc_id) => {
sqlx::query!(
"SELECT COUNT(*) as count FROM updates WHERE doc_id = ?",
doc_id
)
.fetch_one(&self.pool)
.await
.map_err(anyhow::Error::from)?
.count
}
None => {
// Separate query: `doc_id = ?` would not match NULL rows.
sqlx::query!("SELECT COUNT(*) as count FROM updates WHERE doc_id is NULL")
.fetch_one(&self.pool)
.await
.map_err(anyhow::Error::from)?
.count
}
};
Ok(count)
}
#[napi]
pub async fn get_all_updates(&self) -> napi::Result<Vec<UpdateRow>> {
let updates = sqlx::query_as!(UpdateRow, "SELECT id, timestamp, data, doc_id FROM updates")
.fetch_all(&self.pool)
.await
.map_err(anyhow::Error::from)?;
@ -109,11 +167,51 @@ impl SqliteConnection {
}
#[napi]
pub async fn insert_updates(&self, updates: Vec<Uint8Array>) -> napi::Result<()> {
pub async fn insert_updates(&self, updates: Vec<InsertRow>) -> napi::Result<()> {
let mut transaction = self.pool.begin().await.map_err(anyhow::Error::from)?;
for update in updates.into_iter() {
let update = update.as_ref();
sqlx::query_as!(UpdateRow, "INSERT INTO updates (data) VALUES ($1)", update)
for InsertRow { data, doc_id } in updates {
let update = data.as_ref();
sqlx::query_as!(
UpdateRow,
"INSERT INTO updates (data, doc_id) VALUES ($1, $2)",
update,
doc_id
)
.execute(&mut *transaction)
.await
.map_err(anyhow::Error::from)?;
}
transaction.commit().await.map_err(anyhow::Error::from)?;
Ok(())
}
#[napi]
pub async fn replace_updates(
&self,
doc_id: Option<String>,
updates: Vec<InsertRow>,
) -> napi::Result<()> {
let mut transaction = self.pool.begin().await.map_err(anyhow::Error::from)?;
match doc_id {
Some(doc_id) => sqlx::query!("DELETE FROM updates where doc_id = ?", doc_id)
.execute(&mut *transaction)
.await
.map_err(anyhow::Error::from)?,
None => sqlx::query!("DELETE FROM updates where doc_id is NULL",)
.execute(&mut *transaction)
.await
.map_err(anyhow::Error::from)?,
};
for InsertRow { data, doc_id } in updates {
let update = data.as_ref();
sqlx::query_as!(
UpdateRow,
"INSERT INTO updates (data, doc_id) VALUES ($1, $2)",
update,
doc_id
)
.execute(&mut *transaction)
.await
.map_err(anyhow::Error::from)?;
@ -158,4 +256,22 @@ impl SqliteConnection {
false
}
}
// todo: have a better way to handle migration
// Ad-hoc schema migration: add the `doc_id` column to databases created
// before subdocument support. Invoked on connect; treated as a no-op when
// the column already exists.
async fn migrate_add_doc_id(&self) -> Result<(), anyhow::Error> {
// ignore errors
match sqlx::query("ALTER TABLE updates ADD COLUMN doc_id TEXT")
.execute(&self.pool)
.await
{
Ok(_) => Ok(()),
Err(err) => {
// SQLite reports "duplicate column name" when the migration has
// already run — that is the expected steady state, not a failure.
if err.to_string().contains("duplicate column name") {
Ok(()) // Ignore error if it's due to duplicate column
} else {
Err(anyhow::Error::from(err)) // Propagate other errors
}
}
}
}
}

View File

@ -140,7 +140,7 @@ describe('ydoc sync', () => {
const pageId = uuidv4();
const page1 = workspace1.createPage({ id: pageId });
await page1.waitForLoaded()
await page1.waitForLoaded();
const pageBlockId = page1.addBlock('affine:page', {
title: new page1.Text(''),
});
@ -153,7 +153,7 @@ describe('ydoc sync', () => {
workspace1.doc.getMap(`space:${pageId}`).toJSON()
);
const page2 = workspace2.getPage(pageId) as Page;
await page2.waitForLoaded()
await page2.waitForLoaded();
page1.updateBlock(
page1.getBlockById(paragraphId) as ParagraphBlockModel,
{

View File

@ -7,7 +7,10 @@ import type { Y as YType } from '@blocksuite/store';
import { uuidv4, Workspace } from '@blocksuite/store';
import { beforeEach, describe, expect, test, vi } from 'vitest';
import { createSQLiteDBDownloadProvider, createSQLiteProvider } from '../index';
import {
createSQLiteDBDownloadProvider,
createSQLiteProvider,
} from '../sqlite-providers';
const Y = Workspace.Y;
@ -148,15 +151,21 @@ describe('SQLite download provider', () => {
test('disconnect handlers', async () => {
const offHandler = vi.fn();
let handleUpdate = () => {};
workspace.doc.on = (_: string, fn: () => void) => {
let handleSubdocs = () => {};
workspace.doc.on = (event: string, fn: () => void) => {
if (event === 'update') {
handleUpdate = fn;
} else if (event === 'subdocs') {
handleSubdocs = fn;
}
};
workspace.doc.off = offHandler;
await provider.connect();
provider.connect();
provider.disconnect();
expect(triggerDBUpdate).toBe(null);
expect(offHandler).toBeCalledWith('update', handleUpdate);
expect(offHandler).toBeCalledWith('subdocs', handleSubdocs);
});
});

View File

@ -3,8 +3,6 @@ import type {
AffineWebSocketProvider,
LocalIndexedDBBackgroundProvider,
LocalIndexedDBDownloadProvider,
SQLiteDBDownloadProvider,
SQLiteProvider,
} from '@affine/env/workspace';
import type { Disposable, DocProviderCreator } from '@blocksuite/store';
import { assertExists, Workspace } from '@blocksuite/store';
@ -21,6 +19,10 @@ import { getLoginStorage, storageChangeSlot } from '../affine/login';
import { CallbackSet } from '../utils';
import { createAffineDownloadProvider } from './affine-download';
import { localProviderLogger as logger } from './logger';
import {
createSQLiteDBDownloadProvider,
createSQLiteProvider,
} from './sqlite-providers';
const Y = Workspace.Y;
@ -151,151 +153,6 @@ const createIndexedDBDownloadProvider: DocProviderCreator = (
};
};
const sqliteOrigin = Symbol('sqlite-provider-origin');
const createSQLiteProvider: DocProviderCreator = (id, doc): SQLiteProvider => {
const { apis, events } = window;
// make sure it is being used in Electron with APIs
assertExists(apis);
assertExists(events);
function handleUpdate(update: Uint8Array, origin: unknown) {
if (origin === sqliteOrigin) {
return;
}
apis.db.applyDocUpdate(id, update).catch(err => {
console.error(err);
});
}
let unsubscribe = () => {};
let connected = false;
const connect = () => {
logger.info('connecting sqlite provider', id);
doc.on('update', handleUpdate);
unsubscribe = events.db.onExternalUpdate(
({
update,
workspaceId,
}: {
workspaceId: string;
update: Uint8Array;
}) => {
if (workspaceId === id) {
Y.applyUpdate(doc, update, sqliteOrigin);
}
}
);
connected = true;
logger.info('connecting sqlite done', id);
};
const cleanup = () => {
logger.info('disconnecting sqlite provider', id);
unsubscribe();
doc.off('update', handleUpdate);
connected = false;
};
return {
flavour: 'sqlite',
passive: true,
get connected(): boolean {
return connected;
},
cleanup,
connect,
disconnect: cleanup,
};
};
const createSQLiteDBDownloadProvider: DocProviderCreator = (
  id,
  doc
): SQLiteDBDownloadProvider => {
  const { apis } = window;
  let disconnected = false;

  let _resolve: () => void;
  let _reject: (error: unknown) => void;
  // Resolved once the initial download has completed (or rejected on error);
  // exposed to callers via `whenReady`.
  const promise = new Promise<void>((resolve, reject) => {
    _resolve = resolve;
    _reject = reject;
  });

  /**
   * Apply the state persisted in sqlite to the local doc, then write back
   * to sqlite whatever local state it is missing.
   */
  async function syncUpdates() {
    logger.info('syncing updates from sqlite', id);
    const updates = await apis.db.getDocAsUpdates(id);

    if (disconnected) {
      return;
    }

    if (updates) {
      Y.applyUpdate(doc, updates, sqliteOrigin);
    }

    // encodeStateAsUpdate expects an encoded *state vector* as its second
    // argument; passing the raw update (as before) yields a wrong diff.
    // Derive the state vector from the persisted update instead.
    const diff = updates
      ? Y.encodeStateAsUpdate(doc, Y.encodeStateVectorFromUpdate(updates))
      : Y.encodeStateAsUpdate(doc);
    // also apply updates to sqlite
    await apis.db.applyDocUpdate(id, diff);
  }

  // fixme(pengx17): shouldn't sync blob in doc provider
  // async function _syncBlobIntoSQLite(bs: BlobManager) {
  //   const persistedKeys = await apis.db.getBlobKeys(id);
  //
  //   if (disconnected) {
  //     return;
  //   }
  //
  //   const allKeys = await bs.list().catch(() => []);
  //   const keysToPersist = allKeys.filter(k => !persistedKeys.includes(k));
  //
  //   logger.info('persisting blobs', keysToPersist, 'to sqlite');
  //   return Promise.all(
  //     keysToPersist.map(async k => {
  //       const blob = await bs.get(k);
  //       if (!blob) {
  //         logger.warn('blob not found for', k);
  //         return;
  //       }
  //
  //       if (disconnected) {
  //         return;
  //       }
  //
  //       return apis?.db.addBlob(
  //         id,
  //         k,
  //         new Uint8Array(await blob.arrayBuffer())
  //       );
  //     })
  //   );
  // }

  return {
    flavour: 'sqlite-download',
    active: true,
    get whenReady() {
      return promise;
    },
    cleanup: () => {
      disconnected = true;
    },
    sync: async () => {
      // was 'connect indexeddb provider' — copy-paste from the indexeddb
      // provider; this is the sqlite download provider.
      logger.info('syncing sqlite-download provider', id);
      try {
        await syncUpdates();
        _resolve();
      } catch (error) {
        _reject(error);
      }
    },
  };
};
export {
createAffineDownloadProvider,
createAffineWebSocketProvider,

View File

@ -0,0 +1,212 @@
import type {
SQLiteDBDownloadProvider,
SQLiteProvider,
} from '@affine/env/workspace';
import type { DocProviderCreator } from '@blocksuite/store';
import {
assertExists,
Workspace as BlockSuiteWorkspace,
} from '@blocksuite/store';
import type { Doc } from 'yjs';
import { localProviderLogger as logger } from './logger';
// yjs as re-exported by BlockSuite — presumably used so this module shares
// the exact same yjs instance as BlockSuite docs; TODO confirm.
const Y = BlockSuiteWorkspace.Y;
// Transaction origin tag for updates applied *from* sqlite, so the update
// handlers below can avoid echoing them straight back into the database.
const sqliteOrigin = Symbol('sqlite-provider-origin');
// Payload shape of the Y.Doc 'subdocs' event consumed by the providers.
type SubDocsEvent = {
  added: Set<Doc>;
  removed: Set<Doc>;
  loaded: Set<Doc>;
};
/**
 * A provider that is responsible for syncing updates of the workspace with the
 * local SQLite database.
 */
export const createSQLiteProvider: DocProviderCreator = (
  id,
  rootDoc
): SQLiteProvider => {
  const { apis, events } = window;
  // make sure it is being used in Electron with APIs
  assertExists(apis);
  assertExists(events);

  // Handlers are memoized per doc so that `doc.off` receives the exact same
  // function instance that was passed to `doc.on`; a freshly created closure
  // would never be removed.
  const updateHandlerMap = new WeakMap<
    Doc,
    (update: Uint8Array, origin: unknown) => void
  >();
  const subDocsHandlerMap = new WeakMap<Doc, (event: SubDocsEvent) => void>();

  // Renamed from createOrHandleUpdate for consistency with
  // createOrGetHandleSubDocs below.
  const createOrGetHandleUpdate = (doc: Doc) => {
    if (updateHandlerMap.has(doc)) {
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      return updateHandlerMap.get(doc)!;
    }
    function handleUpdate(update: Uint8Array, origin: unknown) {
      // Updates tagged with sqliteOrigin came from sqlite itself; do not
      // write them back.
      if (origin === sqliteOrigin) {
        return;
      }
      // The root doc is addressed by the workspace id alone; subdocs are
      // addressed by their guid.
      const subdocId = doc.guid === id ? undefined : doc.guid;
      apis.db.applyDocUpdate(id, update, subdocId).catch(err => {
        logger.error(err);
      });
    }
    updateHandlerMap.set(doc, handleUpdate);
    return handleUpdate;
  };

  const createOrGetHandleSubDocs = (doc: Doc) => {
    if (subDocsHandlerMap.has(doc)) {
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      return subDocsHandlerMap.get(doc)!;
    }
    function handleSubdocs(event: SubDocsEvent) {
      event.removed.forEach(doc => {
        untrackDoc(doc);
      });
      // Only loaded subdocs are tracked; an added-but-unloaded subdoc will
      // show up in a later 'loaded' set.
      event.loaded.forEach(doc => {
        trackDoc(doc);
      });
    }
    subDocsHandlerMap.set(doc, handleSubdocs);
    return handleSubdocs;
  };

  // Recursively attach update/subdocs listeners to a doc and its subdocs.
  function trackDoc(doc: Doc) {
    doc.on('update', createOrGetHandleUpdate(doc));
    doc.on('subdocs', createOrGetHandleSubDocs(doc));
    doc.subdocs.forEach(doc => {
      trackDoc(doc);
    });
  }

  function untrackDoc(doc: Doc) {
    doc.subdocs.forEach(doc => {
      untrackDoc(doc);
    });
    doc.off('update', createOrGetHandleUpdate(doc));
    doc.off('subdocs', createOrGetHandleSubDocs(doc));
  }

  let unsubscribe = () => {};
  let connected = false;

  const connect = () => {
    logger.info('connecting sqlite provider', id);
    trackDoc(rootDoc);
    unsubscribe = events.db.onExternalUpdate(
      ({
        update,
        workspaceId,
        docId,
      }: {
        workspaceId: string;
        update: Uint8Array;
        docId?: string;
      }) => {
        if (workspaceId === id) {
          if (docId) {
            // Search the whole subdoc tree, not only the direct children:
            // trackDoc registers handlers recursively, so updates for
            // nested subdocs can arrive here too. Updates for unknown
            // guids are dropped.
            const queue = [...rootDoc.subdocs];
            while (queue.length > 0) {
              // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
              const doc = queue.shift()!;
              if (doc.guid === docId) {
                Y.applyUpdate(doc, update, sqliteOrigin);
                return;
              }
              queue.push(...doc.subdocs);
            }
          } else {
            Y.applyUpdate(rootDoc, update, sqliteOrigin);
          }
        }
      }
    );
    connected = true;
    logger.info('connecting sqlite done', id);
  };

  const cleanup = () => {
    logger.info('disconnecting sqlite provider', id);
    unsubscribe();
    untrackDoc(rootDoc);
    connected = false;
  };

  return {
    flavour: 'sqlite',
    passive: true,
    get connected(): boolean {
      return connected;
    },
    cleanup,
    connect,
    disconnect: cleanup,
  };
};
/**
 * A provider that is responsible for DOWNLOADING updates from the local
 * SQLite database into the workspace root doc and its loaded subdocs.
 */
export const createSQLiteDBDownloadProvider: DocProviderCreator = (
  id,
  rootDoc
): SQLiteDBDownloadProvider => {
  const { apis } = window;
  let disconnected = false;

  let _resolve: () => void;
  let _reject: (error: unknown) => void;
  // Resolved once the initial download completes (or rejected on error);
  // exposed to callers via `whenReady`.
  const promise = new Promise<void>((resolve, reject) => {
    _resolve = resolve;
    _reject = reject;
  });

  /**
   * Pull the persisted state for `doc` out of sqlite, apply it locally, then
   * push back whatever local state sqlite is missing.
   *
   * @returns false when the provider was disconnected mid-sync.
   */
  async function syncUpdates(doc: Doc) {
    logger.info('syncing updates from sqlite', id);
    // The root doc is addressed by the workspace id alone; subdocs by guid.
    const subdocId = doc.guid === id ? undefined : doc.guid;
    const updates = await apis.db.getDocAsUpdates(id, subdocId);

    if (disconnected) {
      return false;
    }

    if (updates) {
      Y.applyUpdate(doc, updates, sqliteOrigin);
    }

    // encodeStateAsUpdate expects an encoded *state vector* as its second
    // argument; passing the raw update (as before) yields a wrong diff.
    // Derive the state vector from the persisted update instead.
    const diff = updates
      ? Y.encodeStateAsUpdate(doc, Y.encodeStateVectorFromUpdate(updates))
      : Y.encodeStateAsUpdate(doc);
    // also apply updates to sqlite
    await apis.db.applyDocUpdate(id, diff, subdocId);
    return true;
  }

  // Sync the given doc, then (in parallel) every subdoc marked shouldLoad.
  // Stops descending if the parent sync was aborted by disconnection.
  async function syncAllUpdates(doc: Doc) {
    if (await syncUpdates(doc)) {
      const subdocs = Array.from(doc.subdocs).filter(d => d.shouldLoad);
      await Promise.all(subdocs.map(syncAllUpdates));
    }
  }

  return {
    flavour: 'sqlite-download',
    active: true,
    get whenReady() {
      return promise;
    },
    cleanup: () => {
      disconnected = true;
    },
    sync: async () => {
      // was 'connect indexeddb provider' — copy-paste from the indexeddb
      // provider; this is the sqlite download provider.
      logger.info('syncing sqlite-download provider', id);
      try {
        await syncAllUpdates(rootDoc);
        _resolve();
      } catch (error) {
        _reject(error);
      }
    },
  };
};

View File

@ -107,6 +107,7 @@ describe('indexeddb provider', () => {
})
.register(AffineSchemas)
.register(__unstableSchemas);
// data should only contain updates for the root doc
data.updates.forEach(({ update }) => {
Workspace.Y.applyUpdate(testWorkspace.doc, update);
});
@ -125,19 +126,6 @@ describe('indexeddb provider', () => {
}
expect(workspace.doc.toJSON()).toEqual(testWorkspace.doc.toJSON());
}
const secondWorkspace = new Workspace({
id,
})
.register(AffineSchemas)
.register(__unstableSchemas);
const provider2 = createIndexedDBProvider(secondWorkspace.doc, rootDBName);
provider2.connect();
await provider2.whenSynced;
const page = secondWorkspace.getPage('page0');
assertExists(page);
await page.waitForLoaded();
expect(workspace.doc.toJSON()).toEqual(secondWorkspace.doc.toJSON());
});
test('disconnect suddenly', async () => {
@ -423,6 +411,7 @@ describe('subDoc', () => {
provider.disconnect();
json2 = doc.toJSON();
}
// the following line compares {} with {}
expect(json1['']['1'].toJSON()).toEqual(json2['']['1'].toJSON());
expect(json1['']['2']).toEqual(json2['']['2']);
});

View File

@ -195,10 +195,12 @@ const selectDateFromDatePicker = async (page: Page, date: Date) => {
);
await nextMonthButton.click();
}
const map = ['th', 'st', 'nd', 'rd']
const map = ['th', 'st', 'nd', 'rd'];
// Click on the day cell
const dateCell = page.locator(
`[aria-disabled="false"][aria-label="Choose ${weekday}, ${month} ${day}${map[Number.parseInt(day) % 10] ?? 'th'}, ${year}"]`
`[aria-disabled="false"][aria-label="Choose ${weekday}, ${month} ${day}${
map[Number.parseInt(day) % 10] ?? 'th'
}, ${year}"]`
);
await dateCell.click();
};