mirror of https://github.com/toeverything/AFFiNE.git (synced 2024-12-24 20:41:49 +03:00)
refactor(electron): sqlite db data workflow (remove symlink & fs watcher) (#2491)
This commit is contained in: parent f3ac12254c, commit 20cf45270d
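For orientation: this refactor drops the symlink-plus-FsWatcher design in favor of a copy-based layout. Every workspace keeps an internal main DB under the app data directory (mainDBPath in the workspace meta), while an optional user-visible .affine file is tracked as secondaryDBPath; loadDBFile now copies the chosen file instead of symlinking it, moveDBFile copies the main DB out and updates the meta, and the old file watcher is replaced by a 30-second polling pull in ensure-db.ts. The TypeScript sketch below only illustrates that shape, using names that appear in the diff (secondaryDBPath, pull); the simplified types and the helper function are assumptions, not the actual implementation.

import { interval, Observable, Subscription, takeUntil } from 'rxjs';

// Sketch of the per-workspace meta this commit stores (see storeWorkspaceMeta in dialog.ts).
interface WorkspaceMetaSketch {
  id: string;
  mainDBPath: string; // internal copy under appData, always present
  secondaryDBPath?: string; // optional external .affine file chosen by the user
}

interface SecondaryDBLike {
  pull(): void; // read updates/blobs from the external file and merge them into the in-memory doc
}

// Polling replaces the FS watcher: every 30 seconds the secondary file is pulled,
// mirroring startPollingSecondaryDB in ensure-db.ts (interval value taken from the diff).
function pollSecondarySketch(secondary: SecondaryDBLike, stop$: Observable<unknown>): Subscription {
  return interval(30_000)
    .pipe(takeUntil(stop$))
    .subscribe(() => secondary.pull());
}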
@@ -2,12 +2,11 @@ import assert from 'node:assert';
import path from 'node:path';

import fs from 'fs-extra';
import type { Subscription } from 'rxjs';
import { v4 } from 'uuid';
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
import * as Y from 'yjs';

import type { MainIPCHandlerMap } from '../../../../constraints';
import type { MainIPCHandlerMap } from '../../../constraints';

const registeredHandlers = new Map<
string,
@@ -42,6 +41,7 @@ ReturnType<MainIPCHandlerMap[T][F]> {
}

const SESSION_DATA_PATH = path.join(__dirname, './tmp', 'affine-test');
const DOCUMENTS_PATH = path.join(__dirname, './tmp', 'affine-test-documents');

const browserWindow = {
isDestroyed: () => {
@@ -92,8 +92,12 @@ function compareBuffer(a: Uint8Array | null, b: Uint8Array | null) {
const electronModule = {
app: {
getPath: (name: string) => {
assert(name === 'sessionData');
if (name === 'sessionData') {
return SESSION_DATA_PATH;
} else if (name === 'documents') {
return DOCUMENTS_PATH;
}
throw new Error('not implemented');
},
name: 'affine-test',
on: (name: string, callback: (...args: any[]) => any) => {
@@ -123,27 +127,23 @@ vi.doMock('electron', () => {
return electronModule;
});

let connectableSubscription: Subscription;

beforeEach(async () => {
const { registerHandlers } = await import('../register');
const { registerHandlers } = await import('../handlers');
registerHandlers();

// should also register events
const { registerEvents } = await import('../../events');
const { registerEvents } = await import('../events');
registerEvents();
await fs.mkdirp(SESSION_DATA_PATH);
const { database$ } = await import('../db/ensure-db');
await import('../db/ensure-db');

connectableSubscription = database$.connect();
registeredHandlers.get('ready')?.forEach(fn => fn());
});

afterEach(async () => {
// reset registered handlers
registeredHandlers.get('before-quit')?.forEach(fn => fn());

connectableSubscription.unsubscribe();

await fs.remove(SESSION_DATA_PATH);
});

@@ -157,55 +157,26 @@ describe('ensureSQLiteDB', () => {
expect(fileExists).toBe(true);
});

test('when db file is removed', async () => {
// stub webContents.send
const sendSpy = vi.spyOn(browserWindow.webContents, 'send');
const id = v4();
test('should emit the same db instance for the same id', async () => {
const [id1, id2] = [v4(), v4()];
const { ensureSQLiteDB } = await import('../db/ensure-db');
let workspaceDB = await ensureSQLiteDB(id);
const file = workspaceDB.path;
const fileExists = await fs.pathExists(file);
expect(fileExists).toBe(true);

// Can't remove file on Windows, because the sqlite is still holding the file handle
if (process.platform === 'win32') {
return;
}

await fs.remove(file);

// wait for 2000ms for file watcher to detect file removal
await delay(2000);

expect(sendSpy).toBeCalledWith('db:onDBFileMissing', id);

// ensureSQLiteDB should recreate the db file
workspaceDB = await ensureSQLiteDB(id);
const fileExists2 = await fs.pathExists(file);
expect(fileExists2).toBe(true);
sendSpy.mockRestore();
const workspaceDB1 = await ensureSQLiteDB(id1);
const workspaceDB2 = await ensureSQLiteDB(id2);
const workspaceDB3 = await ensureSQLiteDB(id1);
expect(workspaceDB1).toBe(workspaceDB3);
expect(workspaceDB1).not.toBe(workspaceDB2);
});

test('when db file is updated', async () => {
test('when app quit, db should be closed', async () => {
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
const { dbSubjects } = await import('../../events/db');
const workspaceDB = await ensureSQLiteDB(id);
const file = workspaceDB.path;
const fileExists = await fs.pathExists(file);
expect(fileExists).toBe(true);
const dbUpdateSpy = vi.spyOn(dbSubjects.dbFileUpdate, 'next');
registeredHandlers.get('before-quit')?.forEach(fn => fn());
await delay(100);
// writes some data to the db file
await fs.appendFile(file, 'random-data', { encoding: 'binary' });
// write again
await fs.appendFile(file, 'random-data', { encoding: 'binary' });

// wait for 2000ms for file watcher to detect file change
await delay(2000);

expect(dbUpdateSpy).toBeCalledWith(id);
dbUpdateSpy.mockRestore();
expect(workspaceDB.db?.open).toBe(false);
});
});

@@ -219,16 +190,14 @@ describe('workspace handlers', () => {
});

test('delete workspace', async () => {
// @TODO dispatch is hanging on Windows
if (process.platform === 'win32') {
return;
}
const ids = [v4(), v4()];
const { ensureSQLiteDB } = await import('../db/ensure-db');
await Promise.all(ids.map(id => ensureSQLiteDB(id)));
const dbs = await Promise.all(ids.map(id => ensureSQLiteDB(id)));
await dispatch('workspace', 'delete', ids[1]);
const list = await dispatch('workspace', 'list');
expect(list.map(([id]) => id)).toEqual([ids[0]]);
// deleted db should be closed
expect(dbs[1].db?.open).toBe(false);
});
});

@@ -290,7 +259,7 @@ describe('db handlers', () => {

test('list blobs (empty)', async () => {
const workspaceId = v4();
const list = await dispatch('db', 'getPersistedBlobs', workspaceId);
const list = await dispatch('db', 'getBlobKeys', workspaceId);
expect(list).toEqual([]);
});

@@ -320,14 +289,14 @@ describe('db handlers', () => {
).toBe(true);

// list blobs
let lists = await dispatch('db', 'getPersistedBlobs', workspaceId);
let lists = await dispatch('db', 'getBlobKeys', workspaceId);
expect(lists).toHaveLength(2);
expect(lists).toContain('testBin');
expect(lists).toContain('testBin2');

// delete blob
await dispatch('db', 'deleteBlob', workspaceId, 'testBin');
lists = await dispatch('db', 'getPersistedBlobs', workspaceId);
lists = await dispatch('db', 'getBlobKeys', workspaceId);
expect(lists).toEqual(['testBin2']);
});
});
@@ -409,10 +378,10 @@ describe('dialog handlers', () => {
expect(res.error).toBe('DB_FILE_PATH_INVALID');
});

test('loadDBFile (error, not a valid db file)', async () => {
test('loadDBFile (error, not a valid affine file)', async () => {
// create a random db file
const basePath = path.join(SESSION_DATA_PATH, 'random-path');
const dbPath = path.join(basePath, 'xxx.db');
const dbPath = path.join(basePath, 'xxx.affine');
await fs.ensureDir(basePath);
await fs.writeFile(dbPath, 'hello world');

@@ -428,7 +397,7 @@ describe('dialog handlers', () => {
electronModule.dialog = {};
});

test('loadDBFile', async () => {
test('loadDBFile (correct)', async () => {
// we use ensureSQLiteDB to create a valid db file
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
@@ -436,65 +405,66 @@ describe('dialog handlers', () => {

// copy db file to dbPath
const basePath = path.join(SESSION_DATA_PATH, 'random-path');
const originDBFilePath = path.join(basePath, 'xxx.db');
const clonedDBPath = path.join(basePath, 'xxx.affine');
await fs.ensureDir(basePath);
await fs.copyFile(db.path, originDBFilePath);
await fs.copyFile(db.path, clonedDBPath);

// on Windows, we skip this test because we can't delete the db file
if (process.platform === 'win32') {
return;
}

// remove db
await fs.remove(db.path);
// delete workspace
await dispatch('workspace', 'delete', id);

// try load originDBFilePath
const mockShowOpenDialog = vi.fn(() => {
return { filePaths: [originDBFilePath] };
return { filePaths: [clonedDBPath] };
}) as any;
electronModule.dialog.showOpenDialog = mockShowOpenDialog;

const res = await dispatch('dialog', 'loadDBFile');
expect(mockShowOpenDialog).toBeCalled();
expect(res.workspaceId).not.toBeUndefined();
const newId = res.workspaceId;

const importedDb = await ensureSQLiteDB(res.workspaceId!);
expect(await fs.realpath(importedDb.path)).toBe(originDBFilePath);
expect(importedDb.path).not.toBe(originDBFilePath);
expect(newId).not.toBeUndefined();

assert(newId);

const meta = await dispatch('workspace', 'getMeta', newId);

expect(meta.secondaryDBPath).toBe(clonedDBPath);

// try load it again, will trigger error (db file already loaded)
const res2 = await dispatch('dialog', 'loadDBFile');
expect(res2.error).toBe('DB_FILE_ALREADY_LOADED');
});

test('moveDBFile', async () => {
test('moveDBFile (valid)', async () => {
const newPath = path.join(SESSION_DATA_PATH, 'xxx');
const mockShowSaveDialog = vi.fn(() => {
return { filePath: newPath };
const showOpenDialog = vi.fn(() => {
return { filePaths: [newPath] };
}) as any;
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
electronModule.dialog.showOpenDialog = showOpenDialog;

const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
await ensureSQLiteDB(id);
const res = await dispatch('dialog', 'moveDBFile', id);
expect(mockShowSaveDialog).toBeCalled();
expect(res.filePath).toBe(newPath);
expect(showOpenDialog).toBeCalled();
assert(res.filePath);
expect(path.dirname(res.filePath)).toBe(newPath);
expect(res.filePath.endsWith('.affine')).toBe(true);
electronModule.dialog = {};
});

test('moveDBFile (skipped)', async () => {
const mockShowSaveDialog = vi.fn(() => {
return { filePath: null };
test('moveDBFile (canceled)', async () => {
const showOpenDialog = vi.fn(() => {
return { filePaths: null };
}) as any;
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
electronModule.dialog.showOpenDialog = showOpenDialog;

const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
await ensureSQLiteDB(id);

const res = await dispatch('dialog', 'moveDBFile', id);
expect(mockShowSaveDialog).toBeCalled();
expect(showOpenDialog).toBeCalled();
expect(res.filePath).toBe(undefined);
electronModule.dialog = {};
});
@@ -1,9 +1,9 @@
import { app, Menu } from 'electron';

import { isMacOS } from '../../utils';
import { subjects } from './events';
import { checkForUpdatesAndNotify } from './handlers/updater';
import { revealLogFile } from './logger';
import { isMacOS } from '../../../utils';
import { revealLogFile } from '../logger';
import { checkForUpdatesAndNotify } from '../updater';
import { applicationMenuSubjects } from './subject';

// Unique id for menuitems
const MENUITEM_NEW_PAGE = 'affine:new-page';
@@ -43,7 +43,7 @@ export function createApplicationMenu() {
label: 'New Page',
accelerator: isMac ? 'Cmd+N' : 'Ctrl+N',
click: () => {
subjects.applicationMenu.newPageAction.next();
applicationMenuSubjects.newPageAction.next();
},
},
{ type: 'separator' },
@@ -117,7 +117,7 @@ export function createApplicationMenu() {
},
},
{
label: 'Open logs folder',
label: 'Open log file',
click: async () => {
revealLogFile();
},
@@ -1,10 +1,8 @@
import { Subject } from 'rxjs';
import type { MainEventListener } from '../type';
import { applicationMenuSubjects } from './subject';

import type { MainEventListener } from './type';

export const applicationMenuSubjects = {
newPageAction: new Subject<void>(),
};
export * from './create';
export * from './subject';

/**
* Events triggered by application menu
@@ -0,0 +1,5 @@
import { Subject } from 'rxjs';

export const applicationMenuSubjects = {
newPageAction: new Subject<void>(),
};
152 apps/electron/layers/main/src/db/base-db-adapter.ts Normal file
@@ -0,0 +1,152 @@
import assert from 'assert';
import type { Database } from 'better-sqlite3';
import sqlite from 'better-sqlite3';

import { logger } from '../logger';

const schemas = [
`CREATE TABLE IF NOT EXISTS "updates" (
id INTEGER PRIMARY KEY AUTOINCREMENT,
data BLOB NOT NULL,
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
)`,
`CREATE TABLE IF NOT EXISTS "blobs" (
key TEXT PRIMARY KEY NOT NULL,
data BLOB NOT NULL,
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
)`,
];

interface UpdateRow {
id: number;
data: Buffer;
timestamp: string;
}

interface BlobRow {
key: string;
data: Buffer;
timestamp: string;
}

/**
* A base class for SQLite DB adapter that provides basic methods around updates & blobs
*/
export abstract class BaseSQLiteAdapter {
db: Database | null = null;
abstract role: string;

constructor(public path: string) {}

ensureTables() {
assert(this.db, 'db is not connected');
this.db.exec(schemas.join(';'));
}

// todo: what if SQLite DB wrapper later is not sync?
connect() {
if (this.db) {
return;
}
logger.log(`[SQLiteAdapter][${this.role}] open db`, this.path);
const db = (this.db = sqlite(this.path));
this.ensureTables();
return db;
}

destroy() {
this.db?.close();
this.db = null;
}

addBlob(key: string, data: Uint8Array) {
try {
assert(this.db, 'db is not connected');
const statement = this.db.prepare(
'INSERT INTO blobs (key, data) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET data = ?'
);
statement.run(key, data, data);
return key;
} catch (error) {
logger.error('addBlob', error);
}
}

getBlob(key: string) {
try {
assert(this.db, 'db is not connected');
const statement = this.db.prepare('SELECT data FROM blobs WHERE key = ?');
const row = statement.get(key) as BlobRow;
if (!row) {
return null;
}
return row.data;
} catch (error) {
logger.error('getBlob', error);
return null;
}
}

deleteBlob(key: string) {
try {
assert(this.db, 'db is not connected');
const statement = this.db.prepare('DELETE FROM blobs WHERE key = ?');
statement.run(key);
} catch (error) {
logger.error('deleteBlob', error);
}
}

getBlobKeys() {
try {
assert(this.db, 'db is not connected');
const statement = this.db.prepare('SELECT key FROM blobs');
const rows = statement.all() as BlobRow[];
return rows.map(row => row.key);
} catch (error) {
logger.error('getBlobKeys', error);
return [];
}
}

getUpdates() {
try {
assert(this.db, 'db is not connected');
const statement = this.db.prepare('SELECT * FROM updates');
const rows = statement.all() as UpdateRow[];
return rows;
} catch (error) {
logger.error('getUpdates', error);
return [];
}
}

// add a single update to SQLite
addUpdateToSQLite(updates: Uint8Array[]) {
// batch write instead write per key stroke?
try {
assert(this.db, 'db is not connected');
const start = performance.now();
const statement = this.db.prepare(
'INSERT INTO updates (data) VALUES (?)'
);
const insertMany = this.db.transaction(updates => {
for (const d of updates) {
statement.run(d);
}
});

insertMany(updates);

logger.debug(
`[SQLiteAdapter][${this.role}] addUpdateToSQLite`,
'length:',
updates.length,
performance.now() - start,
'ms'
);
} catch (error) {
logger.error('addUpdateToSQLite', error);
}
}
}
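To make the adapter lifecycle above concrete, here is a hedged usage sketch: callers connect() before touching blobs or updates and destroy() when done. BaseSQLiteAdapter is abstract, so the sketch subclasses it with a made-up role; the file path is hypothetical and the snippet is illustrative rather than code from the repository.

import { BaseSQLiteAdapter } from './base-db-adapter';

// Hypothetical subclass used only for illustration.
class ScratchAdapter extends BaseSQLiteAdapter {
  role = 'scratch';
}

const adapter = new ScratchAdapter('/tmp/scratch.db'); // hypothetical path
adapter.connect(); // opens better-sqlite3 and creates the updates/blobs tables
adapter.addUpdateToSQLite([Buffer.from([1, 2, 3])]); // one transaction over the whole batch
adapter.addBlob('cover', Buffer.from([0xff]));
console.log(adapter.getBlobKeys()); // ['cover']
console.log(adapter.getUpdates().length); // 1
adapter.destroy(); // closes the handle and resets this.db to null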
94 apps/electron/layers/main/src/db/ensure-db.ts Normal file
@@ -0,0 +1,94 @@
import { app } from 'electron';
import {
defer,
firstValueFrom,
from,
fromEvent,
interval,
Observable,
} from 'rxjs';
import {
distinctUntilChanged,
filter,
ignoreElements,
last,
map,
shareReplay,
startWith,
switchMap,
takeUntil,
tap,
} from 'rxjs/operators';

import { appContext } from '../context';
import { logger } from '../logger';
import { getWorkspaceMeta$ } from '../workspace';
import { SecondaryWorkspaceSQLiteDB } from './secondary-db';
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
import { openWorkspaceDatabase } from './workspace-db-adapter';

const db$Map = new Map<string, Observable<WorkspaceSQLiteDB>>();

const beforeQuit$ = defer(() => fromEvent(app, 'before-quit'));

function getWorkspaceDB$(id: string) {
if (!db$Map.has(id)) {
db$Map.set(
id,
from(openWorkspaceDatabase(appContext, id)).pipe(
switchMap(db => {
return startPollingSecondaryDB(db).pipe(
ignoreElements(),
startWith(db),
takeUntil(beforeQuit$),
tap({
complete: () => {
logger.info('[ensureSQLiteDB] close db connection');
db.destroy();
db$Map.delete(id);
},
})
);
}),
shareReplay(1)
)
);
}
return db$Map.get(id)!;
}

// fixme: this function has issue on registering multiple times...
function startPollingSecondaryDB(db: WorkspaceSQLiteDB) {
const meta$ = getWorkspaceMeta$(db.workspaceId);
const secondaryDB$ = meta$.pipe(
map(meta => meta?.secondaryDBPath),
distinctUntilChanged(),
filter((p): p is string => !!p),
switchMap(path => {
const secondaryDB = new SecondaryWorkspaceSQLiteDB(path, db);
return new Observable<SecondaryWorkspaceSQLiteDB>(observer => {
observer.next(secondaryDB);
return () => {
secondaryDB.destroy();
};
});
})
);

// pull every 30 seconds
const poll$ = interval(30000).pipe(
switchMap(() => secondaryDB$),
tap({
next: secondaryDB => {
secondaryDB.pull();
},
}),
shareReplay(1)
);

return poll$.pipe(takeUntil(db.update$.pipe(last())), shareReplay(1));
}

export function ensureSQLiteDB(id: string) {
return firstValueFrom(getWorkspaceDB$(id));
}
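A short, hedged sketch of how a handler consumes this module: because getWorkspaceDB$ caches a shareReplay(1) observable per id, repeated calls to ensureSQLiteDB with the same id resolve to the same WorkspaceSQLiteDB instance until the 'before-quit' event completes the stream and destroys the db, which is exactly what the updated tests assert. The consumer below is illustrative, not code from the repository.

import { ensureSQLiteDB } from './ensure-db';

// Illustrative consumer, in the spirit of dbHandlers.getDocAsUpdates.
async function getDocSketch(workspaceId: string) {
  const db = await ensureSQLiteDB(workspaceId); // same instance for the same id
  return db.getDocAsUpdates(); // encoded Yjs state of the in-memory doc
}

// Calling it twice reuses the cached connection rather than reopening the file.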
38 apps/electron/layers/main/src/db/helper.ts Normal file
@@ -0,0 +1,38 @@
import type { Database } from 'better-sqlite3';
import sqlite from 'better-sqlite3';

import { logger } from '../logger';

export function isValidateDB(db: Database) {
// check if db has two tables, one for updates and one for blobs
const statement = db.prepare(
`SELECT name FROM sqlite_schema WHERE type='table'`
);
const rows = statement.all() as { name: string }[];
const tableNames = rows.map(row => row.name);
if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
return false;
}
}

export function isValidDBFile(path: string) {
let db: Database | null = null;
try {
db = sqlite(path);
// check if db has two tables, one for updates and one for blobs
const statement = db.prepare(
`SELECT name FROM sqlite_schema WHERE type='table'`
);
const rows = statement.all() as { name: string }[];
const tableNames = rows.map(row => row.name);
if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
return false;
}
return true;
} catch (error) {
logger.error('isValidDBFile', error);
return false;
} finally {
db?.close();
}
}
@@ -1,8 +1,10 @@
import fs from 'fs-extra';

import { appContext } from '../../context';
import type { NamespaceHandlers } from '../type';
import { appContext } from '../context';
import type { MainEventListener, NamespaceHandlers } from '../type';
import { ensureSQLiteDB } from './ensure-db';
import { dbSubjects } from './subjects';

export * from './ensure-db';
export * from './subjects';

export const dbHandlers = {
getDocAsUpdates: async (_, id: string) => {
@@ -25,18 +27,22 @@ export const dbHandlers = {
const workspaceDB = await ensureSQLiteDB(workspaceId);
return workspaceDB.deleteBlob(key);
},
getPersistedBlobs: async (_, workspaceId: string) => {
getBlobKeys: async (_, workspaceId: string) => {
const workspaceDB = await ensureSQLiteDB(workspaceId);
return workspaceDB.getPersistentBlobKeys();
return workspaceDB.getBlobKeys();
},
getDefaultStorageLocation: async () => {
return appContext.appDataPath;
},
getDBFilePath: async (_, workspaceId: string) => {
const workspaceDB = await ensureSQLiteDB(workspaceId);
return {
path: workspaceDB.path,
realPath: await fs.realpath(workspaceDB.path),
} satisfies NamespaceHandlers;

export const dbEvents = {
onExternalUpdate: (
fn: (update: { workspaceId: string; update: Uint8Array }) => void
) => {
const sub = dbSubjects.externalUpdate.subscribe(fn);
return () => {
sub.unsubscribe();
};
},
} satisfies NamespaceHandlers;
} satisfies Record<string, MainEventListener>;
191 apps/electron/layers/main/src/db/secondary-db.ts Normal file
@@ -0,0 +1,191 @@
import { debounce } from 'lodash-es';
import * as Y from 'yjs';

import type { AppContext } from '../context';
import { logger } from '../logger';
import type { YOrigin } from '../type';
import { getWorkspaceMeta } from '../workspace';
import { BaseSQLiteAdapter } from './base-db-adapter';
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';

const FLUSH_WAIT_TIME = 5000;
const FLUSH_MAX_WAIT_TIME = 10000;

export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
role = 'secondary';
yDoc = new Y.Doc();
firstConnected = false;

updateQueue: Uint8Array[] = [];

unsubscribers = new Set<() => void>();

constructor(
public override path: string,
public upstream: WorkspaceSQLiteDB
) {
super(path);
this.setupAndListen();
}

close() {
this.db?.close();
this.db = null;
}

override destroy() {
this.unsubscribers.forEach(unsub => unsub());
this.db?.close();
this.yDoc.destroy();
this.close();
}

get workspaceId() {
return this.upstream.workspaceId;
}

// do not update db immediately, instead, push to a queue
// and flush the queue in a future time
addUpdateToUpdateQueue(update: Uint8Array) {
this.updateQueue.push(update);
this.debouncedFlush();
}

flushUpdateQueue() {
logger.debug(
'flushUpdateQueue',
this.workspaceId,
'queue',
this.updateQueue.length
);
const updates = [...this.updateQueue];
this.updateQueue = [];
this.connect();
this.addUpdateToSQLite(updates);
this.close();
}

// flush after 5s, but will not wait for more than 10s
debouncedFlush = debounce(this.flushUpdateQueue, FLUSH_WAIT_TIME, {
maxWait: FLUSH_MAX_WAIT_TIME,
});

runCounter = 0;

// wrap the fn with connect and close
// it only works for sync functions
run = (fn: () => void) => {
try {
if (this.runCounter === 0) {
this.connect();
}
this.runCounter++;
fn();
} catch (err) {
logger.error(err);
} finally {
this.runCounter--;
if (this.runCounter === 0) {
this.close();
}
}
};

setupAndListen() {
if (this.firstConnected) {
return;
}
this.firstConnected = true;

const onUpstreamUpdate = (update: Uint8Array, origin: YOrigin) => {
if (origin === 'renderer') {
// update to upstream yDoc should be replicated to self yDoc
this.applyUpdate(update, 'upstream');
}
};

const onSelfUpdate = (update: Uint8Array, origin: YOrigin) => {
// for self update from upstream, we need to push it to external DB
if (origin === 'upstream') {
this.addUpdateToUpdateQueue(update);
}

if (origin === 'self') {
this.upstream.applyUpdate(update, 'external');
}
};

// listen to upstream update
this.upstream.yDoc.on('update', onUpstreamUpdate);
this.yDoc.on('update', onSelfUpdate);

this.unsubscribers.add(() => {
this.upstream.yDoc.off('update', onUpstreamUpdate);
this.yDoc.off('update', onSelfUpdate);
});

this.run(() => {
// apply all updates from upstream
const upstreamUpdate = this.upstream.getDocAsUpdates();
// to initialize the yDoc, we need to apply all updates from the db
this.applyUpdate(upstreamUpdate, 'upstream');

this.pull();
});
}

applyUpdate = (data: Uint8Array, origin: YOrigin = 'upstream') => {
Y.applyUpdate(this.yDoc, data, origin);
};

// TODO: have a better solution to handle blobs
syncBlobs() {
this.run(() => {
// pull blobs
const blobsKeys = this.getBlobKeys();
const upstreamBlobsKeys = this.upstream.getBlobKeys();
// put every missing blob to upstream
for (const key of blobsKeys) {
if (!upstreamBlobsKeys.includes(key)) {
const blob = this.getBlob(key);
if (blob) {
this.upstream.addBlob(key, blob);
logger.debug('syncBlobs', this.workspaceId, key);
}
}
}
});
}

/**
* pull from external DB file and apply to embedded yDoc
* workflow:
* - connect to external db
* - get updates
* - apply updates to local yDoc
* - get blobs and put new blobs to upstream
* - disconnect
*/
pull() {
this.run(() => {
// TODO: no need to get all updates, just get the latest ones (using a cursor, etc)?
const updates = this.getUpdates().map(update => update.data);
Y.transact(this.yDoc, () => {
updates.forEach(update => {
this.applyUpdate(update, 'self');
});
});
logger.debug('pull external updates', this.path, updates.length);

this.syncBlobs();
});
}
}

export async function getSecondaryWorkspaceDBPath(
context: AppContext,
workspaceId: string
) {
const meta = await getWorkspaceMeta(context, workspaceId);
return meta?.secondaryDBPath;
}
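The origin tags are what keep the two Y.Doc instances from echoing updates back and forth. As a reading aid, the hedged sketch below restates the routing implemented in setupAndListen and pull; the origin names ('renderer', 'upstream', 'self', 'external') are taken from the diff, while the summary function itself is hypothetical.

type YOriginSketch = 'renderer' | 'upstream' | 'self' | 'external';

// Describes what SecondaryWorkspaceSQLiteDB does with an update,
// given which doc emitted it and the origin it carried.
function routeUpdateSketch(source: 'upstream' | 'secondary', origin: YOriginSketch): string {
  if (source === 'upstream' && origin === 'renderer') {
    return 'replicate into the secondary yDoc (re-tagged as "upstream")';
  }
  if (source === 'secondary' && origin === 'upstream') {
    return 'queue for the debounced flush into the external .affine file';
  }
  if (source === 'secondary' && origin === 'self') {
    return 'came from pull(): forward to the upstream doc with origin "external"';
  }
  return 'ignore, which is what prevents echo loops';
}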
7 apps/electron/layers/main/src/db/subjects.ts Normal file
@@ -0,0 +1,7 @@
import { Subject } from 'rxjs';

export const dbSubjects = {
// emit workspace id when the db file is missing
fileMissing: new Subject<string>(),
externalUpdate: new Subject<{ workspaceId: string; update: Uint8Array }>(),
};
105 apps/electron/layers/main/src/db/workspace-db-adapter.ts Normal file
@@ -0,0 +1,105 @@
import { Subject } from 'rxjs';
import * as Y from 'yjs';

import type { AppContext } from '../context';
import { logger } from '../logger';
import type { YOrigin } from '../type';
import { getWorkspaceMeta } from '../workspace';
import { BaseSQLiteAdapter } from './base-db-adapter';
import { dbSubjects } from './subjects';

export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
role = 'primary';
yDoc = new Y.Doc();
firstConnected = false;

update$ = new Subject<void>();

constructor(public override path: string, public workspaceId: string) {
super(path);
}

override destroy() {
this.db?.close();
this.yDoc.destroy();

// when db is closed, we can safely remove it from ensure-db list
this.update$.complete();
}

getWorkspaceName = () => {
return this.yDoc.getMap('space:meta').get('name') as string;
};

override connect() {
const db = super.connect();

if (!this.firstConnected) {
this.yDoc.on('update', (update: Uint8Array, origin: YOrigin) => {
if (origin !== 'self') {
this.addUpdateToSQLite([update]);
}
if (origin === 'external') {
logger.debug('external update', this.workspaceId);
dbSubjects.externalUpdate.next({
workspaceId: this.workspaceId,
update,
});
}
});
}

const updates = this.getUpdates();
// to initialize the yDoc, we need to apply all updates from the db
Y.transact(this.yDoc, () => {
updates.forEach(update => {
this.applyUpdate(update.data, 'self');
});
});

this.firstConnected = true;
this.update$.next();

return db;
}

getDocAsUpdates = () => {
return Y.encodeStateAsUpdate(this.yDoc);
};

// non-blocking and use yDoc to validate the update
// after that, the update is added to the db
applyUpdate = (data: Uint8Array, origin: YOrigin = 'renderer') => {
// todo: trim the updates when the number of records is too large
// 1. store the current ydoc state in the db
// 2. then delete the old updates
// yjs-idb will always trim the db for the first time after DB is loaded
Y.applyUpdate(this.yDoc, data, origin);
};

override addBlob(key: string, value: Uint8Array) {
const res = super.addBlob(key, value);
this.update$.next();
return res;
}

override deleteBlob(key: string) {
super.deleteBlob(key);
this.update$.next();
}

override addUpdateToSQLite(data: Uint8Array[]) {
super.addUpdateToSQLite(data);
this.update$.next();
}
}

export async function openWorkspaceDatabase(
context: AppContext,
workspaceId: string
) {
const meta = await getWorkspaceMeta(context, workspaceId);
const db = new WorkspaceSQLiteDB(meta.mainDBPath, workspaceId);
await db.connect();
return db;
}
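A hedged sketch of how updates reach the primary adapter: renderer-origin updates are persisted through the yDoc 'update' listener, and updates tagged 'external' (pushed over from the secondary DB) are persisted as well and additionally re-broadcast through dbSubjects.externalUpdate. The snippet is illustrative; the webContents call in the comment is an assumption about the consuming side, not part of this diff.

import { ensureSQLiteDB } from './ensure-db';
import { dbSubjects } from './subjects';

// Feed a renderer-produced update into the primary DB; the default origin 'renderer'
// means the yDoc 'update' listener persists it to SQLite and bumps update$.
async function applyRendererUpdateSketch(workspaceId: string, update: Uint8Array) {
  const db = await ensureSQLiteDB(workspaceId);
  db.applyUpdate(update, 'renderer');
}

// Externally pulled updates surface here; forwarding them to a BrowserWindow is one
// plausible consumer (see dbEvents.onExternalUpdate in the db handlers above).
const sub = dbSubjects.externalUpdate.subscribe(({ workspaceId, update }) => {
  // e.g. mainWindow.webContents.send('db:onExternalUpdate', { workspaceId, update }) -- assumption
});
// sub.unsubscribe() when the consumer goes away.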
@@ -1,22 +1,31 @@
import path from 'node:path';

import { app } from 'electron';
import { dialog, shell } from 'electron';
import fs from 'fs-extra';
import { nanoid } from 'nanoid';

import { appContext } from '../../context';
import { logger } from '../../logger';
import { ensureSQLiteDB, isRemoveOrMoveEvent } from '../db/ensure-db';
import type { WorkspaceSQLiteDB } from '../db/sqlite';
import { getWorkspaceDBPath, isValidDBFile } from '../db/sqlite';
import { listWorkspaces } from '../workspace/workspace';
import { appContext } from '../context';
import { ensureSQLiteDB } from '../db/ensure-db';
import { isValidDBFile } from '../db/helper';
import type { WorkspaceSQLiteDB } from '../db/workspace-db-adapter';
import { logger } from '../logger';
import {
getWorkspaceDBPath,
getWorkspaceMeta,
listWorkspaces,
storeWorkspaceMeta,
} from '../workspace';

// NOTE:
// we are using native dialogs because HTML dialogs do not give full file paths

export async function revealDBFile(workspaceId: string) {
const workspaceDB = await ensureSQLiteDB(workspaceId);
shell.showItemInFolder(await fs.realpath(workspaceDB.path));
const meta = await getWorkspaceMeta(appContext, workspaceId);
if (!meta) {
return;
}
shell.showItemInFolder(meta.secondaryDBPath ?? meta.mainDBPath);
}

// provide a backdoor to set dialog path for testing in playwright
@@ -60,6 +69,14 @@ interface SaveDBFileResult {
error?: ErrorMessage;
}

const extension = 'affine';

function getDefaultDBFileName(name: string, id: string) {
const fileName = `${name}_${id}.${extension}`;
// make sure fileName is a valid file name
return fileName.replace(/[/\\?%*:|"<>]/g, '-');
}

/**
* This function is called when the user clicks the "Save" button in the "Save Workspace" dialog.
*
@@ -77,7 +94,13 @@ export async function saveDBFileAs(
title: 'Save Workspace',
showsTagField: false,
buttonLabel: 'Save',
defaultPath: `${db.getWorkspaceName()}_${workspaceId}.db`,
filters: [
{
extensions: [extension],
name: '',
},
],
defaultPath: getDefaultDBFileName(db.getWorkspaceName(), workspaceId),
message: 'Save Workspace as a SQLite Database file',
}));
const filePath = ret.filePath;
@@ -109,27 +132,20 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
try {
const ret =
getFakedResult() ??
(await dialog.showSaveDialog({
properties: ['showOverwriteConfirmation'],
title: 'Set database location',
showsTagField: false,
(await dialog.showOpenDialog({
properties: ['openDirectory'],
title: 'Set Workspace Storage Location',
buttonLabel: 'Select',
defaultPath: `workspace-storage.db`,
defaultPath: app.getPath('documents'),
message: "Select a location to store the workspace's database file",
}));
const filePath = ret.filePath;
if (ret.canceled || !filePath) {
const dir = ret.filePaths?.[0];
if (ret.canceled || !dir) {
return {
canceled: true,
};
}
// the same db file cannot be loaded twice
if (await dbFileAlreadyLoaded(filePath)) {
return {
error: 'DB_FILE_ALREADY_LOADED',
};
}
return { filePath };
return { filePath: dir };
} catch (err) {
logger.error('selectDBFileLocation', err);
return {
@@ -170,10 +186,10 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
{
name: 'SQLite Database',
// do we want to support other file format?
extensions: ['db'],
extensions: ['db', 'affine'],
},
],
message: 'Load Workspace from a SQLite Database file',
message: 'Load Workspace from a AFFiNE file',
}));
const filePath = ret.filePaths?.[0];
if (ret.canceled || !filePath) {
@@ -197,14 +213,20 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
return { error: 'DB_FILE_INVALID' }; // invalid db file
}

// symlink the db file to a new workspace id
// copy the db file to a new workspace id
const workspaceId = nanoid(10);
const linkedFilePath = await getWorkspaceDBPath(appContext, workspaceId);
const internalFilePath = getWorkspaceDBPath(appContext, workspaceId);

await fs.ensureDir(path.join(appContext.appDataPath, 'workspaces'));

await fs.symlink(filePath, linkedFilePath, 'file');
logger.info(`loadDBFile, symlink: ${filePath} -> ${linkedFilePath}`);
await fs.copy(filePath, internalFilePath);
logger.info(`loadDBFile, copy: ${filePath} -> ${internalFilePath}`);

await storeWorkspaceMeta(appContext, workspaceId, {
id: workspaceId,
mainDBPath: internalFilePath,
secondaryDBPath: filePath,
});

return { workspaceId };
} catch (err) {
@@ -225,58 +247,51 @@ interface MoveDBFileResult {
* This function is called when the user clicks the "Move" button in the "Move Workspace Storage" setting.
*
* It will
* - move the source db file to a new location
* - symlink the new location to the old db file
* - copy the source db file to a new location
* - remove the old db external file
* - update the external db file path in the workspace meta
* - return the new file path
*/
export async function moveDBFile(
workspaceId: string,
dbFileLocation?: string
dbFileDir?: string
): Promise<MoveDBFileResult> {
let db: WorkspaceSQLiteDB | null = null;
try {
const { moveFile, FsWatcher } = await import('@affine/native');
db = await ensureSQLiteDB(workspaceId);
// get the real file path of db
const realpath = await fs.realpath(db.path);
const isLink = realpath !== db.path;
const watcher = FsWatcher.watch(realpath, { recursive: false });
const waitForRemove = new Promise<void>(resolve => {
const subscription = watcher.subscribe(event => {
if (isRemoveOrMoveEvent(event)) {
subscription.unsubscribe();
// resolve after FSWatcher in `database$` is fired
setImmediate(() => {
resolve();
});
}
});
});
const newFilePath =
dbFileLocation ??

const meta = await getWorkspaceMeta(appContext, workspaceId);

const oldDir = meta.secondaryDBPath
? path.dirname(meta.secondaryDBPath)
: null;
const defaultDir = oldDir ?? app.getPath('documents');

const newName = getDefaultDBFileName(db.getWorkspaceName(), workspaceId);

const newDirPath =
dbFileDir ??
(
getFakedResult() ??
(await dialog.showSaveDialog({
properties: ['showOverwriteConfirmation'],
(await dialog.showOpenDialog({
properties: ['openDirectory'],
title: 'Move Workspace Storage',
showsTagField: false,
buttonLabel: 'Save',
defaultPath: realpath,
buttonLabel: 'Move',
defaultPath: defaultDir,
message: 'Move Workspace storage file',
}))
).filePath;
).filePaths?.[0];

// skips if
// - user canceled the dialog
// - user selected the same file
// - user selected the same file in the link file in app data dir
if (!newFilePath || newFilePath === realpath || db.path === newFilePath) {
// - user selected the same dir
if (!newDirPath || newDirPath === oldDir) {
return {
canceled: true,
};
}

db.db.close();
const newFilePath = path.join(newDirPath, newName);

if (await fs.pathExists(newFilePath)) {
return {
@@ -284,24 +299,19 @@ export async function moveDBFile(
};
}

if (isLink) {
// remove the old link to unblock new link
await fs.unlink(db.path);
logger.info(`[moveDBFile] copy ${meta.mainDBPath} -> ${newFilePath}`);

await fs.copy(meta.mainDBPath, newFilePath);

// remove the old db file, but we don't care if it fails
if (meta.secondaryDBPath) {
fs.remove(meta.secondaryDBPath);
}

logger.info(`[moveDBFile] move ${realpath} -> ${newFilePath}`);

await moveFile(realpath, newFilePath);

await fs.ensureSymlink(newFilePath, db.path, 'file');
logger.info(`[moveDBFile] symlink: ${realpath} -> ${newFilePath}`);
// wait for the file move event emits to the FileWatcher in database$ in ensure-db.ts
// so that the db will be destroyed and we can call the `ensureSQLiteDB` in the next step
// or the FileWatcher will continue listen on the `realpath` and emit file change events
// then the database will reload while receiving these events; and the moved database file will be recreated while reloading database
await waitForRemove;
logger.info(`removed`);
await ensureSQLiteDB(workspaceId);
// update meta
await storeWorkspaceMeta(appContext, workspaceId, {
secondaryDBPath: newFilePath,
});

return {
filePath: newFilePath,
@@ -317,7 +327,6 @@ export async function moveDBFile(

async function dbFileAlreadyLoaded(path: string) {
const meta = await listWorkspaces(appContext);
const realpath = await fs.realpath(path);
const paths = meta.map(m => m[1].realpath);
return paths.includes(realpath);
const paths = meta.map(m => m[1].secondaryDBPath);
return paths.includes(path);
}
@@ -1,14 +1,16 @@
import { app, BrowserWindow } from 'electron';

import { logger } from '../logger';
import { applicationMenuEvents } from './application-menu';
import { dbEvents } from './db';
import { updaterEvents } from './updater';
import { logger } from './logger';
import { updaterEvents } from './updater/event';
import { workspaceEvents } from './workspace';

export const allEvents = {
applicationMenu: applicationMenuEvents,
db: dbEvents,
updater: updaterEvents,
applicationMenu: applicationMenuEvents,
workspace: workspaceEvents,
};

function getActiveWindows() {
@@ -1,38 +0,0 @@
import { Subject } from 'rxjs';

import type { MainEventListener } from './type';

interface DBFilePathMeta {
workspaceId: string;
path: string;
realPath: string;
}

export const dbSubjects = {
// emit workspace ids
dbFileMissing: new Subject<string>(),
// emit workspace ids
dbFileUpdate: new Subject<string>(),
dbFilePathChange: new Subject<DBFilePathMeta>(),
};

export const dbEvents = {
onDBFileMissing: (fn: (workspaceId: string) => void) => {
const sub = dbSubjects.dbFileMissing.subscribe(fn);
return () => {
sub.unsubscribe();
};
},
onDBFileUpdate: (fn: (workspaceId: string) => void) => {
const sub = dbSubjects.dbFileUpdate.subscribe(fn);
return () => {
sub.unsubscribe();
};
},
onDBFilePathChange: (fn: (meta: DBFilePathMeta) => void) => {
const sub = dbSubjects.dbFilePathChange.subscribe(fn);
return () => {
sub.unsubscribe();
};
},
} satisfies Record<string, MainEventListener>;
@@ -1,9 +0,0 @@
export * from './register';

import { applicationMenuSubjects } from './application-menu';
import { dbSubjects } from './db';

export const subjects = {
db: dbSubjects,
applicationMenu: applicationMenuSubjects,
};
@@ -1 +0,0 @@
export type MainEventListener = (...args: any[]) => () => void;
@@ -2,4 +2,31 @@ import { allEvents as events } from './events';
import { allHandlers as handlers } from './handlers';

// this will be used by preload script to expose all handlers and events to the renderer process
// - register in exposeInMainWorld in preload
// - provide type hints
export { events, handlers };

export const getExposedMeta = () => {
const handlersMeta = Object.entries(handlers).map(
([namespace, namespaceHandlers]) => {
return [
namespace,
Object.keys(namespaceHandlers).map(handlerName => handlerName),
];
}
);

const eventsMeta = Object.entries(events).map(
([namespace, namespaceHandlers]) => {
return [
namespace,
Object.keys(namespaceHandlers).map(handlerName => handlerName),
];
}
);

return {
handlers: handlersMeta,
events: eventsMeta,
};
};
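getExposedMeta only returns namespace and handler-name pairs; the preload script is expected to turn them into ipcRenderer wrappers and hand them to the renderer via contextBridge, as the comment above says. The sketch below is a guess at that preload side rather than code from this diff: the channel naming scheme, the 'apis' key, and the ExposedMetaSketch type are all assumptions.

import { contextBridge, ipcRenderer } from 'electron';

type ExposedMetaSketch = {
  handlers: [string, string[]][];
  events: [string, string[]][];
};

// Build invoke() wrappers from the meta and expose them on window.apis ('apis' is an assumption).
function exposeHandlersSketch(meta: ExposedMetaSketch) {
  const api: Record<string, Record<string, (...args: unknown[]) => Promise<unknown>>> = {};
  for (const [namespace, names] of meta.handlers) {
    api[namespace] = {};
    for (const name of names) {
      // channel naming is assumed; the real preload may differ
      api[namespace][name] = (...args) => ipcRenderer.invoke(`${namespace}:${name}`, ...args);
    }
  }
  contextBridge.exposeInMainWorld('apis', api);
}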
@@ -1,21 +1,13 @@
import { ipcMain } from 'electron';

import { getLogFilePath, logger, revealLogFile } from '../logger';
import { dbHandlers } from './db';
import { dialogHandlers } from './dialog';
import { getLogFilePath, logger, revealLogFile } from './logger';
import type { NamespaceHandlers } from './type';
import { uiHandlers } from './ui';
import { updaterHandlers } from './updater';
import { workspaceHandlers } from './workspace';

type IsomorphicHandler = (
e: Electron.IpcMainInvokeEvent,
...args: any[]
) => Promise<any>;

type NamespaceHandlers = {
[key: string]: IsomorphicHandler;
};

export const debugHandlers = {
revealLogFile: async () => {
return revealLogFile();
@@ -27,12 +19,12 @@ export const debugHandlers = {

// Note: all of these handlers will be the single-source-of-truth for the apis exposed to the renderer process
export const allHandlers = {
workspace: workspaceHandlers,
ui: uiHandlers,
db: dbHandlers,
dialog: dialogHandlers,
debug: debugHandlers,
dialog: dialogHandlers,
ui: uiHandlers,
updater: updaterHandlers,
workspace: workspaceHandlers,
} satisfies Record<string, NamespaceHandlers>;

export const registerHandlers = () => {
@@ -1,160 +0,0 @@
import type { NotifyEvent } from '@affine/native/event';
import { createFSWatcher } from '@affine/native/fs-watcher';
import { app } from 'electron';
import {
connectable,
defer,
from,
fromEvent,
identity,
lastValueFrom,
Observable,
ReplaySubject,
Subject,
} from 'rxjs';
import {
debounceTime,
exhaustMap,
filter,
groupBy,
ignoreElements,
mergeMap,
shareReplay,
startWith,
switchMap,
take,
takeUntil,
tap,
} from 'rxjs/operators';

import { appContext } from '../../context';
import { subjects } from '../../events';
import { logger } from '../../logger';
import { ts } from '../../utils';
import type { WorkspaceSQLiteDB } from './sqlite';
import { openWorkspaceDatabase } from './sqlite';

const databaseInput$ = new Subject<string>();
export const databaseConnector$ = new ReplaySubject<WorkspaceSQLiteDB>();

const groupedDatabaseInput$ = databaseInput$.pipe(groupBy(identity));

export const database$ = connectable(
groupedDatabaseInput$.pipe(
mergeMap(workspaceDatabase$ =>
workspaceDatabase$.pipe(
// only open the first db with the same workspaceId, and emit it to the downstream
exhaustMap(workspaceId => {
logger.info('[ensureSQLiteDB] open db connection', workspaceId);
return from(openWorkspaceDatabase(appContext, workspaceId)).pipe(
switchMap(db => {
return startWatchingDBFile(db).pipe(
// ignore all events and only emit the db to the downstream
ignoreElements(),
startWith(db)
);
})
);
}),
shareReplay(1)
)
),
tap({
complete: () => {
logger.info('[FSWatcher] close all watchers');
createFSWatcher().close();
},
})
),
{
connector: () => databaseConnector$,
resetOnDisconnect: true,
}
);

export const databaseConnectableSubscription = database$.connect();

// 1. File delete
// 2. File move
// - on Linux, it's `type: { modify: { kind: 'rename', mode: 'from' } }`
// - on Windows, it's `type: { remove: { kind: 'any' } }`
// - on macOS, it's `type: { modify: { kind: 'rename', mode: 'any' } }`
export function isRemoveOrMoveEvent(event: NotifyEvent) {
return (
typeof event.type === 'object' &&
('remove' in event.type ||
('modify' in event.type &&
event.type.modify.kind === 'rename' &&
(event.type.modify.mode === 'from' ||
event.type.modify.mode === 'any')))
);
}

// if we removed the file, we will stop watching it
function startWatchingDBFile(db: WorkspaceSQLiteDB) {
const FSWatcher = createFSWatcher();
return new Observable<NotifyEvent>(subscriber => {
logger.info('[FSWatcher] start watching db file', db.workspaceId);
const subscription = FSWatcher.watch(db.path, {
recursive: false,
}).subscribe(
event => {
logger.info('[FSWatcher]', event);
subscriber.next(event);
// remove file or move file, complete the observable and close db
if (isRemoveOrMoveEvent(event)) {
subscriber.complete();
}
},
err => {
subscriber.error(err);
}
);
return () => {
// destroy on unsubscribe
logger.info('[FSWatcher] cleanup db file watcher', db.workspaceId);
db.destroy();
subscription.unsubscribe();
};
}).pipe(
debounceTime(1000),
filter(event => !isRemoveOrMoveEvent(event)),
tap({
next: () => {
logger.info(
'[FSWatcher] db file changed on disk',
db.workspaceId,
ts() - db.lastUpdateTime,
'ms'
);
db.reconnectDB();
subjects.db.dbFileUpdate.next(db.workspaceId);
},
complete: () => {
// todo: there is still a possibility that the file is deleted
// but we didn't get the event soon enough and another event tries to
// access the db
logger.info('[FSWatcher] db file missing', db.workspaceId);
subjects.db.dbFileMissing.next(db.workspaceId);
db.destroy();
},
}),
takeUntil(defer(() => fromEvent(app, 'before-quit')))
);
}

export function ensureSQLiteDB(id: string) {
const deferValue = lastValueFrom(
database$.pipe(
filter(db => db.workspaceId === id && db.db.open),
take(1),
tap({
error: err => {
logger.error('[ensureSQLiteDB] error', err);
},
})
)
);
databaseInput$.next(id);
return deferValue;
}
@ -1,247 +0,0 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import type { Database } from 'better-sqlite3';
|
||||
import sqlite from 'better-sqlite3';
|
||||
import fs from 'fs-extra';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { dbSubjects } from '../../events/db';
import { logger } from '../../logger';
import { ts } from '../../utils';

const schemas = [
  `CREATE TABLE IF NOT EXISTS "updates" (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    data BLOB NOT NULL,
    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
  )`,
  `CREATE TABLE IF NOT EXISTS "blobs" (
    key TEXT PRIMARY KEY NOT NULL,
    data BLOB NOT NULL,
    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
  )`,
];

interface UpdateRow {
  id: number;
  data: Buffer;
  timestamp: string;
}

interface BlobRow {
  key: string;
  data: Buffer;
  timestamp: string;
}

const SQLITE_ORIGIN = Symbol('sqlite-origin');

export class WorkspaceSQLiteDB {
  db: Database;
  ydoc = new Y.Doc();
  firstConnect = false;
  lastUpdateTime = ts();
  destroyed = false;

  constructor(public path: string, public workspaceId: string) {
    this.db = this.reconnectDB();
  }

  // release resources
  destroy = () => {
    this.db?.close();
    this.ydoc.destroy();
  };

  getWorkspaceName = () => {
    return this.ydoc.getMap('space:meta').get('name') as string;
  };

  reconnectDB = () => {
    logger.log('[WorkspaceSQLiteDB] open db', this.workspaceId);
    if (this.db) {
      this.db.close();
    }

    fs.realpath(this.path)
      .then(realPath => {
        dbSubjects.dbFilePathChange.next({
          workspaceId: this.workspaceId,
          path: this.path,
          realPath,
        });
      })
      .catch(() => {
        // skip error
      });

    // use cached version?
    const db = (this.db = sqlite(this.path));
    db.exec(schemas.join(';'));

    if (!this.firstConnect) {
      this.ydoc.on('update', (update: Uint8Array, origin) => {
        if (origin !== SQLITE_ORIGIN) {
          this.addUpdateToSQLite(update);
        }
      });
    }

    Y.transact(this.ydoc, () => {
      const updates = this.getUpdates();
      updates.forEach(update => {
        // pass SQLITE_ORIGIN so the 'update' handler above skips self-updates
        Y.applyUpdate(this.ydoc, update.data, SQLITE_ORIGIN);
      });
    });

    this.lastUpdateTime = ts();

    if (this.firstConnect) {
      logger.info('db reconnected', this.workspaceId);
    } else {
      logger.info('db connected', this.workspaceId);
    }

    this.firstConnect = true;

    return db;
  };

  getDocAsUpdates = () => {
    return Y.encodeStateAsUpdate(this.ydoc);
  };

  // non-blocking; the yDoc validates the update,
  // and the update is then added to the db
  applyUpdate = (data: Uint8Array) => {
    Y.applyUpdate(this.ydoc, data);

    // todo: trim the updates when the number of records grows too large:
    // 1. store the current ydoc state in the db
    // 2. then delete the old updates
    // yjs-idb will always trim the db the first time after the DB is loaded
    this.lastUpdateTime = ts();
    logger.debug('applyUpdate', this.workspaceId, this.lastUpdateTime);
  };

  addBlob = (key: string, data: Uint8Array) => {
    this.lastUpdateTime = ts();
    try {
      const statement = this.db.prepare(
        'INSERT INTO blobs (key, data) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET data = ?'
      );
      statement.run(key, data, data);
      return key;
    } catch (error) {
      logger.error('addBlob', error);
    }
  };

  getBlob = (key: string) => {
    try {
      const statement = this.db.prepare('SELECT data FROM blobs WHERE key = ?');
      const row = statement.get(key) as BlobRow;
      if (!row) {
        return null;
      }
      return row.data;
    } catch (error) {
      logger.error('getBlob', error);
      return null;
    }
  };

  deleteBlob = (key: string) => {
    this.lastUpdateTime = ts();
    try {
      const statement = this.db.prepare('DELETE FROM blobs WHERE key = ?');
      statement.run(key);
    } catch (error) {
      logger.error('deleteBlob', error);
    }
  };

  getPersistentBlobKeys = () => {
    try {
      const statement = this.db.prepare('SELECT key FROM blobs');
      const rows = statement.all() as BlobRow[];
      return rows.map(row => row.key);
    } catch (error) {
      logger.error('getPersistentBlobKeys', error);
      return [];
    }
  };

  private getUpdates = () => {
    try {
      const statement = this.db.prepare('SELECT * FROM updates');
      const rows = statement.all() as UpdateRow[];
      return rows;
    } catch (error) {
      logger.error('getUpdates', error);
      return [];
    }
  };

  // batch writes instead of writing per keystroke?
  private addUpdateToSQLite = (data: Uint8Array) => {
    try {
      const start = performance.now();
      const statement = this.db.prepare(
        'INSERT INTO updates (data) VALUES (?)'
      );
      statement.run(data);
      logger.debug(
        'addUpdateToSQLite',
        this.workspaceId,
        'length:',
        data.length,
        performance.now() - start,
        'ms'
      );
    } catch (error) {
      logger.error('addUpdateToSQLite', error);
    }
  };
}
export async function getWorkspaceDBPath(
  context: AppContext,
  workspaceId: string
) {
  const basePath = path.join(context.appDataPath, 'workspaces', workspaceId);
  await fs.ensureDir(basePath);
  return path.join(basePath, 'storage.db');
}

export async function openWorkspaceDatabase(
  context: AppContext,
  workspaceId: string
) {
  const dbPath = await getWorkspaceDBPath(context, workspaceId);
  return new WorkspaceSQLiteDB(dbPath, workspaceId);
}

export function isValidDBFile(path: string) {
  let db: Database | null = null;
  try {
    db = sqlite(path);
    // check if the db has both tables: one for updates and one for blobs
    const statement = db.prepare(
      `SELECT name FROM sqlite_schema WHERE type='table'`
    );
    const rows = statement.all() as { name: string }[];
    const tableNames = rows.map(row => row.name);
    if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
      return false;
    }
    db.close();
    return true;
  } catch (error) {
    logger.error('isValidDBFile', error);
    db?.close();
    return false;
  }
}
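For reference, a minimal sketch (not part of this diff) of driving the new WorkspaceSQLiteDB directly from the main process; the './db' import path and the appContext shape are assumptions based on the code above:

import * as Y from 'yjs';

import { appContext } from '../context'; // assumed path
import { openWorkspaceDatabase } from './db'; // assumed module path

async function roundTripExample(workspaceId: string) {
  // opens (and creates if needed) workspaces/<id>/storage.db
  const db = await openWorkspaceDatabase(appContext, workspaceId);

  // produce a Yjs update from a throwaway doc and persist it;
  // the ydoc 'update' listener writes it into the updates table
  const doc = new Y.Doc();
  doc.getText('text').insert(0, 'hello');
  db.applyUpdate(Y.encodeStateAsUpdate(doc));

  // read back the merged state of all persisted updates
  const merged = db.getDocAsUpdates();
  console.log('merged update bytes:', merged.byteLength);

  db.destroy();
}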
@ -1 +0,0 @@
export * from './register';
@ -1,8 +0,0 @@
export type IsomorphicHandler = (
  e: Electron.IpcMainInvokeEvent,
  ...args: any[]
) => Promise<any>;

export type NamespaceHandlers = {
  [key: string]: IsomorphicHandler;
};
@ -1,8 +0,0 @@
import { appContext } from '../../context';
import type { NamespaceHandlers } from '../type';
import { deleteWorkspace, listWorkspaces } from './workspace';

export const workspaceHandlers = {
  list: async () => listWorkspaces(appContext),
  delete: async (_, id: string) => deleteWorkspace(appContext, id),
} satisfies NamespaceHandlers;
@ -1,60 +0,0 @@
import path from 'node:path';

import fs from 'fs-extra';

import type { AppContext } from '../../context';
import { logger } from '../../logger';

interface WorkspaceMeta {
  path: string;
  realpath: string;
}

export async function listWorkspaces(
  context: AppContext
): Promise<[workspaceId: string, meta: WorkspaceMeta][]> {
  const basePath = path.join(context.appDataPath, 'workspaces');
  try {
    await fs.ensureDir(basePath);
    const dirs = await fs.readdir(basePath, {
      withFileTypes: true,
    });

    const meta = await Promise.all(
      dirs.map(async dir => {
        const dbFilePath = path.join(basePath, dir.name, 'storage.db');
        if (dir.isDirectory() && (await fs.exists(dbFilePath))) {
          // try read storage.db under it
          const realpath = await fs.realpath(dbFilePath);
          return [dir.name, { path: dbFilePath, realpath }] as [
            string,
            WorkspaceMeta
          ];
        } else {
          return null;
        }
      })
    );

    return meta.filter((w): w is [string, WorkspaceMeta] => !!w);
  } catch (error) {
    logger.error('listWorkspaces', error);
    return [];
  }
}

export async function deleteWorkspace(context: AppContext, id: string) {
  const basePath = path.join(context.appDataPath, 'workspaces', id);
  const movedPath = path.join(
    context.appDataPath,
    'delete-workspaces',
    `${id}`
  );
  try {
    return await fs.move(basePath, movedPath, {
      overwrite: true,
    });
  } catch (error) {
    logger.error('deleteWorkspace', error);
  }
}
@ -2,13 +2,13 @@ import './security-restrictions';

import { app } from 'electron';

import { createApplicationMenu } from './application-menu';
import { createApplicationMenu } from './application-menu/create';
import { registerEvents } from './events';
import { registerHandlers } from './handlers';
import { registerUpdater } from './handlers/updater';
import { logger } from './logger';
import { restoreOrCreateWindow } from './main-window';
import { registerProtocol } from './protocol';
import { registerUpdater } from './updater';

if (require('electron-squirrel-startup')) app.quit();
// allow tests to overwrite app name through passing args
@ -62,14 +62,3 @@ app
  .then(createApplicationMenu)
  .then(registerUpdater)
  .catch(e => console.error('Failed create window:', e));
/**
 * Check new app version in production mode only
 */
// FIXME: add me back later
// if (import.meta.env.PROD) {
//   app
//     .whenReady()
//     .then(() => import('electron-updater'))
//     .then(({ autoUpdater }) => autoUpdater.checkForUpdatesAndNotify())
//     .catch(e => console.error('Failed check updates:', e));
// }

@ -7,7 +7,7 @@ export function getLogFilePath() {
  return log.transports.file.getFile().path;
}

export function revealLogFile() {
export async function revealLogFile() {
  const filePath = getLogFilePath();
  shell.showItemInFolder(filePath);
  return await shell.openPath(filePath);
}

@ -3,6 +3,7 @@ import electronWindowState from 'electron-window-state';
import { join } from 'path';

import { isMacOS, isWindows } from '../../utils';
import { getExposedMeta } from './exposed';
import { logger } from './logger';

const IS_DEV: boolean =
@ -17,6 +18,8 @@ async function createWindow() {
    defaultHeight: 800,
  });

  const exposedMeta = getExposedMeta();

  const browserWindow = new BrowserWindow({
    titleBarStyle: isMacOS()
      ? 'hiddenInset'
@ -40,6 +43,8 @@ async function createWindow() {
      webviewTag: false, // The webview tag is not recommended. Consider alternatives like iframe or Electron's BrowserView. https://www.electronjs.org/docs/latest/api/webview-tag#warning
      spellcheck: false, // FIXME: enable?
      preload: join(__dirname, '../preload/index.js'),
      // serialize the exposed meta so it can be used in the preload script
      additionalArguments: [`--exposed-meta=` + JSON.stringify(exposedMeta)],
    },
  });
18 apps/electron/layers/main/src/type.ts Normal file
@ -0,0 +1,18 @@
export type MainEventListener = (...args: any[]) => () => void;

export type IsomorphicHandler = (
  e: Electron.IpcMainInvokeEvent,
  ...args: any[]
) => Promise<any>;

export type NamespaceHandlers = {
  [key: string]: IsomorphicHandler;
};

export interface WorkspaceMeta {
  id: string;
  mainDBPath: string;
  secondaryDBPath?: string; // assume there will be only one
}

export type YOrigin = 'self' | 'external' | 'upstream' | 'renderer';
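For illustration only, a hypothetical WorkspaceMeta value matching the shape above; the id and both paths are made up, with secondaryDBPath pointing at a user-chosen .affine copy as the e2e test further below expects:

import type { WorkspaceMeta } from './type'; // assumed sibling import

const exampleMeta: WorkspaceMeta = {
  id: '7a1b2c3d', // hypothetical workspace id
  mainDBPath: '/path/to/sessionData/workspaces/7a1b2c3d/storage.db', // hypothetical
  secondaryDBPath: '/path/to/Documents/AFFiNE/7a1b2c3d.affine', // hypothetical
};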
@ -122,4 +122,4 @@ const getMetaData = async function (
  return metadata;
};

export default getMetaData;
export { getMetaData };

@ -1,7 +1,7 @@
import { app, BrowserWindow, shell } from 'electron';
import { parse } from 'url';

import { logger } from '../../logger';
import { logger } from '../logger';

const redirectUri = 'https://affine.pro/client/auth-callback';

@ -1,8 +1,8 @@
import { app, BrowserWindow, nativeTheme, session } from 'electron';

import { isMacOS } from '../../../../utils';
import { isMacOS } from '../../../utils';
import type { NamespaceHandlers } from '../type';
import getMetaData from './getMetaData';
import { getMetaData } from './get-meta-data';
import { getGoogleOauthCode } from './google-auth';

export const uiHandlers = {
@ -2,9 +2,9 @@ import { app } from 'electron';
import type { AppUpdater } from 'electron-updater';
import { z } from 'zod';

import { isMacOS } from '../../../../utils';
import { updaterSubjects } from '../../events/updater';
import { logger } from '../../logger';
import { isMacOS } from '../../../utils';
import { logger } from '../logger';
import { updaterSubjects } from './event';

export const ReleaseTypeSchema = z.enum([
  'stable',
@ -34,12 +34,11 @@ export const checkForUpdatesAndNotify = async (force = true) => {
  // check every 30 minutes (1800 seconds) at most
  if (force || lastCheckTime + 1000 * 1800 < Date.now()) {
    lastCheckTime = Date.now();
    return _autoUpdater.checkForUpdatesAndNotify();
    return await _autoUpdater.checkForUpdatesAndNotify();
  }
};

export const registerUpdater = async () => {
  // requiring it at module scope causes side effects and breaks generate-main-exposed-meta,
  // so we wrap it in a function
  // eslint-disable-next-line @typescript-eslint/no-var-requires
  const { autoUpdater } = require('electron-updater');
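A small usage sketch (assumed, not part of this commit): scheduling non-forced checks that respect the 30-minute throttle above; the import path mirrors the handlers/updater re-export shown below.

import { checkForUpdatesAndNotify } from './handlers/updater'; // assumed path via the re-export below

// poll every 10 minutes; the throttle inside checkForUpdatesAndNotify
// ensures the updater is queried at most once per 30 minutes
setInterval(() => {
  checkForUpdatesAndNotify(false).catch(err => {
    console.error('update check failed', err);
  });
}, 10 * 60 * 1000);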
@ -1,6 +1,6 @@
import { BehaviorSubject, Subject } from 'rxjs';

import type { MainEventListener } from './type';
import type { MainEventListener } from '../type';

interface UpdateMeta {
  version: string;

@ -1,7 +1,7 @@
import { app } from 'electron';
import { app } from "electron";

import type { NamespaceHandlers } from '../type';
import { checkForUpdatesAndNotify, quitAndInstall } from './updater';
import type { NamespaceHandlers } from "../type";
import { checkForUpdatesAndNotify, quitAndInstall } from "./electron-updater";

export const updaterHandlers = {
  currentVersion: async () => {
@ -15,4 +15,4 @@ export const updaterHandlers = {
  },
} satisfies NamespaceHandlers;

export * from './updater';
export * from "./electron-updater";
@ -1,19 +1,3 @@
export function debounce<T extends (...args: any[]) => void>(
  fn: T,
  delay: number
) {
  let timeoutId: NodeJS.Timer | undefined;
  return (...args: Parameters<T>) => {
    if (timeoutId) {
      clearTimeout(timeoutId);
    }
    timeoutId = setTimeout(() => {
      fn(...args);
      timeoutId = undefined;
    }, delay);
  };
}

export function ts() {
export function getTime() {
  return new Date().getTime();
}
136 apps/electron/layers/main/src/workspace/handlers.ts Normal file
@ -0,0 +1,136 @@
import path from 'node:path';

import fs from 'fs-extra';

import { type AppContext } from '../context';
import { ensureSQLiteDB } from '../db/ensure-db';
import { logger } from '../logger';
import type { WorkspaceMeta } from '../type';
import { workspaceSubjects } from './subjects';

export async function listWorkspaces(
  context: AppContext
): Promise<[workspaceId: string, meta: WorkspaceMeta][]> {
  const basePath = getWorkspacesBasePath(context);
  try {
    await fs.ensureDir(basePath);
    const dirs = await fs.readdir(basePath, {
      withFileTypes: true,
    });
    const metaList = (
      await Promise.all(
        dirs.map(async dir => {
          // ? shall we put all meta in a single file instead of one file per workspace?
          return await getWorkspaceMeta(context, dir.name);
        })
      )
    ).filter((w): w is WorkspaceMeta => !!w);
    return metaList.map(meta => [meta.id, meta]);
  } catch (error) {
    logger.error('listWorkspaces', error);
    return [];
  }
}

export async function deleteWorkspace(context: AppContext, id: string) {
  const basePath = getWorkspaceBasePath(context, id);
  const movedPath = path.join(
    context.appDataPath,
    'delete-workspaces',
    `${id}`
  );
  try {
    const db = await ensureSQLiteDB(id);
    db.destroy();
    // TODO: should remove DB connection first
    return await fs.move(basePath, movedPath, {
      overwrite: true,
    });
  } catch (error) {
    logger.error('deleteWorkspace', error);
  }
}

export function getWorkspacesBasePath(context: AppContext) {
  return path.join(context.appDataPath, 'workspaces');
}

export function getWorkspaceBasePath(context: AppContext, workspaceId: string) {
  return path.join(context.appDataPath, 'workspaces', workspaceId);
}

export function getWorkspaceDBPath(context: AppContext, workspaceId: string) {
  const basePath = getWorkspaceBasePath(context, workspaceId);
  return path.join(basePath, 'storage.db');
}

export function getWorkspaceMetaPath(context: AppContext, workspaceId: string) {
  const basePath = getWorkspaceBasePath(context, workspaceId);
  return path.join(basePath, 'meta.json');
}

/**
 * Get the workspace meta, creating it if it does not exist.
 * This function will also migrate the workspace if needed.
 */
export async function getWorkspaceMeta(
  context: AppContext,
  workspaceId: string
): Promise<WorkspaceMeta> {
  try {
    const basePath = getWorkspaceBasePath(context, workspaceId);
    const metaPath = getWorkspaceMetaPath(context, workspaceId);
    if (!(await fs.exists(metaPath))) {
      // since no meta is found, migrate the symlinked db file if needed
      await fs.ensureDir(basePath);
      const dbPath = getWorkspaceDBPath(context, workspaceId);

      // todo: remove this after migration (in stable version)
      const realDBPath = (await fs.exists(dbPath))
        ? await fs.realpath(dbPath)
        : dbPath;
      const isLink = realDBPath !== dbPath;
      if (isLink) {
        await fs.copy(realDBPath, dbPath);
      }
      // create one if not exists
      const meta = {
        id: workspaceId,
        mainDBPath: dbPath,
        secondaryDBPath: isLink ? realDBPath : undefined,
      };
      await fs.writeJSON(metaPath, meta);
      return meta;
    } else {
      const meta = await fs.readJSON(metaPath);
      return meta;
    }
  } catch (err) {
    logger.error('getWorkspaceMeta failed', err);
    throw err;
  }
}

export async function storeWorkspaceMeta(
  context: AppContext,
  workspaceId: string,
  meta: Partial<WorkspaceMeta>
) {
  try {
    const basePath = getWorkspaceBasePath(context, workspaceId);
    await fs.ensureDir(basePath);
    const metaPath = path.join(basePath, 'meta.json');
    const currentMeta = await getWorkspaceMeta(context, workspaceId);
    const newMeta = {
      ...currentMeta,
      ...meta,
    };
    await fs.writeJSON(metaPath, newMeta);
    workspaceSubjects.meta.next({
      workspaceId,
      meta: newMeta,
    });
  } catch (err) {
    logger.error('storeWorkspaceMeta failed', err);
  }
}
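As a usage sketch (assumed, not part of this diff): recording a newly chosen secondary location after a successful move or export, relying on storeWorkspaceMeta to merge the partial meta and broadcast it through workspaceSubjects.meta:

import { appContext } from '../context'; // assumed path
import { storeWorkspaceMeta } from './handlers';

// hypothetical helper: persist where the user moved the db file to
export async function rememberSecondaryDBPath(
  workspaceId: string,
  secondaryDBPath: string
) {
  await storeWorkspaceMeta(appContext, workspaceId, { secondaryDBPath });
}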
41 apps/electron/layers/main/src/workspace/index.ts Normal file
@ -0,0 +1,41 @@
import { from, merge } from 'rxjs';
import { map } from 'rxjs/operators';

import { appContext } from '../context';
import type {
  MainEventListener,
  NamespaceHandlers,
  WorkspaceMeta,
} from '../type';
import { deleteWorkspace, getWorkspaceMeta, listWorkspaces } from './handlers';
import { workspaceSubjects } from './subjects';

export * from './handlers';
export * from './subjects';

export const workspaceEvents = {
  onMetaChange: (
    fn: (meta: { workspaceId: string; meta: WorkspaceMeta }) => void
  ) => {
    const sub = workspaceSubjects.meta.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
} satisfies Record<string, MainEventListener>;

export const workspaceHandlers = {
  list: async () => listWorkspaces(appContext),
  delete: async (_, id: string) => deleteWorkspace(appContext, id),
  getMeta: async (_, id: string) => {
    return getWorkspaceMeta(appContext, id);
  },
} satisfies NamespaceHandlers;

// used internally. Get a stream of workspace id -> meta
export const getWorkspaceMeta$ = (workspaceId: string) => {
  return merge(
    from(getWorkspaceMeta(appContext, workspaceId)),
    workspaceSubjects.meta.pipe(map(meta => meta.meta))
  );
};
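A short consumption sketch (assumed): getWorkspaceMeta$ emits the current meta once, then re-emits whenever workspaceSubjects.meta fires (note the subject is not filtered by workspace id here):

import { getWorkspaceMeta$ } from './workspace'; // assumed import path

const subscription = getWorkspaceMeta$('some-workspace-id').subscribe(meta => {
  console.log('workspace meta:', meta.mainDBPath, meta.secondaryDBPath);
});

// stop listening when the consumer goes away
subscription.unsubscribe();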
7 apps/electron/layers/main/src/workspace/subjects.ts Normal file
@ -0,0 +1,7 @@
import { Subject } from 'rxjs';

import type { WorkspaceMeta } from '../type';

export const workspaceSubjects = {
  meta: new Subject<{ workspaceId: string; meta: WorkspaceMeta }>(),
};

@ -24,17 +24,17 @@ type MainExposedMeta = {
  events: [namespace: string, eventNames: string[]][];
};

const meta: MainExposedMeta = (() => {
  const val = process.argv
    .find(arg => arg.startsWith('--exposed-meta='))
    ?.split('=')[1];

  return val ? JSON.parse(val) : null;
})();

// main handlers that can be invoked from the renderer process
const apis: PreloadHandlers = (() => {
  // the following were generated by the build script
  // 1. bundle extra main/src/expose.ts entry
  // 2. use generate-main-exposed-meta.mjs to generate exposed-meta.js in dist
  //
  // we cannot directly import main/src/handlers.ts because it will be bundled into the preload bundle
  // eslint-disable-next-line @typescript-eslint/no-var-requires
  const {
    handlers: handlersMeta,
  }: MainExposedMeta = require('../main/exposed-meta');
  const { handlers: handlersMeta } = meta;

  const all = handlersMeta.map(([namespace, functionNames]) => {
    const namespaceApis = functionNames.map(name => {
@ -54,9 +54,7 @@ const apis: PreloadHandlers = (() => {

// main events that can be listened to from the renderer process
const events: MainIPCEventMap = (() => {
  const {
    events: eventsMeta,
  }: MainExposedMeta = require('../main/exposed-meta');
  const { events: eventsMeta } = meta;

  // NOTE: ui may try to listen to a lot of the same events, so we increase the limit...
  ipcRenderer.setMaxListeners(100);
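For orientation, a hypothetical value of the serialized meta parsed from --exposed-meta above, reusing the MainExposedMeta type from this preload snippet; only the workspace namespace entries are taken from this diff, other namespaces are omitted:

const exampleExposedMeta: MainExposedMeta = {
  handlers: [
    // namespace -> handler names, mirroring workspaceHandlers above
    ['workspace', ['list', 'delete', 'getMeta']],
  ],
  events: [
    // namespace -> event names, mirroring workspaceEvents above
    ['workspace', ['onMetaChange']],
  ],
};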
@ -15,7 +15,6 @@
    "prod": "yarn electron-rebuild && yarn node scripts/dev.mjs",
    "build-layers": "zx scripts/build-layers.mjs",
    "generate-assets": "zx scripts/generate-assets.mjs",
    "generate-main-exposed-meta": "zx scripts/generate-main-exposed-meta.mjs",
    "package": "electron-forge package",
    "make": "electron-forge make",
    "rebuild:for-unit-test": "yarn rebuild better-sqlite3",
@ -60,6 +59,7 @@
    "cheerio": "^1.0.0-rc.12",
    "chokidar": "^3.5.3",
    "electron-updater": "^5.3.0",
    "lodash-es": "^4.17.21",
    "nanoid": "^4.0.2",
    "rxjs": "^7.8.1",
    "yjs": "^13.6.1"

@ -25,8 +25,6 @@ async function buildLayers() {
      'process.env.BUILD_TYPE': `"${process.env.BUILD_TYPE || 'stable'}"`,
    },
  });

  await $`yarn workspace @affine/electron generate-main-exposed-meta`;
}

await buildLayers();

@ -31,10 +31,7 @@ export const config = () => {

  return {
    main: {
      entryPoints: [
        resolve(root, './layers/main/src/index.ts'),
        resolve(root, './layers/main/src/exposed.ts'),
      ],
      entryPoints: [resolve(root, './layers/main/src/index.ts')],
      outdir: resolve(root, './dist/layers/main'),
      bundle: true,
      target: `node${NODE_MAJOR_VERSION}`,
@ -54,7 +51,7 @@ export const config = () => {
      bundle: true,
      target: `node${NODE_MAJOR_VERSION}`,
      platform: 'node',
      external: ['electron', '../main/exposed-meta'],
      external: ['electron'],
      define: define,
    },
  };

@ -1,5 +1,5 @@
/* eslint-disable no-async-promise-executor */
import { execSync, spawn } from 'node:child_process';
import { spawn } from 'node:child_process';
import { readFileSync } from 'node:fs';
import path from 'node:path';

@ -105,8 +105,6 @@ async function watchMain() {
      name: 'electron-dev:reload-app-on-main-change',
      setup(build) {
        build.onEnd(() => {
          execSync('yarn generate-main-exposed-meta');

          if (initialBuild) {
            console.log(`[main] has changed, [re]launching electron...`);
            spawnOrReloadElectron();

@ -1,36 +0,0 @@
const mainDistDir = path.resolve(__dirname, '../dist/layers/main');

// be careful and avoid any side effects in
const { handlers, events } = await import(
  'file://' + path.resolve(mainDistDir, 'exposed.js')
);

const handlersMeta = Object.entries(handlers).map(
  ([namespace, namespaceHandlers]) => {
    return [
      namespace,
      Object.keys(namespaceHandlers).map(handlerName => handlerName),
    ];
  }
);

const eventsMeta = Object.entries(events).map(
  ([namespace, namespaceHandlers]) => {
    return [
      namespace,
      Object.keys(namespaceHandlers).map(handlerName => handlerName),
    ];
  }
);

const meta = {
  handlers: handlersMeta,
  events: eventsMeta,
};

await fs.writeFile(
  path.resolve(mainDistDir, 'exposed-meta.js'),
  `module.exports = ${JSON.stringify(meta)};`
);

console.log('generate main exposed-meta.js done');
@ -23,7 +23,7 @@ test('move workspace db file', async ({ page, appInfo, workspace }) => {
  // goto settings
  await settingButton.click();

  const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp.db');
  const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp-dir');

  // move db file to tmp folder
  await page.evaluate(tmpPath => {
@ -36,6 +36,9 @@ test('move workspace db file', async ({ page, appInfo, workspace }) => {
  // check if db file exists
  await page.waitForSelector('text="Move folder success"');
  expect(await fs.exists(tmpPath)).toBe(true);
  // check if a db file exists under tmpPath (a file ending with .affine)
  const files = await fs.readdir(tmpPath);
  expect(files.some(f => f.endsWith('.affine'))).toBe(true);
});

test('export then add', async ({ page, appInfo, workspace }) => {
@ -56,7 +59,7 @@ test('export then add', async ({ page, appInfo, workspace }) => {

  const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp.db');

  // move db file to tmp folder
  // export db file to tmp folder
  await page.evaluate(tmpPath => {
    window.apis?.dialog.setFakeDialogResult({
      filePath: tmpPath,

@ -9,7 +9,8 @@
    "types": ["node"],
    "outDir": "dist",
    "moduleResolution": "node",
    "resolveJsonModule": true
    "resolveJsonModule": true,
    "noImplicitOverride": true
  },
  "include": ["**/*.ts", "**/*.tsx", "package.json"],
  "exclude": ["out", "dist", "node_modules"],
@ -25,24 +25,22 @@ import { CameraIcon } from './icons';
import { WorkspaceLeave } from './leave';
import { StyledInput } from './style';

const useDBFilePathMeta = (workspaceId: string) => {
  const [meta, setMeta] = useState<{
    path: string;
    realPath: string;
  }>();
const useShowOpenDBFile = (workspaceId: string) => {
  const [show, setShow] = useState(false);
  useEffect(() => {
    if (window.apis && window.events) {
      window.apis.db.getDBFilePath(workspaceId).then(meta => {
        setMeta(meta);
    if (window.apis && window.events && environment.isDesktop) {
      window.apis.workspace.getMeta(workspaceId).then(meta => {
        setShow(!!meta.secondaryDBPath);
      });
      return window.events.db.onDBFilePathChange(meta => {
        if (meta.workspaceId === workspaceId) {
          setMeta(meta);
      return window.events.workspace.onMetaChange(newMeta => {
        if (newMeta.workspaceId === workspaceId) {
          const meta = newMeta.meta;
          setShow(!!meta.secondaryDBPath);
        }
      });
    }
  }, [workspaceId]);
  return meta;
  return show;
};

export const GeneralPanel: React.FC<PanelProps> = ({
@ -58,9 +56,7 @@ export const GeneralPanel: React.FC<PanelProps> = ({
  const isOwner = useIsWorkspaceOwner(workspace);
  const t = useAFFiNEI18N();

  const dbPathMeta = useDBFilePathMeta(workspace.id);
  const showOpenFolder =
    environment.isDesktop && dbPathMeta?.path !== dbPathMeta?.realPath;
  const showOpenFolder = useShowOpenDBFile(workspace.id);

  const handleUpdateWorkspaceName = (name: string) => {
    setName(name);

@ -5,7 +5,7 @@ import {
  TableHead,
  TableRow,
} from '@affine/component';
import { DEFAULT_SORT_KEY } from "@affine/env/constant";
import { DEFAULT_SORT_KEY } from '@affine/env/constant';
import { useAFFiNEI18N } from '@affine/i18n/hooks';
import { ArrowDownBigIcon, ArrowUpBigIcon } from '@blocksuite/icons';
import { useMediaQuery, useTheme } from '@mui/material';

@ -1,5 +1,4 @@
module.exports.createFSWatcher = function createFSWatcher() {
  // require it in the function level so that it won't break the `generate-main-exposed-meta.mjs`
  // eslint-disable-next-line @typescript-eslint/no-var-requires
  const { FsWatcher } = require('./index');
  return FsWatcher;
@ -22,7 +22,7 @@ export const createSQLiteStorage = (workspaceId: string): BlobStorage => {
      return apis.db.deleteBlob(workspaceId, key);
    },
    list: async () => {
      return apis.db.getPersistedBlobs(workspaceId);
      return apis.db.getBlobKeys(workspaceId);
    },
  },
};

@ -14,7 +14,9 @@ let provider: SQLiteProvider;

let offlineYdoc: YType.Doc;

let triggerDBUpdate: ((_: string) => void) | null = null;
let triggerDBUpdate:
  | Parameters<typeof window.events.db.onExternalUpdate>[0]
  | null = null;

const mockedAddBlob = vi.fn();

@ -27,7 +29,7 @@ vi.stubGlobal('window', {
    applyDocUpdate: async (id: string, update: Uint8Array) => {
      Y.applyUpdate(offlineYdoc, update, 'sqlite');
    },
    getPersistedBlobs: async () => {
    getBlobKeys: async () => {
      // todo: may need to hack the way to get hash keys of blobs
      return [];
    },
@ -36,20 +38,12 @@ vi.stubGlobal('window', {
  },
  events: {
    db: {
      onDBFileUpdate: (fn: (id: string) => void) => {
      onExternalUpdate: fn => {
        triggerDBUpdate = fn;
        return () => {
          triggerDBUpdate = null;
        };
      },

      // not used in this test
      onDBFileMissing: () => {
        return () => {};
      },
      onDBFilePathChange: () => {
        return () => {};
      },
    },
  } satisfies Partial<NonNullable<typeof window.events>>,
});
@ -111,23 +105,26 @@ describe('SQLite provider', () => {
  });

  test('on db update', async () => {
    vi.useFakeTimers();
    await provider.connect();

    offlineYdoc.getText('text').insert(0, 'sqlite-world');

    triggerDBUpdate?.(id);
    triggerDBUpdate?.({
      workspaceId: id + '-another-id',
      update: Y.encodeStateAsUpdate(offlineYdoc),
    });

    // not yet updated
    expect(workspace.doc.getText('text').toString()).toBe('sqlite-hello');

    // wait for the update to be sync'ed
    await vi.advanceTimersByTimeAsync(1000);
    triggerDBUpdate?.({
      workspaceId: id,
      update: Y.encodeStateAsUpdate(offlineYdoc),
    });

    expect(workspace.doc.getText('text').toString()).toBe(
      'sqlite-worldsqlite-hello'
    );
    vi.useRealTimers();
  });

  test('disconnect handlers', async () => {
@ -176,9 +176,7 @@ const createSQLiteProvider = (
  }

  async function syncBlobIntoSQLite(bs: BlobManager) {
    const persistedKeys = await apis.db.getPersistedBlobs(
      blockSuiteWorkspace.id
    );
    const persistedKeys = await apis.db.getBlobKeys(blockSuiteWorkspace.id);

    const allKeys = await bs.list();
    const keysToPersist = allKeys.filter(k => !persistedKeys.includes(k));
@ -242,20 +240,9 @@ const createSQLiteProvider = (

    blockSuiteWorkspace.doc.on('update', handleUpdate);

    let timer = 0;
    unsubscribe = events.db.onDBFileUpdate(workspaceId => {
    unsubscribe = events.db.onExternalUpdate(({ update, workspaceId }) => {
      if (workspaceId === blockSuiteWorkspace.id) {
        // throttle
        logger.debug('on db update', workspaceId);
        if (timer) {
          clearTimeout(timer);
        }

        // @ts-expect-error ignore the type
        timer = setTimeout(() => {
          syncUpdates();
          timer = 0;
        }, 1000);
        Y.applyUpdate(blockSuiteWorkspace.doc, update, sqliteOrigin);
      }
    });