fix: add DB migration to add workspace (#3115)

Peng Xiao, 2023-07-10 16:03:18 +08:00, committed by GitHub
parent 812e0e9c9a
commit dac4e390aa
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
14 changed files with 213 additions and 28 deletions

View File

@@ -24,6 +24,7 @@
   "main": "./dist/main.js",
   "devDependencies": {
     "@affine-test/kit": "workspace:*",
+    "@affine/env": "workspace:*",
     "@affine/native": "workspace:*",
     "@blocksuite/blocks": "0.0.0-20230705162600-2cb608e4-nightly",
     "@blocksuite/editor": "0.0.0-20230705162600-2cb608e4-nightly",

View File

@@ -38,7 +38,12 @@ export const config = () => {
     bundle: true,
     target: `node${NODE_MAJOR_VERSION}`,
     platform: 'node',
-    external: ['electron', 'electron-updater', '@toeverything/plugin-infra'],
+    external: [
+      'electron',
+      'electron-updater',
+      '@toeverything/plugin-infra',
+      'yjs',
+    ],
     define: define,
     format: 'cjs',
     loader: {

View File

@@ -0,0 +1,69 @@
import path from 'node:path';

import { SqliteConnection } from '@affine/native';
import { afterEach, describe, expect, it, vi } from 'vitest';
import * as Y from 'yjs';

import { removeWithRetry } from '../../../../tests/utils';
import { copyToTemp, migrateToSubdocAndReplaceDatabase } from '../migration';

const tmpDir = path.join(__dirname, 'tmp');
const testDBFilePath = path.resolve(__dirname, 'old-db.affine');
const appDataPath = path.join(tmpDir, 'app-data');

vi.mock('../../main-rpc', () => ({
  mainRPC: {
    getPath: async () => appDataPath,
  },
}));

afterEach(async () => {
  await removeWithRetry(tmpDir);
});

describe('migrateToSubdocAndReplaceDatabase', () => {
  it('should migrate and replace the database', async () => {
    const copiedDbFilePath = await copyToTemp(testDBFilePath);
    await migrateToSubdocAndReplaceDatabase(copiedDbFilePath);

    const db = new SqliteConnection(copiedDbFilePath);
    await db.connect();

    // check if db has two rows, one for root doc and one for subdoc
    const rows = await db.getAllUpdates();
    expect(rows.length).toBe(2);

    const rootUpdate = rows.find(row => row.docId === undefined)!.data;
    const subdocUpdate = rows.find(row => row.docId !== undefined)!.data;
    expect(rootUpdate).toBeDefined();
    expect(subdocUpdate).toBeDefined();

    // apply updates
    const rootDoc = new Y.Doc();
    Y.applyUpdate(rootDoc, rootUpdate);

    // check if root doc has one subdoc
    expect(rootDoc.subdocs.size).toBe(1);

    // populates subdoc
    Y.applyUpdate(rootDoc.subdocs.values().next().value, subdocUpdate);

    // check if root doc's meta is correct
    const meta = rootDoc.getMap('meta').toJSON();
    expect(meta.workspaceVersion).toBe(1);
    expect(meta.name).toBe('hiw');
    expect(meta.pages.length).toBe(1);

    const pageMeta = meta.pages[0];
    expect(pageMeta.title).toBe('Welcome to AFFiNEd');

    // get the subdoc through id
    const subDoc = rootDoc
      .getMap('spaces')
      .get(`space:${pageMeta.id}`) as Y.Doc;
    expect(subDoc).toEqual(rootDoc.subdocs.values().next().value);

    await db.close();
  });
});

Binary file not shown.

View File

@@ -0,0 +1,55 @@
import { resolve } from 'node:path';

import { migrateToSubdoc } from '@affine/env/blocksuite';
import { SqliteConnection } from '@affine/native';
import fs from 'fs-extra';
import { nanoid } from 'nanoid';
import * as Y from 'yjs';

import { mainRPC } from '../main-rpc';

export const migrateToSubdocAndReplaceDatabase = async (path: string) => {
  const db = new SqliteConnection(path);
  await db.connect();

  const rows = await db.getAllUpdates();
  const originalDoc = new Y.Doc();

  // 1. apply all updates to the root doc
  rows.forEach(row => {
    Y.applyUpdate(originalDoc, row.data);
  });

  // 2. migrate using migrateToSubdoc
  const migratedDoc = migrateToSubdoc(originalDoc);

  // 3. replace db rows with the migrated doc
  await replaceRows(db, migratedDoc, true);

  // 4. close db
  await db.close();
};

export const copyToTemp = async (path: string) => {
  const tmpDirPath = resolve(await mainRPC.getPath('sessionData'), 'tmp');
  const tmpFilePath = resolve(tmpDirPath, nanoid());
  await fs.ensureDir(tmpDirPath);
  await fs.copyFile(path, tmpFilePath);
  return tmpFilePath;
};

async function replaceRows(
  db: SqliteConnection,
  doc: Y.Doc,
  isRoot: boolean
): Promise<void> {
  const migratedUpdates = Y.encodeStateAsUpdate(doc);
  const docId = isRoot ? undefined : doc.guid;
  const rows = [{ data: migratedUpdates, docId: docId }];
  await db.replaceUpdates(docId, rows);
  await Promise.all(
    [...doc.subdocs].map(async subdoc => {
      await replaceRows(db, subdoc, false);
    })
  );
}
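
For reference, a minimal sketch of the intended call sequence. The helper names come from the file above; the wrapper function and its placement are illustrative only, and the real call site is the loadDBFile handler further down:

// Work on a copy so the user's original .affine file is never modified,
// run the subdoc migration on that copy, and import the copy instead.
import { copyToTemp, migrateToSubdocAndReplaceDatabase } from './migration';

export async function prepareLegacyDBForImport(
  originalPath: string
): Promise<string> {
  const tmpPath = await copyToTemp(originalPath);
  await migrateToSubdocAndReplaceDatabase(tmpPath);
  return tmpPath; // the caller copies this file into the workspace dir
}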

View File

@@ -1,9 +1,11 @@
 import path from 'node:path';
+import { ValidationResult } from '@affine/native';
 import fs from 'fs-extra';
 import { nanoid } from 'nanoid';
 import { ensureSQLiteDB } from '../db/ensure-db';
+import { copyToTemp, migrateToSubdocAndReplaceDatabase } from '../db/migration';
 import type { WorkspaceSQLiteDB } from '../db/workspace-db-adapter';
 import { logger } from '../logger';
 import { mainRPC } from '../main-rpc';
@@ -55,6 +57,7 @@ const ErrorMessages = [
   'DB_FILE_ALREADY_LOADED',
   'DB_FILE_PATH_INVALID',
   'DB_FILE_INVALID',
+  'DB_FILE_MIGRATION_FAILED',
   'FILE_ALREADY_EXISTS',
   'UNKNOWN_ERROR',
 ] as const;
@@ -191,27 +194,42 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
     ],
     message: 'Load Workspace from a AFFiNE file',
   }));
-  const filePath = ret.filePaths?.[0];
-  if (ret.canceled || !filePath) {
+  let originalPath = ret.filePaths?.[0];
+  if (ret.canceled || !originalPath) {
     logger.info('loadDBFile canceled');
     return { canceled: true };
   }
   // the imported file should not be in app data dir
-  if (filePath.startsWith(await getWorkspacesBasePath())) {
+  if (originalPath.startsWith(await getWorkspacesBasePath())) {
     logger.warn('loadDBFile: db file in app data dir');
     return { error: 'DB_FILE_PATH_INVALID' };
   }
-  if (await dbFileAlreadyLoaded(filePath)) {
+  if (await dbFileAlreadyLoaded(originalPath)) {
     logger.warn('loadDBFile: db file already loaded');
     return { error: 'DB_FILE_ALREADY_LOADED' };
   }
   const { SqliteConnection } = await import('@affine/native');
-  if (!(await SqliteConnection.validate(filePath))) {
-    // TODO: report invalid db file error?
+  const validationResult = await SqliteConnection.validate(originalPath);
+  if (validationResult === ValidationResult.MissingDocIdColumn) {
+    try {
+      const tmpDBPath = await copyToTemp(originalPath);
+      await migrateToSubdocAndReplaceDatabase(tmpDBPath);
+      originalPath = tmpDBPath;
+    } catch (error) {
+      logger.warn(`loadDBFile, migration failed: ${originalPath}`, error);
+      return { error: 'DB_FILE_MIGRATION_FAILED' };
+    }
+  }
+  if (
+    validationResult !== ValidationResult.MissingDocIdColumn &&
+    validationResult !== ValidationResult.Valid
+  ) {
     return { error: 'DB_FILE_INVALID' }; // invalid db file
   }
@@ -220,9 +238,8 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
   const internalFilePath = await getWorkspaceDBPath(workspaceId);
   await fs.ensureDir(await getWorkspacesBasePath());
-  await fs.copy(filePath, internalFilePath);
-  logger.info(`loadDBFile, copy: ${filePath} -> ${internalFilePath}`);
+  await fs.copy(originalPath, internalFilePath);
+  logger.info(`loadDBFile, copy: ${originalPath} -> ${internalFilePath}`);
   await storeWorkspaceMeta(workspaceId, {
     id: workspaceId,

View File

@@ -25,6 +25,9 @@
     {
       "path": "../../packages/infra"
     },
+    {
+      "path": "../../packages/env"
+    },
     // Tests
     {

View File

@@ -313,6 +313,7 @@
   "Move folder hint": "Select a new storage location.",
   "Storage Folder": "Storage Folder",
   "DB_FILE_INVALID": "Invalid Database file",
+  "DB_FILE_MIGRATION_FAILED": "Database file migration failed",
   "Name Your Workspace": "Name Your Workspace",
   "Change avatar hint": "New avatar will be shown for everyone.",
   "Change workspace name hint": "New name will be shown for everyone.",

View File

@@ -7,7 +7,7 @@ export interface WatchOptions {
   recursive?: boolean;
 }
 /** Watcher kind enumeration */
-export const enum WatcherKind {
+export enum WatcherKind {
   /** inotify backend (linux) */
   Inotify = 'Inotify',
   /** FS-Event backend (mac) */
@@ -38,6 +38,12 @@ export interface InsertRow {
   docId?: string;
   data: Uint8Array;
 }
+export enum ValidationResult {
+  MissingTables = 0,
+  MissingDocIdColumn = 1,
+  GeneralError = 2,
+  Valid = 3,
+}
 export class Subscription {
   toString(): string;
   unsubscribe(): void;
@@ -71,5 +77,6 @@ export class SqliteConnection {
   ): Promise<void>;
   close(): Promise<void>;
   get isClose(): boolean;
-  static validate(path: string): Promise<boolean>;
+  static validate(path: string): Promise<ValidationResult>;
+  migrateAddDocId(): Promise<void>;
 }
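
Note that validate() now resolves to a ValidationResult instead of a boolean, and the type is a plain (non-const) enum so it exists as a runtime value exported by the binding, which is also why the napi build switches to --no-const-enum below. A minimal sketch of how a caller is expected to branch on it; the helper name and return values are illustrative, and the real decision logic lives in the loadDBFile handler above:

import { SqliteConnection, ValidationResult } from '@affine/native';

// Decide what to do with a user-picked .affine file before importing it.
async function classifyDBFile(dbPath: string) {
  const result = await SqliteConnection.validate(dbPath);
  switch (result) {
    case ValidationResult.Valid:
      return 'import'; // load as-is
    case ValidationResult.MissingDocIdColumn:
      return 'migrate'; // pre-subdoc database: migrate a copy first
    default:
      return 'reject'; // MissingTables / GeneralError
  }
}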

View File

@@ -263,11 +263,18 @@ if (!nativeBinding) {
   throw new Error(`Failed to load native binding`);
 }
-const { WatcherKind, Subscription, FsWatcher, moveFile, SqliteConnection } =
-  nativeBinding;
+const {
+  WatcherKind,
+  Subscription,
+  FsWatcher,
+  moveFile,
+  SqliteConnection,
+  ValidationResult,
+} = nativeBinding;
 module.exports.WatcherKind = WatcherKind;
 module.exports.Subscription = Subscription;
 module.exports.FsWatcher = FsWatcher;
 module.exports.moveFile = moveFile;
 module.exports.SqliteConnection = SqliteConnection;
+module.exports.ValidationResult = ValidationResult;

View File

@@ -11,6 +11,9 @@
         "aarch64-unknown-linux-gnu",
         "aarch64-pc-windows-msvc"
       ]
+    },
+    "ts": {
+      "constEnum": false
     }
   },
   "license": "MPL-2.0",
@@ -29,8 +32,8 @@
   },
   "scripts": {
     "artifacts": "napi artifacts",
-    "build": "napi build --platform --release",
-    "build:debug": "napi build --platform",
+    "build": "napi build --platform --release --no-const-enum",
+    "build:debug": "napi build --platform --no-const-enum",
     "universal": "napi universal",
     "test": "cross-env TS_NODE_TRANSPILE_ONLY=1 TS_NODE_PROJECT=./tsconfig.json node --test --loader ts-node/esm --experimental-specifier-resolution=node ./__tests__/**/*.mts",
     "version": "napi version"

View File

@ -1,5 +1,5 @@
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use napi::bindgen_prelude::{Buffer, Uint8Array}; use napi::bindgen_prelude::{Buffer, FromNapiValue, ToNapiValue, Uint8Array};
use napi_derive::napi; use napi_derive::napi;
use sqlx::{ use sqlx::{
migrate::MigrateDatabase, migrate::MigrateDatabase,
@ -34,6 +34,14 @@ pub struct SqliteConnection {
path: String, path: String,
} }
#[napi]
pub enum ValidationResult {
MissingTables,
MissingDocIdColumn,
GeneralError,
Valid,
}
#[napi] #[napi]
impl SqliteConnection { impl SqliteConnection {
#[napi(constructor)] #[napi(constructor)]
@ -231,14 +239,14 @@ impl SqliteConnection {
} }
#[napi] #[napi]
pub async fn validate(path: String) -> bool { pub async fn validate(path: String) -> ValidationResult {
let pool = match SqlitePoolOptions::new() let pool = match SqlitePoolOptions::new()
.max_connections(1) .max_connections(1)
.connect(&path) .connect(&path)
.await .await
{ {
Ok(pool) => pool, Ok(pool) => pool,
Err(_) => return false, Err(_) => return ValidationResult::GeneralError,
}; };
let tables_res = sqlx::query("SELECT name FROM sqlite_master WHERE type='table'") let tables_res = sqlx::query("SELECT name FROM sqlite_master WHERE type='table'")
@ -250,26 +258,32 @@ impl SqliteConnection {
let names: Vec<String> = res.iter().map(|row| row.get(0)).collect(); let names: Vec<String> = res.iter().map(|row| row.get(0)).collect();
names.contains(&"updates".to_string()) && names.contains(&"blobs".to_string()) names.contains(&"updates".to_string()) && names.contains(&"blobs".to_string())
} }
Err(_) => return false, Err(_) => return ValidationResult::GeneralError,
}; };
let columns_res = sqlx::query("PRAGMA table_info(updates)") let columns_res = sqlx::query("PRAGMA table_info(updates)")
.fetch_all(&pool) .fetch_all(&pool)
.await; .await;
let columns_exist = match columns_res { let doc_id_exist = match columns_res {
Ok(res) => { Ok(res) => {
let names: Vec<String> = res.iter().map(|row| row.get(1)).collect(); let names: Vec<String> = res.iter().map(|row| row.get(1)).collect();
names.contains(&"data".to_string()) && names.contains(&"doc_id".to_string()) names.contains(&"doc_id".to_string())
} }
Err(_) => return false, Err(_) => return ValidationResult::GeneralError,
}; };
tables_exist && columns_exist if !tables_exist {
ValidationResult::MissingTables
} else if !doc_id_exist {
ValidationResult::MissingDocIdColumn
} else {
ValidationResult::Valid
}
} }
// todo: have a better way to handle migration #[napi]
async fn migrate_add_doc_id(&self) -> Result<(), anyhow::Error> { pub async fn migrate_add_doc_id(&self) -> napi::Result<()> {
// ignore errors // ignore errors
match sqlx::query("ALTER TABLE updates ADD COLUMN doc_id TEXT") match sqlx::query("ALTER TABLE updates ADD COLUMN doc_id TEXT")
.execute(&self.pool) .execute(&self.pool)
@ -280,7 +294,7 @@ impl SqliteConnection {
if err.to_string().contains("duplicate column name") { if err.to_string().contains("duplicate column name") {
Ok(()) // Ignore error if it's due to duplicate column Ok(()) // Ignore error if it's due to duplicate column
} else { } else {
Err(anyhow::Error::from(err)) // Propagate other errors Err(anyhow::Error::from(err).into()) // Propagate other errors
} }
} }
} }
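
On the JS side the #[napi] attribute exposes migrate_add_doc_id as migrateAddDocId() (declared in the updated index.d.ts above), and the Rust code deliberately swallows the "duplicate column name" error, so the call is idempotent. A small sketch of upgrading a legacy database in place; the wrapper function is illustrative and not part of this commit:

import { SqliteConnection, ValidationResult } from '@affine/native';

// Add the doc_id column to an old "updates" table if it is missing.
// Safe to call repeatedly: duplicate-column errors are ignored in Rust.
async function ensureDocIdColumn(dbPath: string): Promise<void> {
  const result = await SqliteConnection.validate(dbPath);
  if (result === ValidationResult.MissingDocIdColumn) {
    const db = new SqliteConnection(dbPath);
    await db.connect();
    await db.migrateAddDocId(); // ALTER TABLE updates ADD COLUMN doc_id TEXT
    await db.close();
  }
}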

View File

@@ -1,4 +1,4 @@
-import { resolve } from 'node:path';
+import path, { resolve } from 'node:path';
 import { fileURLToPath } from 'node:url';
 import { vanillaExtractPlugin } from '@vanilla-extract/vite-plugin';
@@ -15,6 +15,8 @@ export default defineConfig({
     alias: {
       'next/router': 'next-router-mock',
       'next/config': resolve(rootDir, './scripts/vitest/next-config-mock.ts'),
+      // prevent tests using two different sources of yjs
+      yjs: path.resolve(__dirname, 'node_modules/yjs'),
     },
   },
   define: {

View File

@@ -211,6 +211,7 @@ __metadata:
   resolution: "@affine/electron@workspace:apps/electron"
   dependencies:
     "@affine-test/kit": "workspace:*"
+    "@affine/env": "workspace:*"
    "@affine/native": "workspace:*"
    "@blocksuite/blocks": 0.0.0-20230705162600-2cb608e4-nightly
    "@blocksuite/editor": 0.0.0-20230705162600-2cb608e4-nightly
"@blocksuite/editor": 0.0.0-20230705162600-2cb608e4-nightly "@blocksuite/editor": 0.0.0-20230705162600-2cb608e4-nightly