fix: add DB migration to add workspace (#3115)

Peng Xiao 2023-07-10 16:03:18 +08:00 committed by GitHub
parent 812e0e9c9a
commit dac4e390aa
14 changed files with 213 additions and 28 deletions

View File

@@ -24,6 +24,7 @@
"main": "./dist/main.js",
"devDependencies": {
"@affine-test/kit": "workspace:*",
"@affine/env": "workspace:*",
"@affine/native": "workspace:*",
"@blocksuite/blocks": "0.0.0-20230705162600-2cb608e4-nightly",
"@blocksuite/editor": "0.0.0-20230705162600-2cb608e4-nightly",

View File

@@ -38,7 +38,12 @@ export const config = () => {
bundle: true,
target: `node${NODE_MAJOR_VERSION}`,
platform: 'node',
external: ['electron', 'electron-updater', '@toeverything/plugin-infra'],
external: [
'electron',
'electron-updater',
'@toeverything/plugin-infra',
'yjs',
],
define: define,
format: 'cjs',
loader: {

View File

@@ -0,0 +1,69 @@
import path from 'node:path';
import { SqliteConnection } from '@affine/native';
import { afterEach, describe, expect, it, vi } from 'vitest';
import * as Y from 'yjs';
import { removeWithRetry } from '../../../../tests/utils';
import { copyToTemp, migrateToSubdocAndReplaceDatabase } from '../migration';
const tmpDir = path.join(__dirname, 'tmp');
const testDBFilePath = path.resolve(__dirname, 'old-db.affine');
const appDataPath = path.join(tmpDir, 'app-data');
vi.mock('../../main-rpc', () => ({
mainRPC: {
getPath: async () => appDataPath,
},
}));
afterEach(async () => {
await removeWithRetry(tmpDir);
});
describe('migrateToSubdocAndReplaceDatabase', () => {
it('should migrate and replace the database', async () => {
const copiedDbFilePath = await copyToTemp(testDBFilePath);
await migrateToSubdocAndReplaceDatabase(copiedDbFilePath);
const db = new SqliteConnection(copiedDbFilePath);
await db.connect();
// check if db has two rows, one for root doc and one for subdoc
const rows = await db.getAllUpdates();
expect(rows.length).toBe(2);
const rootUpdate = rows.find(row => row.docId === undefined)!.data;
const subdocUpdate = rows.find(row => row.docId !== undefined)!.data;
expect(rootUpdate).toBeDefined();
expect(subdocUpdate).toBeDefined();
// apply updates
const rootDoc = new Y.Doc();
Y.applyUpdate(rootDoc, rootUpdate);
// check if root doc has one subdoc
expect(rootDoc.subdocs.size).toBe(1);
// populates subdoc
Y.applyUpdate(rootDoc.subdocs.values().next().value, subdocUpdate);
// check if root doc's meta is correct
const meta = rootDoc.getMap('meta').toJSON();
expect(meta.workspaceVersion).toBe(1);
expect(meta.name).toBe('hiw');
expect(meta.pages.length).toBe(1);
const pageMeta = meta.pages[0];
expect(pageMeta.title).toBe('Welcome to AFFiNEd');
// get the subdoc through id
const subDoc = rootDoc
.getMap('spaces')
.get(`space:${pageMeta.id}`) as Y.Doc;
expect(subDoc).toEqual(rootDoc.subdocs.values().next().value);
await db.close();
});
});

Binary file not shown.

View File

@@ -0,0 +1,55 @@
import { resolve } from 'node:path';
import { migrateToSubdoc } from '@affine/env/blocksuite';
import { SqliteConnection } from '@affine/native';
import fs from 'fs-extra';
import { nanoid } from 'nanoid';
import * as Y from 'yjs';
import { mainRPC } from '../main-rpc';
export const migrateToSubdocAndReplaceDatabase = async (path: string) => {
const db = new SqliteConnection(path);
await db.connect();
const rows = await db.getAllUpdates();
const originalDoc = new Y.Doc();
// 1. apply all updates to the root doc
rows.forEach(row => {
Y.applyUpdate(originalDoc, row.data);
});
// 2. migrate using migrateToSubdoc
const migratedDoc = migrateToSubdoc(originalDoc);
// 3. replace db rows with the migrated doc
await replaceRows(db, migratedDoc, true);
// 4. close db
await db.close();
};
export const copyToTemp = async (path: string) => {
const tmpDirPath = resolve(await mainRPC.getPath('sessionData'), 'tmp');
const tmpFilePath = resolve(tmpDirPath, nanoid());
await fs.ensureDir(tmpDirPath);
await fs.copyFile(path, tmpFilePath);
return tmpFilePath;
};
async function replaceRows(
db: SqliteConnection,
doc: Y.Doc,
isRoot: boolean
): Promise<void> {
const migratedUpdates = Y.encodeStateAsUpdate(doc);
const docId = isRoot ? undefined : doc.guid;
const rows = [{ data: migratedUpdates, docId: docId }];
await db.replaceUpdates(docId, rows);
await Promise.all(
[...doc.subdocs].map(async subdoc => {
await replaceRows(db, subdoc, false);
})
);
}
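
Taken together, the two helpers above are meant to be used as a pair: copy the user-selected file to a temporary location, migrate the copy in place, and import the copy instead of the original. A minimal sketch of that flow, mirroring the test and the loadDBFile handler elsewhere in this diff (the wrapper function name is illustrative, and the import path assumes the electron main layout used in this PR):

import { copyToTemp, migrateToSubdocAndReplaceDatabase } from '../db/migration';

// Hypothetical wrapper: prepare a legacy .affine file for import without touching the original.
async function prepareLegacyDBFile(selectedPath: string): Promise<string> {
  // Work on a throwaway copy under the app's session data dir.
  const tmpPath = await copyToTemp(selectedPath);
  // Rewrite the copy: one row for the root doc plus one row per subdoc.
  await migrateToSubdocAndReplaceDatabase(tmpPath);
  return tmpPath; // the caller imports this migrated copy
}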

View File

@@ -1,9 +1,11 @@
import path from 'node:path';
import { ValidationResult } from '@affine/native';
import fs from 'fs-extra';
import { nanoid } from 'nanoid';
import { ensureSQLiteDB } from '../db/ensure-db';
import { copyToTemp, migrateToSubdocAndReplaceDatabase } from '../db/migration';
import type { WorkspaceSQLiteDB } from '../db/workspace-db-adapter';
import { logger } from '../logger';
import { mainRPC } from '../main-rpc';
@@ -55,6 +57,7 @@ const ErrorMessages = [
'DB_FILE_ALREADY_LOADED',
'DB_FILE_PATH_INVALID',
'DB_FILE_INVALID',
'DB_FILE_MIGRATION_FAILED',
'FILE_ALREADY_EXISTS',
'UNKNOWN_ERROR',
] as const;
@@ -191,27 +194,42 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
],
message: 'Load Workspace from a AFFiNE file',
}));
const filePath = ret.filePaths?.[0];
if (ret.canceled || !filePath) {
let originalPath = ret.filePaths?.[0];
if (ret.canceled || !originalPath) {
logger.info('loadDBFile canceled');
return { canceled: true };
}
// the imported file should not be in app data dir
if (filePath.startsWith(await getWorkspacesBasePath())) {
if (originalPath.startsWith(await getWorkspacesBasePath())) {
logger.warn('loadDBFile: db file in app data dir');
return { error: 'DB_FILE_PATH_INVALID' };
}
if (await dbFileAlreadyLoaded(filePath)) {
if (await dbFileAlreadyLoaded(originalPath)) {
logger.warn('loadDBFile: db file already loaded');
return { error: 'DB_FILE_ALREADY_LOADED' };
}
const { SqliteConnection } = await import('@affine/native');
if (!(await SqliteConnection.validate(filePath))) {
// TODO: report invalid db file error?
const validationResult = await SqliteConnection.validate(originalPath);
if (validationResult === ValidationResult.MissingDocIdColumn) {
try {
const tmpDBPath = await copyToTemp(originalPath);
await migrateToSubdocAndReplaceDatabase(tmpDBPath);
originalPath = tmpDBPath;
} catch (error) {
logger.warn(`loadDBFile, migration failed: ${originalPath}`, error);
return { error: 'DB_FILE_MIGRATION_FAILED' };
}
}
if (
validationResult !== ValidationResult.MissingDocIdColumn &&
validationResult !== ValidationResult.Valid
) {
return { error: 'DB_FILE_INVALID' }; // invalid db file
}
@@ -220,9 +238,8 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
const internalFilePath = await getWorkspaceDBPath(workspaceId);
await fs.ensureDir(await getWorkspacesBasePath());
await fs.copy(filePath, internalFilePath);
logger.info(`loadDBFile, copy: ${filePath} -> ${internalFilePath}`);
await fs.copy(originalPath, internalFilePath);
logger.info(`loadDBFile, copy: ${originalPath} -> ${internalFilePath}`);
await storeWorkspaceMeta(workspaceId, {
id: workspaceId,

View File

@@ -25,6 +25,9 @@
{
"path": "../../packages/infra"
},
{
"path": "../../packages/env"
},
// Tests
{

View File

@@ -313,6 +313,7 @@
"Move folder hint": "Select a new storage location.",
"Storage Folder": "Storage Folder",
"DB_FILE_INVALID": "Invalid Database file",
"DB_FILE_MIGRATION_FAILED": "Database file migration failed",
"Name Your Workspace": "Name Your Workspace",
"Change avatar hint": "New avatar will be shown for everyone.",
"Change workspace name hint": "New name will be shown for everyone.",

View File

@@ -7,7 +7,7 @@ export interface WatchOptions {
recursive?: boolean;
}
/** Watcher kind enumeration */
export const enum WatcherKind {
export enum WatcherKind {
/** inotify backend (linux) */
Inotify = 'Inotify',
/** FS-Event backend (mac) */
@@ -38,6 +38,12 @@ export interface InsertRow {
docId?: string;
data: Uint8Array;
}
export enum ValidationResult {
MissingTables = 0,
MissingDocIdColumn = 1,
GeneralError = 2,
Valid = 3,
}
export class Subscription {
toString(): string;
unsubscribe(): void;
@@ -71,5 +77,6 @@ export class SqliteConnection {
): Promise<void>;
close(): Promise<void>;
get isClose(): boolean;
static validate(path: string): Promise<boolean>;
static validate(path: string): Promise<ValidationResult>;
migrateAddDocId(): Promise<void>;
}
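
Because validate now returns an enum rather than a boolean, callers are expected to treat "old schema, still migratable" differently from "unusable file". A hedged sketch of that branching in TypeScript, mirroring the electron handler earlier in this diff (the helper name and return values are illustrative):

import { SqliteConnection, ValidationResult } from '@affine/native';

// Hypothetical helper: classify a candidate db file before importing it.
async function classifyDBFile(
  path: string
): Promise<'valid' | 'needs-migration' | 'invalid'> {
  const result = await SqliteConnection.validate(path);
  if (result === ValidationResult.Valid) {
    return 'valid';
  }
  if (result === ValidationResult.MissingDocIdColumn) {
    // Old schema without the doc_id column: importable after migration.
    return 'needs-migration';
  }
  // MissingTables or GeneralError: reject the file.
  return 'invalid';
}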

View File

@@ -263,11 +263,18 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`);
}
const { WatcherKind, Subscription, FsWatcher, moveFile, SqliteConnection } =
nativeBinding;
const {
WatcherKind,
Subscription,
FsWatcher,
moveFile,
SqliteConnection,
ValidationResult,
} = nativeBinding;
module.exports.WatcherKind = WatcherKind;
module.exports.Subscription = Subscription;
module.exports.FsWatcher = FsWatcher;
module.exports.moveFile = moveFile;
module.exports.SqliteConnection = SqliteConnection;
module.exports.ValidationResult = ValidationResult;

View File

@@ -11,6 +11,9 @@
"aarch64-unknown-linux-gnu",
"aarch64-pc-windows-msvc"
]
},
"ts": {
"constEnum": false
}
},
"license": "MPL-2.0",
@@ -29,8 +32,8 @@
},
"scripts": {
"artifacts": "napi artifacts",
"build": "napi build --platform --release",
"build:debug": "napi build --platform",
"build": "napi build --platform --release --no-const-enum",
"build:debug": "napi build --platform --no-const-enum",
"universal": "napi universal",
"test": "cross-env TS_NODE_TRANSPILE_ONLY=1 TS_NODE_PROJECT=./tsconfig.json node --test --loader ts-node/esm --experimental-specifier-resolution=node ./__tests__/**/*.mts",
"version": "napi version"

View File

@@ -1,5 +1,5 @@
use chrono::NaiveDateTime;
use napi::bindgen_prelude::{Buffer, Uint8Array};
use napi::bindgen_prelude::{Buffer, FromNapiValue, ToNapiValue, Uint8Array};
use napi_derive::napi;
use sqlx::{
migrate::MigrateDatabase,
@@ -34,6 +34,14 @@ pub struct SqliteConnection {
path: String,
}
#[napi]
pub enum ValidationResult {
MissingTables,
MissingDocIdColumn,
GeneralError,
Valid,
}
#[napi]
impl SqliteConnection {
#[napi(constructor)]
@@ -231,14 +239,14 @@ impl SqliteConnection {
}
#[napi]
pub async fn validate(path: String) -> bool {
pub async fn validate(path: String) -> ValidationResult {
let pool = match SqlitePoolOptions::new()
.max_connections(1)
.connect(&path)
.await
{
Ok(pool) => pool,
Err(_) => return false,
Err(_) => return ValidationResult::GeneralError,
};
let tables_res = sqlx::query("SELECT name FROM sqlite_master WHERE type='table'")
@@ -250,26 +258,32 @@ impl SqliteConnection {
let names: Vec<String> = res.iter().map(|row| row.get(0)).collect();
names.contains(&"updates".to_string()) && names.contains(&"blobs".to_string())
}
Err(_) => return false,
Err(_) => return ValidationResult::GeneralError,
};
let columns_res = sqlx::query("PRAGMA table_info(updates)")
.fetch_all(&pool)
.await;
let columns_exist = match columns_res {
let doc_id_exist = match columns_res {
Ok(res) => {
let names: Vec<String> = res.iter().map(|row| row.get(1)).collect();
names.contains(&"data".to_string()) && names.contains(&"doc_id".to_string())
names.contains(&"doc_id".to_string())
}
Err(_) => return false,
Err(_) => return ValidationResult::GeneralError,
};
tables_exist && columns_exist
if !tables_exist {
ValidationResult::MissingTables
} else if !doc_id_exist {
ValidationResult::MissingDocIdColumn
} else {
ValidationResult::Valid
}
}
// todo: have a better way to handle migration
async fn migrate_add_doc_id(&self) -> Result<(), anyhow::Error> {
#[napi]
pub async fn migrate_add_doc_id(&self) -> napi::Result<()> {
// ignore errors
match sqlx::query("ALTER TABLE updates ADD COLUMN doc_id TEXT")
.execute(&self.pool)
@@ -280,7 +294,7 @@ impl SqliteConnection {
if err.to_string().contains("duplicate column name") {
Ok(()) // Ignore error if it's due to duplicate column
} else {
Err(anyhow::Error::from(err)) // Propagate other errors
Err(anyhow::Error::from(err).into()) // Propagate other errors
}
}
}
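
Since migrate_add_doc_id is now exposed through napi (surfaced as migrateAddDocId in the type definitions above), the column migration can in principle be triggered from the JS side as well. A hedged sketch, assuming a freshly opened connection; because the Rust side swallows the "duplicate column name" error, calling it on an already-migrated file should be a no-op:

import { SqliteConnection } from '@affine/native';

// Illustrative only: ensure the updates table has a doc_id column.
async function ensureDocIdColumn(path: string): Promise<void> {
  const db = new SqliteConnection(path);
  await db.connect();
  await db.migrateAddDocId(); // "duplicate column name" is ignored on the Rust side
  await db.close();
}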

View File

@@ -1,4 +1,4 @@
import { resolve } from 'node:path';
import path, { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import { vanillaExtractPlugin } from '@vanilla-extract/vite-plugin';
@@ -15,6 +15,8 @@ export default defineConfig({
alias: {
'next/router': 'next-router-mock',
'next/config': resolve(rootDir, './scripts/vitest/next-config-mock.ts'),
// prevent tests using two different sources of yjs
yjs: path.resolve(__dirname, 'node_modules/yjs'),
},
},
define: {

View File

@@ -211,6 +211,7 @@ __metadata:
resolution: "@affine/electron@workspace:apps/electron"
dependencies:
"@affine-test/kit": "workspace:*"
"@affine/env": "workspace:*"
"@affine/native": "workspace:*"
"@blocksuite/blocks": 0.0.0-20230705162600-2cb608e4-nightly
"@blocksuite/editor": 0.0.0-20230705162600-2cb608e4-nightly