Mirror of https://github.com/toeverything/AFFiNE.git (synced 2025-01-03 16:44:20 +03:00)
fix(electron): upgrade db file (#3984)
This commit is contained in: commit d9cb45f466 (parent d62935935f)
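At a glance, the change adds a `version_info` table to the Electron workspace SQLite file, a new `ValidationResult.MissingVersionColumn` state, and a `migrateToLatestDatabase` path that upgrades older (v1/v2) databases to v3 on a temporary copy before swapping it in. A minimal sketch of that load-time flow, assuming the `copyToTemp` and `migrateToLatestDatabase` exports shown in the diff below (the wrapper function itself is hypothetical):

```ts
import { SqliteConnection, ValidationResult } from '@affine/native';

import { copyToTemp, migrateToLatestDatabase } from '../db/migration';

// Sketch: return a path that is safe to open with the current schema.
// Mirrors the MissingVersionColumn branch added to loadDBFile in this commit.
async function upgradeIfNeeded(originalPath: string): Promise<string> {
  const result = await SqliteConnection.validate(originalPath);
  if (result === ValidationResult.MissingVersionColumn) {
    // Never migrate in place: work on a copy so a failure leaves the
    // original file untouched.
    const tmpDBPath = await copyToTemp(originalPath);
    await migrateToLatestDatabase(tmpDBPath);
    return tmpDBPath;
  }
  return originalPath;
}
```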
@@ -172,9 +172,9 @@ If you encounter any problems when upgrading the version, please feel free to [c
 | AFFiNE Version | Export/Import workspace | Data auto migration |
 | -------------- | ----------------------- | ------------------- |
 | <= 0.5.4       | ❌️                      | ❌                   |
-| ^0.6.0         | ⚠️                       | ✅                   |
-| ^0.7.0         | ⚠️                       | ✅                   |
-| ^0.8.0         | ✅                       | ✅                   |
+| 0.6.x          | ✅️                      | ✅                   |
+| 0.7.x          | ✅️                      | ✅                   |
+| 0.8.x          | ✅                       | ✅                   |

 ## Self-Host
@@ -1,7 +1,11 @@
+import { equal } from 'node:assert';
 import { resolve } from 'node:path';

 import { SqliteConnection } from '@affine/native';
-import { migrateToSubdoc } from '@toeverything/infra/blocksuite';
+import {
+  migrateToSubdoc,
+  WorkspaceVersion,
+} from '@toeverything/infra/blocksuite';
 import fs from 'fs-extra';
 import { nanoid } from 'nanoid';
 import { applyUpdate, Doc as YDoc, encodeStateAsUpdate } from 'yjs';
@@ -30,6 +34,72 @@ export const migrateToSubdocAndReplaceDatabase = async (path: string) => {
   await db.close();
 };

+import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
+import { Schema, Workspace } from '@blocksuite/store';
+import { migrateWorkspace } from '@toeverything/infra/blocksuite';
+
+// v1 v2 -> v3
+export const migrateToLatestDatabase = async (path: string) => {
+  const connection = new SqliteConnection(path);
+  await connection.connect();
+  await connection.initVersion();
+  const schema = new Schema();
+  schema.register(AffineSchemas).register(__unstableSchemas);
+  const rootDoc = new YDoc();
+  const downloadBinary = async (doc: YDoc, isRoot: boolean): Promise<void> => {
+    const update = (
+      await connection.getUpdates(isRoot ? undefined : doc.guid)
+    ).map(update => update.data);
+    // Buffer[] -> Uint8Array
+    const data = new Uint8Array(Buffer.concat(update).buffer);
+    applyUpdate(doc, data);
+    // trigger data manually
+    if (isRoot) {
+      doc.getMap('meta');
+      doc.getMap('spaces');
+    } else {
+      doc.getMap('blocks');
+    }
+    await Promise.all(
+      [...doc.subdocs].map(subdoc => {
+        return downloadBinary(subdoc, false);
+      })
+    );
+  };
+  await downloadBinary(rootDoc, true);
+  const result = await migrateWorkspace(WorkspaceVersion.SubDoc, {
+    getSchema: () => schema,
+    getCurrentRootDoc: () => Promise.resolve(rootDoc),
+    createWorkspace: () =>
+      Promise.resolve(
+        new Workspace({
+          id: nanoid(10),
+          schema,
+          blobStorages: [],
+          providerCreators: [],
+        })
+      ),
+  });
+  equal(
+    typeof result,
+    'boolean',
+    'migrateWorkspace should return boolean value'
+  );
+  const uploadBinary = async (doc: YDoc, isRoot: boolean) => {
+    await connection.replaceUpdates(doc.guid, [
+      { docId: isRoot ? undefined : doc.guid, data: encodeStateAsUpdate(doc) },
+    ]);
+    // connection..applyUpdate(encodeStateAsUpdate(doc), 'self', doc.guid)
+    await Promise.all(
+      [...doc.subdocs].map(subdoc => {
+        return uploadBinary(subdoc, false);
+      })
+    );
+  };
+  await uploadBinary(rootDoc, true);
+  await connection.close();
+};
+
 export const copyToTemp = async (path: string) => {
   const tmpDirPath = resolve(await mainRPC.getPath('sessionData'), 'tmp');
   const tmpFilePath = resolve(tmpDirPath, nanoid());
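Both helpers in `migrateToLatestDatabase` walk the workspace the same way: handle the current doc, then recurse into every entry of `doc.subdocs`. A standalone sketch of that traversal (the `walkDocTree`/`visit` names are hypothetical, introduced only for illustration):

```ts
import { Doc as YDoc } from 'yjs';

// Depth-first walk over a root doc and all of its registered subdocs,
// mirroring the recursion used by downloadBinary/uploadBinary above.
async function walkDocTree(
  doc: YDoc,
  visit: (doc: YDoc, isRoot: boolean) => Promise<void>,
  isRoot = true
): Promise<void> {
  await visit(doc, isRoot);
  await Promise.all(
    [...doc.subdocs].map(subdoc => walkDocTree(subdoc, visit, false))
  );
}
```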
@@ -12,7 +12,11 @@ import fs from 'fs-extra';
 import { nanoid } from 'nanoid';

 import { ensureSQLiteDB } from '../db/ensure-db';
-import { copyToTemp, migrateToSubdocAndReplaceDatabase } from '../db/migration';
+import {
+  copyToTemp,
+  migrateToLatestDatabase,
+  migrateToSubdocAndReplaceDatabase,
+} from '../db/migration';
 import type { WorkspaceSQLiteDB } from '../db/workspace-db-adapter';
 import { logger } from '../logger';
 import { mainRPC } from '../main-rpc';
@@ -197,7 +201,22 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
     }
   }

+  if (validationResult === ValidationResult.MissingVersionColumn) {
+    try {
+      const tmpDBPath = await copyToTemp(originalPath);
+      await migrateToLatestDatabase(tmpDBPath);
+      originalPath = tmpDBPath;
+    } catch (error) {
+      logger.warn(
+        `loadDBFile, migration version column failed: ${originalPath}`,
+        error
+      );
+      return { error: 'DB_FILE_MIGRATION_FAILED' };
+    }
+  }
+
   if (
+    validationResult !== ValidationResult.MissingVersionColumn &&
     validationResult !== ValidationResult.MissingDocIdColumn &&
     validationResult !== ValidationResult.Valid
   ) {
@@ -490,9 +490,14 @@ const upgradeV1ToV2 = async (options: UpgradeOptions) => {
   return newWorkspace;
 };

-const upgradeV2ToV3 = async (options: UpgradeOptions): Promise<true> => {
+const upgradeV2ToV3 = async (options: UpgradeOptions): Promise<boolean> => {
   const rootDoc = await options.getCurrentRootDoc();
   const spaces = rootDoc.getMap('spaces') as YMap<any>;
+  const meta = rootDoc.getMap('meta') as YMap<unknown>;
+  const versions = meta.get('blockVersions') as YMap<number>;
+  if (versions.get('affine:database') === 3) {
+    return false;
+  }
   const schema = options.getSchema();
   spaces.forEach(space => {
     schema.upgradePage(
@@ -511,8 +516,6 @@ const upgradeV2ToV3 = async (options: UpgradeOptions): Promise<true> => {
       space
     );
   });
-  const meta = rootDoc.getMap('meta') as YMap<unknown>;
-  const versions = meta.get('blockVersions') as YMap<number>;
   versions.set('affine:database', 3);
   return true;
 };
packages/native/__tests__/db.spec.mts (new file, 15 lines)
@@ -0,0 +1,15 @@
+import assert from 'node:assert';
+import { test } from 'node:test';
+import { fileURLToPath } from 'node:url';
+
+import { SqliteConnection, ValidationResult } from '../index';
+
+test('db', { concurrency: false }, async t => {
+  await t.test('validate', async () => {
+    const path = fileURLToPath(
+      new URL('./fixtures/test01.affine', import.meta.url)
+    );
+    const result = await SqliteConnection.validate(path);
+    assert.equal(result, ValidationResult.MissingVersionColumn);
+  });
+});
packages/native/__tests__/fixtures/test01.affine (new binary file; binary file not shown)
packages/native/index.d.ts (vendored, 7 lines changed)
@@ -41,8 +41,9 @@ export interface InsertRow {
 export enum ValidationResult {
   MissingTables = 0,
   MissingDocIdColumn = 1,
-  GeneralError = 2,
-  Valid = 3,
+  MissingVersionColumn = 2,
+  GeneralError = 3,
+  Valid = 4,
 }
 export class Subscription {
   toString(): string;
@@ -75,6 +76,8 @@ export class SqliteConnection {
     docId: string | undefined | null,
     updates: Array<InsertRow>
   ): Promise<void>;
+  initVersion(): Promise<void>;
+  setVersion(version: number): Promise<void>;
   close(): Promise<void>;
   get isClose(): boolean;
   static validate(path: string): Promise<ValidationResult>;
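Note that the renumbering shifts `GeneralError` and `Valid` up by one to make room for `MissingVersionColumn`, so nothing should rely on the old numeric values. A small sketch of consuming the new enum the way the Electron handler does (the `classify` helper is hypothetical; the `MissingDocIdColumn` branch refers to the pre-existing subdoc migration, which is not part of this diff):

```ts
import { SqliteConnection, ValidationResult } from '@affine/native';

// Decide what loadDBFile should do with a workspace database file.
async function classify(
  path: string
): Promise<'ok' | 'migrate-version' | 'migrate-subdoc' | 'error'> {
  switch (await SqliteConnection.validate(path)) {
    case ValidationResult.Valid:
      return 'ok';
    case ValidationResult.MissingVersionColumn:
      // added in this commit: upgrade a temp copy via migrateToLatestDatabase
      return 'migrate-version';
    case ValidationResult.MissingDocIdColumn:
      // handled by the older migrateToSubdocAndReplaceDatabase path
      return 'migrate-subdoc';
    case ValidationResult.MissingTables:
    case ValidationResult.GeneralError:
    default:
      return 'error';
  }
}
```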
@@ -11,4 +11,9 @@ CREATE TABLE IF NOT EXISTS "blobs" (
   key TEXT PRIMARY KEY NOT NULL,
   data BLOB NOT NULL,
   timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
-);"#;
+);
+CREATE TABLE IF NOT EXISTS "version_info" (
+  version NUMBER NOT NULL,
+  timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
+)
+"#;
@@ -7,6 +7,9 @@ use sqlx::{
   Pool, Row,
 };

+// latest version
+const LATEST_VERSION: i32 = 3;
+
 #[napi(object)]
 pub struct BlobRow {
   pub key: String,
@@ -38,6 +41,7 @@ pub struct SqliteConnection {
 pub enum ValidationResult {
   MissingTables,
   MissingDocIdColumn,
+  MissingVersionColumn,
   GeneralError,
   Valid,
 }
@@ -228,6 +232,39 @@ impl SqliteConnection {
     Ok(())
   }

+  #[napi]
+  pub async fn init_version(&self) -> napi::Result<()> {
+    // create version_info table
+    sqlx::query!(
+      "CREATE TABLE IF NOT EXISTS version_info (
+        version NUMBER NOT NULL,
+        timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
+      )"
+    )
+    .execute(&self.pool)
+    .await
+    .map_err(anyhow::Error::from)?;
+    // `3` is the first version that has version_info table,
+    // do not modify the version number.
+    sqlx::query!("INSERT INTO version_info (version) VALUES (3)")
+      .execute(&self.pool)
+      .await
+      .map_err(anyhow::Error::from)?;
+    Ok(())
+  }
+
+  #[napi]
+  pub async fn set_version(&self, version: i32) -> napi::Result<()> {
+    if version > LATEST_VERSION {
+      return Err(anyhow::Error::msg("Version is too new").into());
+    }
+    sqlx::query!("UPDATE version_info SET version = ?", version)
+      .execute(&self.pool)
+      .await
+      .map_err(anyhow::Error::from)?;
+    Ok(())
+  }
+
   #[napi]
   pub async fn close(&self) {
     self.pool.close().await;
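On the JavaScript side these two methods surface as `initVersion()` and `setVersion()` on `SqliteConnection` (see the `index.d.ts` change above). A hedged sketch of driving them directly, as `migrateToLatestDatabase` does for `initVersion` (the `stampVersion` wrapper is hypothetical):

```ts
import { SqliteConnection } from '@affine/native';

// Give a legacy database a version_info row, then record the schema version it
// was migrated to. The Rust side rejects versions newer than LATEST_VERSION (3).
async function stampVersion(path: string, version = 3): Promise<void> {
  const db = new SqliteConnection(path);
  await db.connect();
  await db.initVersion();       // CREATE TABLE IF NOT EXISTS version_info; INSERT 3
  await db.setVersion(version); // UPDATE version_info SET version = ?
  await db.close();
}
```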
@@ -261,6 +298,18 @@ impl SqliteConnection {
       Err(_) => return ValidationResult::GeneralError,
     };

+    let tables_res = sqlx::query("SELECT name FROM sqlite_master WHERE type='table'")
+      .fetch_all(&pool)
+      .await;
+
+    let version_exist = match tables_res {
+      Ok(res) => {
+        let names: Vec<String> = res.iter().map(|row| row.get(0)).collect();
+        names.contains(&"version_info".to_string())
+      }
+      Err(_) => return ValidationResult::GeneralError,
+    };
+
     let columns_res = sqlx::query("PRAGMA table_info(updates)")
       .fetch_all(&pool)
       .await;
@@ -277,6 +326,8 @@ impl SqliteConnection {
       ValidationResult::MissingTables
     } else if !doc_id_exist {
       ValidationResult::MissingDocIdColumn
+    } else if !version_exist {
+      ValidationResult::MissingVersionColumn
     } else {
       ValidationResult::Valid
     }