Mirror of https://github.com/hcengineering/platform.git (synced 2025-01-03 17:05:16 +03:00)
UBERF-7297: Allow to backup-restore from v0.6.239 (#5837)
Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
Parent: bcae079c5c
Commit: df4e763781
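What the change does, as read from the hunks below:
- The `backup` CLI command gains `-i, --include <include>`, a `;`-separated list of domain names (default `*`, meaning all), threaded into `backup()` as an optional `include?: Set<string>` that narrows the dumped domains.
- A new `backup-find <dirName> <fileId>` CLI command and `backupFind()` helper locate a document id inside an existing backup's snapshot archives.
- On the command with the `--merge`/`--parallel` options, the include option's short flag changes from `-s`, which collided with `-s, --skip`, to `-i`.
- A restore progress log line moves after the `totalSend` increment it reports.
- `migradeBlobData` now returns the inline `base64Data` payload of legacy `core:class:BlobData` documents so restore can materialize those blobs; this appears to be what makes backups taken from v0.6.239 restorable.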
@@ -38,6 +38,7 @@ import {
 import { setMetadata } from '@hcengineering/platform'
 import {
   backup,
+  backupFind,
   backupList,
   compactBackup,
   createFileBackupStorage,
@@ -60,6 +61,8 @@ import core, {
   metricsToString,
   versionToString,
   type Data,
+  type Doc,
+  type Ref,
   type Tx,
   type Version
 } from '@hcengineering/core'
@@ -584,6 +587,7 @@ export function devTool (
   program
     .command('backup <dirName> <workspace>')
     .description('dump workspace transactions and minio resources')
+    .option('-i, --include <include>', 'A list of ; separated domain names to include during backup', '*')
     .option('-s, --skip <skip>', 'A list of ; separated domain names to skip during backup', '')
     .option('-f, --force', 'Force backup', false)
     .option('-c, --recheck', 'Force hash recheck on server', false)
@@ -592,18 +596,27 @@ export function devTool (
       async (
         dirName: string,
         workspace: string,
-        cmd: { skip: string, force: boolean, recheck: boolean, timeout: string }
+        cmd: { skip: string, force: boolean, recheck: boolean, timeout: string, include: string }
       ) => {
         const storage = await createFileBackupStorage(dirName)
         await backup(toolCtx, transactorUrl, getWorkspaceId(workspace, productId), storage, {
           force: cmd.force,
           recheck: cmd.recheck,
+          include: cmd.include === '*' ? undefined : new Set(cmd.include.split(';').map((it) => it.trim())),
           skipDomains: (cmd.skip ?? '').split(';').map((it) => it.trim()),
           timeout: 0,
           connectTimeout: parseInt(cmd.timeout) * 1000
         })
       }
     )
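A note on the include parsing: `'*'` is the option default and maps to `undefined`, which `backup()` treats as "no include filter". A minimal standalone sketch of the same logic, with `parseDomainFilter` as a hypothetical helper name (the commit inlines the expression in the action):

// Hypothetical helper mirroring the inline expression above.
function parseDomainFilter (include: string): Set<string> | undefined {
  // '*' (the option default) means no filter: every domain is dumped.
  return include === '*' ? undefined : new Set(include.split(';').map((it) => it.trim()))
}

parseDomainFilter('*') // undefined -> all domains
parseDomainFilter('tx; attachment') // Set { 'tx', 'attachment' } (illustrative domain names)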
+  program
+    .command('backup-find <dirName> <fileId>')
+    .description('dump workspace transactions and minio resources')
+    .option('-d, --domain <domain>', 'Check only domain')
+    .action(async (dirName: string, fileId: string, cmd: { domain: string | undefined }) => {
+      const storage = await createFileBackupStorage(dirName)
+      await backupFind(storage, fileId as unknown as Ref<Doc>, cmd.domain)
+    })

   program
     .command('backup-compact <dirName>')
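`backup-find` locates a single document inside an existing backup without restoring it. As the `backupFind` implementation added below shows, it loads `backup.json.gz`, checks each domain's digest for the given id, then walks the snapshot list in reverse and scans each snapshot archive until it prints which one contains `<fileId>.json`; `-d` restricts the check to a single domain. (The description string appears to be copied from the `backup` command.)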
@@ -619,7 +632,7 @@ export function devTool (
     .option('-m, --merge', 'Enable merge of remote and backup content.', false)
     .option('-p, --parallel <parallel>', 'Enable merge of remote and backup content.', '1')
     .option('-c, --recheck', 'Force hash recheck on server', false)
-    .option('-s, --include <include>', 'A list of ; separated domain names to include during backup', '*')
+    .option('-i, --include <include>', 'A list of ; separated domain names to include during backup', '*')
    .option('-s, --skip <skip>', 'A list of ; separated domain names to skip during backup', '')
     .description('dump workspace transactions and minio resources')
     .action(
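The hunks below are in the backup implementation itself, the module that exports `backup`, `backupList`, `restore`, `compactBackup` and, after this commit, `backupFind`: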
@@ -433,6 +433,7 @@ export async function backup (
   workspaceId: WorkspaceId,
   storage: BackupStorage,
   options: {
+    include?: Set<string>
     skipDomains: string[]
     force: boolean
     recheck: boolean
@@ -475,7 +476,13 @@ export async function backup (
     ...connection
       .getHierarchy()
       .domains()
-      .filter((it) => it !== DOMAIN_TRANSIENT && it !== DOMAIN_MODEL && !options.skipDomains.includes(it))
+      .filter(
+        (it) =>
+          it !== DOMAIN_TRANSIENT &&
+          it !== DOMAIN_MODEL &&
+          !options.skipDomains.includes(it) &&
+          (options.include === undefined || options.include.has(it))
+      )
   ]
   ctx.info('domains for dump', { domains: domains.length })
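The resulting predicate composes four conditions, and skip still wins over include. A self-contained sketch (the constant values and domain names here are illustrative, not the real `@hcengineering/core` values):

const DOMAIN_TRANSIENT = 'transient' // placeholder value for illustration
const DOMAIN_MODEL = 'model' // placeholder value for illustration

function shouldDump (domain: string, skipDomains: string[], include?: Set<string>): boolean {
  return (
    domain !== DOMAIN_TRANSIENT &&
    domain !== DOMAIN_MODEL &&
    !skipDomains.includes(domain) &&
    (include === undefined || include.has(domain))
  )
}

shouldDump('tx', [], undefined) // true: no filters, non-special domain
shouldDump('tx', [], new Set(['blob'])) // false: include filter set, 'tx' not in it
shouldDump('blob', ['blob'], new Set(['blob'])) // false: skip always wins over include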
@@ -871,6 +878,80 @@ export async function backupList (storage: BackupStorage): Promise<void> {
   }
 }

+/**
+ * @public
+ */
+export async function backupFind (storage: BackupStorage, id: Ref<Doc>, domain?: string): Promise<void> {
+  const infoFile = 'backup.json.gz'
+
+  if (!(await storage.exists(infoFile))) {
+    throw new Error(`${infoFile} should present to restore`)
+  }
+  const backupInfo: BackupInfo = JSON.parse(gunzipSync(await storage.loadFile(infoFile)).toString())
+  console.log('workspace:', backupInfo.workspace ?? '', backupInfo.version)
+
+  const toolCtx = new MeasureMetricsContext('', {})
+
+  const snapshots = backupInfo.snapshots
+  const rnapshots = Array.from(backupInfo.snapshots).reverse()
+
+  // Collect all possible domains
+  const domains = new Set<Domain>()
+  for (const s of snapshots) {
+    Object.keys(s.domains).forEach((it) => domains.add(it as Domain))
+  }
+
+  for (const dd of domains) {
+    if (domain !== undefined && dd !== domain) {
+      continue
+    }
+    console.log('checking:', dd)
+    const sDigest = await loadDigest(toolCtx, storage, snapshots, dd)
+    if (sDigest.has(id)) {
+      console.log('we found file')
+      let found = false
+      for (const sn of rnapshots) {
+        const d = sn.domains[dd]
+        if (found) {
+          break
+        }
+        for (const sf of d?.storage ?? []) {
+          if (found) {
+            break
+          }
+          console.log('processing', sf)
+          const readStream = await storage.load(sf)
+          const ex = extract()
+
+          ex.on('entry', (headers, stream, next) => {
+            if (headers.name === id + '.json') {
+              console.log('file found in:', sf)
+              found = true
+            }
+            next()
+            stream.resume() // just auto drain the stream
+          })
+
+          const endPromise = new Promise((resolve) => {
+            ex.on('finish', () => {
+              resolve(null)
+            })
+          })
+          const unzip = createGunzip({ level: defaultLevel })
+
+          readStream.on('end', () => {
+            readStream.destroy()
+          })
+          readStream.pipe(unzip)
+          unzip.pipe(ex)
+
+          await endPromise
+        }
+      }
+    }
+  }
+}
+
 /**
  * @public
  * Restore state of DB to specified point.
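The scanning core of `backupFind` is a standard tar-stream pattern: pipe the gzipped snapshot through a gunzip stream into `extract()`, check each entry's name, and resume every entry stream so the parser can advance. A minimal self-contained sketch of just that pattern, reading a local `.tar.gz` path instead of going through `BackupStorage`:

import { createReadStream } from 'node:fs'
import { createGunzip } from 'node:zlib'
import { extract } from 'tar-stream'

// Returns whether the archive contains an entry named `${id}.json`,
// which is how backupFind recognizes a stored document.
async function archiveContains (path: string, id: string): Promise<boolean> {
  const ex = extract()
  let found = false
  ex.on('entry', (headers, stream, next) => {
    if (headers.name === id + '.json') {
      found = true
    }
    next()
    stream.resume() // drain the entry so the tar parser can continue
  })
  await new Promise<void>((resolve, reject) => {
    ex.on('finish', () => { resolve() })
    ex.on('error', reject)
    createReadStream(path).pipe(createGunzip()).pipe(ex)
  })
  return found
}

One quirk kept from the commit: `createGunzip({ level: defaultLevel })` passes a compression level to a decompressor; zlib appears to ignore `level` when inflating, so the option is harmless but has no effect.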
@@ -1005,8 +1086,8 @@ export async function restore (
           sendSize = sendSize + len

           if (sendSize > dataUploadSize || (doc === undefined && docs.length > 0)) {
-            console.log('upload', docs.length, `send: ${totalSend} from ${docsToAdd.size + totalSend}`, 'size:', sendSize)
             totalSend += docs.length
+            console.log('upload', docs.length, `send: ${totalSend} from ${docsToAdd.size + totalSend}`, 'size:', sendSize)
             await connection.upload(c, docs)
             docs.length = 0
             sendSize = 0
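A small logging fix: the progress line now prints after `totalSend += docs.length`, so the reported `send:` total includes the batch currently being uploaded instead of trailing it by one batch.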
@@ -1071,8 +1152,8 @@ export async function restore (
           const bf = Buffer.concat(chunks)
           const doc = JSON.parse(bf.toString()) as Doc
           if (doc._class === core.class.Blob || doc._class === 'core:class:BlobData') {
-            migradeBlobData(doc as Blob, changeset.get(doc._id) as string)
-            const d = blobs.get(bname)
+            const data = migradeBlobData(doc as Blob, changeset.get(doc._id) as string)
+            const d = blobs.get(bname) ?? (data !== '' ? Buffer.from(data, 'base64') : undefined)
             if (d === undefined) {
               blobs.set(bname, { doc, buffer: undefined })
               next()
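This is the consumer of the `migradeBlobData` change below: when the backup holds no separate buffer for a legacy blob, restore now falls back to decoding the base64 payload recovered from the document itself; a worked example follows the final hunk.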
@@ -1493,7 +1574,7 @@ export async function compactBackup (
 }

 export * from './service'
-function migradeBlobData (blob: Blob, etag: string): void {
+function migradeBlobData (blob: Blob, etag: string): string {
   if (blob._class === 'core:class:BlobData') {
     const bd = blob as unknown as BlobData
     blob.contentType = blob.contentType ?? bd.type
|
||||
blob.etag = etag
|
||||
blob._class = core.class.Blob
|
||||
delete (blob as any).type
|
||||
const result = (blob as any).base64Data
|
||||
delete (blob as any).base64Data
|
||||
return result
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
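To make the blob change concrete, a worked example of `migradeBlobData` (spelled thus in the source) after this commit; the input document is hypothetical, with field names taken from the diff:

// A hypothetical legacy document of class 'core:class:BlobData':
const legacy: any = {
  _id: 'blob-1',
  _class: 'core:class:BlobData',
  type: 'image/png',
  base64Data: 'aGVsbG8=' // inline payload, as older backups stored blob content
}

const payload = migradeBlobData(legacy, 'etag-123')
// `legacy` is migrated in place to core.class.Blob: contentType and etag are
// set, `type` and `base64Data` are deleted, and `payload` holds 'aGVsbG8=',
// which restore decodes via Buffer.from(payload, 'base64') when the backup
// contains no separate blob file. For documents that are already plain Blobs
// the function returns ''.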