UBERF-8226: Fix backup service OOM (#6683)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>

parent 678da06af4
commit e72766a6b2

.vscode/launch.json (vendored, 11 changes)
@@ -293,16 +293,17 @@
       "args": ["src/index.ts"],
       "env": {
         "ACCOUNTS_URL": "http://localhost:3000",
-        "MINIO_ENDPOINT": "localhost",
-        "MINIO_ACCESS_KEY": "minioadmin",
-        "MINIO_SECRET_KEY": "minioadmin",
+        "STORAGE": "minio|localhost?accessKey=minioadmin&secretKey=minioadmin",
+        "WORKSPACE_STORAGE": "minio|localhost?accessKey=minioadmin&secretKey=minioadmin",
+        "MONGO_URL": "mongodb://localhost:27017",
         "SECRET": "secret",
-        "BUCKET_NAME":"test_backups",
-        "INTERVAL":"30"
+        "BUCKET_NAME":"backups",
+        "INTERVAL":"30",
       },
       "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
       "showAsyncStacks": true,
       "sourceMaps": true,
+      "outputCapture": "std",
       "cwd": "${workspaceRoot}/pods/backup",
       "protocol": "inspector"
     },
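
The launch configuration now feeds the backup pod a composite storage URI of the form kind|endpoint?param=value&... instead of separate MINIO_* variables. As a rough sketch of how such a value can be decomposed (the parseStorageUri helper below is hypothetical and is not the platform's actual storageConfigFromEnv):

// Hypothetical sketch: split "minio|localhost?accessKey=minioadmin&secretKey=minioadmin"
// into its kind, endpoint and parameters, assuming the format shown in launch.json above.
interface StorageUri {
  kind: string
  endpoint: string
  params: Record<string, string>
}

function parseStorageUri (uri: string): StorageUri {
  const [kind, rest = ''] = uri.split('|')
  const [endpoint, query = ''] = rest.split('?')
  const params: Record<string, string> = {}
  for (const pair of query.split('&').filter((p) => p.length > 0)) {
    const [k, v] = pair.split('=')
    params[k] = decodeURIComponent(v ?? '')
  }
  return { kind, endpoint, params }
}

// parseStorageUri('minio|localhost?accessKey=minioadmin&secretKey=minioadmin')
// => { kind: 'minio', endpoint: 'localhost',
//      params: { accessKey: 'minioadmin', secretKey: 'minioadmin' } }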

@@ -55,7 +55,7 @@ async function doBackup (dirName: string, token: string, endpoint: string, works
       progress: (value: number): void => {
         notify('backup', value)
       },
-      connection: client
+      getConnection: async () => client
     })
   } finally {
     await client.close()
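
Note the shape change above: backup() no longer accepts an already-open connection; callers now pass getConnection, a factory that is only invoked if a backup actually has to run. The tool here wraps its existing client, while the service defers creation entirely. A minimal sketch of the pattern with stand-in types (BackupClient and runBackup below are local stubs, not the platform's interfaces):

interface BackupClient {
  close: () => Promise<void>
}

interface BackupOptions {
  // Called lazily, only when a backup is really needed.
  getConnection?: () => Promise<BackupClient>
}

async function runBackup (opts: BackupOptions, hasChanges: boolean): Promise<void> {
  if (!hasChanges) {
    return // skip without ever opening a connection
  }
  const conn = await opts.getConnection?.()
  try {
    // ... dump domains using conn ...
  } finally {
    await conn?.close()
  }
}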

@@ -71,8 +71,6 @@ export class SplitLogger implements MeasureLogger {
           )
         })
       )
-    } else {
-      console.log('Logging only in files')
     }
     this.logger.info(
       '####################################################################################################################'

@@ -14,16 +14,16 @@
 //

 import { Analytics } from '@hcengineering/analytics'
+import { configureAnalytics, SplitLogger } from '@hcengineering/analytics-service'
 import { startBackup } from '@hcengineering/backup-service'
 import { MeasureMetricsContext, metricsToString, newMetrics } from '@hcengineering/core'
-import { DummyDbAdapter, DummyFullTextAdapter, type PipelineFactory } from '@hcengineering/server-core'
-import { createServerPipeline } from '@hcengineering/server-pipeline'
-import { configureAnalytics, SplitLogger } from '@hcengineering/analytics-service'
+import { type PipelineFactory } from '@hcengineering/server-core'
+import { createBackupPipeline, getConfig } from '@hcengineering/server-pipeline'
 import { writeFile } from 'fs/promises'
 import { join } from 'path'

 const metricsContext = new MeasureMetricsContext(
-  'github',
+  'backup',
   {},
   {},
   newMetrics(),

@@ -55,35 +55,26 @@ const onClose = (): void => {
   metricsContext.info('Closed')
 }

-startBackup(metricsContext, (mongoUrl, storageAdapter) => {
-  const factory: PipelineFactory = createServerPipeline(
-    metricsContext,
-    mongoUrl,
-    {
-      externalStorage: storageAdapter,
-      fullTextUrl: '',
-      indexParallel: 0,
-      indexProcessing: 0,
-      disableTriggers: true,
-      rekoniUrl: '',
-      usePassedCtx: true
-    },
-    {
-      adapters: {
-        FullTextBlob: {
-          factory: async () => new DummyDbAdapter(),
-          url: ''
-        }
-      },
-      fulltextAdapter: {
-        factory: async () => new DummyFullTextAdapter(),
-        stages: () => [],
-        url: ''
-      }
-    }
-  )
-  return factory
-})
+startBackup(
+  metricsContext,
+  (mongoUrl, storageAdapter) => {
+    const factory: PipelineFactory = createBackupPipeline(metricsContext, mongoUrl, {
+      externalStorage: storageAdapter,
+      usePassedCtx: true
+    })
+    return factory
+  },
+  (ctx, dbUrls, workspace, branding, externalStorage) => {
+    return getConfig(ctx, dbUrls, workspace, branding, ctx, {
+      externalStorage,
+      fullTextUrl: '',
+      indexParallel: 0,
+      indexProcessing: 0,
+      rekoniUrl: '',
+      disableTriggers: true
+    })
+  }
+)

 process.on('SIGINT', onClose)
 process.on('SIGTERM', onClose)

@@ -13,30 +13,39 @@
 // limitations under the License.
 //

-import { MeasureContext, systemAccountEmail } from '@hcengineering/core'
+import { MeasureContext, systemAccountEmail, type Branding, type WorkspaceIdWithUrl } from '@hcengineering/core'
 import { setMetadata } from '@hcengineering/platform'
 import { backupService } from '@hcengineering/server-backup'
 import serverClientPlugin from '@hcengineering/server-client'
-import { type PipelineFactory, type StorageAdapter } from '@hcengineering/server-core'
+import { type DbConfiguration, type PipelineFactory, type StorageAdapter } from '@hcengineering/server-core'
 import { buildStorageFromConfig, createStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
 import serverToken, { generateToken } from '@hcengineering/server-token'
 import config from './config'

 export function startBackup (
   ctx: MeasureContext,
-  pipelineFactoryFactory: (mongoUrl: string, storage: StorageAdapter) => PipelineFactory
+  pipelineFactoryFactory: (mongoUrl: string, storage: StorageAdapter) => PipelineFactory,
+  getConfig: (
+    ctx: MeasureContext,
+    dbUrls: string,
+    workspace: WorkspaceIdWithUrl,
+    branding: Branding | null,
+    externalStorage: StorageAdapter
+  ) => DbConfiguration
 ): void {
   setMetadata(serverToken.metadata.Secret, config.Secret)
   setMetadata(serverClientPlugin.metadata.Endpoint, config.AccountsURL)
   setMetadata(serverClientPlugin.metadata.UserAgent, config.ServiceID)

+  const [mainDbUrl, rawDbUrl] = config.MongoURL.split(';')
+
   const backupStorageConfig = storageConfigFromEnv(config.Storage)
   const workspaceStorageConfig = storageConfigFromEnv(config.WorkspaceStorage)

   const storageAdapter = createStorageFromConfig(backupStorageConfig.storages[0])
-  const workspaceStorageAdapter = buildStorageFromConfig(workspaceStorageConfig, config.MongoURL)
+  const workspaceStorageAdapter = buildStorageFromConfig(workspaceStorageConfig, rawDbUrl ?? mainDbUrl)

-  const pipelineFactory = pipelineFactoryFactory(config.MongoURL, workspaceStorageAdapter)
+  const pipelineFactory = pipelineFactoryFactory(mainDbUrl, workspaceStorageAdapter)

   // A token to access account service
   const token = generateToken(systemAccountEmail, { name: 'backup' })

@@ -46,7 +55,10 @@ export function startBackup (
     storageAdapter,
     { ...config, Token: token },
     pipelineFactory,
-    workspaceStorageAdapter
+    workspaceStorageAdapter,
+    (ctx, workspace, branding, externalStorage) => {
+      return getConfig(ctx, mainDbUrl, workspace, branding, externalStorage)
+    }
   )

   process.on('SIGINT', shutdown)
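
One detail worth noting in the hunks above: config.MongoURL may now carry two connection strings separated by ';' (a main URL plus an optional raw/direct URL), with the second falling back to the first when absent. A self-contained sketch of that convention (names are illustrative):

// 'mongodb://main:27017;mongodb://raw:27017' -> main + raw URLs
// 'mongodb://main:27017'                     -> raw URL is undefined
function splitDbUrls (mongoUrl: string): { mainDbUrl: string, rawDbUrl: string | undefined } {
  const [mainDbUrl, rawDbUrl] = mongoUrl.split(';')
  return { mainDbUrl, rawDbUrl }
}

const { mainDbUrl, rawDbUrl } = splitDbUrls('mongodb://localhost:27017')
const storageUrl = rawDbUrl ?? mainDbUrl // same fallback as buildStorageFromConfig above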

@@ -35,7 +35,8 @@ import core, {
   TxCollectionCUD,
   WorkspaceId,
   type Blob,
-  type DocIndexState
+  type DocIndexState,
+  type Tx
 } from '@hcengineering/core'
 import { BlobClient, createClient } from '@hcengineering/server-client'
 import { fullTextPushStagePrefix, type StorageAdapter } from '@hcengineering/server-core'

@@ -514,7 +515,8 @@ export async function backup (
     connectTimeout: number
     skipBlobContentTypes: string[]
     blobDownloadLimit: number
-    connection?: CoreClient & BackupClient
+    getLastTx?: () => Promise<Tx | undefined>
+    getConnection?: () => Promise<CoreClient & BackupClient>
     storageAdapter?: StorageAdapter
     // Return true in case
     isCanceled?: () => boolean

@@ -529,7 +531,7 @@ export async function backup (
     skipBlobContentTypes: [],
     blobDownloadLimit: 15
   }
-): Promise<void> {
+): Promise<boolean> {
   ctx = ctx.newChild('backup', {
     workspaceId: workspaceId.name,
     force: options.force,

@@ -555,36 +557,11 @@ export async function backup (
     }, options.timeout)
   }

-  const token =
-    options.token ??
-    generateToken(systemAccountEmail, workspaceId, {
-      mode: 'backup'
-    })
-
-  const connection =
-    options.connection ??
-    ((await createClient(transactorUrl, options.token ?? token, undefined, options.connectTimeout)) as CoreClient &
-      BackupClient)
-
-  const blobClient = new BlobClient(transactorUrl, token, workspaceId, { storageAdapter: options.storageAdapter })
-  ctx.info('starting backup', { workspace: workspaceId.name })
+  const st = Date.now()
+  let connection!: CoreClient & BackupClient
+  let printEnd = true

   try {
-    const domains = [
-      ...connection
-        .getHierarchy()
-        .domains()
-        .filter(
-          (it) =>
-            it !== DOMAIN_TRANSIENT &&
-            it !== DOMAIN_MODEL &&
-            it !== ('fulltext-blob' as Domain) &&
-            !options.skipDomains.includes(it) &&
-            (options.include === undefined || options.include.has(it))
-        )
-    ]
-    ctx.info('domains for dump', { domains: domains.length })
-
     let backupInfo: BackupInfo = {
       workspace: workspaceId.name,
       version: '0.6.2',

@@ -602,19 +579,70 @@ export async function backup (
     backupInfo.workspace = workspaceId.name

+    let lastTx: Tx | undefined
+
+    let lastTxChecked = false
     // Skip backup if there is no transaction changes.
-    const lastTx = await connection.findOne(
-      core.class.Tx,
-      {},
-      { limit: 1, sort: { modifiedOn: SortingOrder.Descending } }
-    )
-    if (lastTx !== undefined) {
-      if (lastTx._id === backupInfo.lastTxId && !options.force) {
-        ctx.info('No transaction changes. Skipping backup.', { workspace: workspaceId.name })
-        return
+    if (options.getLastTx !== undefined) {
+      lastTx = await options.getLastTx()
+      if (lastTx !== undefined) {
+        if (lastTx._id === backupInfo.lastTxId && !options.force) {
+          printEnd = false
+          ctx.info('No transaction changes. Skipping backup.', { workspace: workspaceId.name })
+          return false
+        }
+      }
+      lastTxChecked = true
+    }
+    const token =
+      options.token ??
+      generateToken(systemAccountEmail, workspaceId, {
+        mode: 'backup'
+      })
+
+    ctx.warn('starting backup', { workspace: workspaceId.name })
+
+    connection =
+      options.getConnection !== undefined
+        ? await options.getConnection()
+        : ((await createClient(
+            transactorUrl,
+            options.token ?? token,
+            undefined,
+            options.connectTimeout
+          )) as CoreClient & BackupClient)
+
+    if (!lastTxChecked) {
+      lastTx = await connection.findOne(core.class.Tx, {}, { limit: 1, sort: { modifiedOn: SortingOrder.Descending } })
+      if (lastTx !== undefined) {
+        if (lastTx._id === backupInfo.lastTxId && !options.force) {
+          ctx.info('No transaction changes. Skipping backup.', { workspace: workspaceId.name })
+          if (options.getConnection === undefined) {
+            await connection.close()
+          }
+          return false
+        }
       }
     }

+    const blobClient = new BlobClient(transactorUrl, token, workspaceId, { storageAdapter: options.storageAdapter })
+
+    const domains = [
+      ...connection
+        .getHierarchy()
+        .domains()
+        .filter(
+          (it) =>
+            it !== DOMAIN_TRANSIENT &&
+            it !== DOMAIN_MODEL &&
+            it !== ('fulltext-blob' as Domain) &&
+            !options.skipDomains.includes(it) &&
+            (options.include === undefined || options.include.has(it))
+        )
+    ]
+
+    ctx.info('domains for dump', { domains: domains.length })
+
     backupInfo.lastTxId = '' // Clear until full backup will be complete

     const snapshot: BackupSnapshot = {
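
This hunk is the heart of the OOM fix: when getLastTx is supplied, backup() can answer "did anything change since lastTxId?" before any client, hierarchy, or model is materialized in memory, so only workspaces with new transactions ever get a connection. A condensed sketch of the control flow with stub types (Tx and Connection below are local stand-ins, and the force/token handling is omitted):

interface Tx { _id: string }
interface Connection { close: () => Promise<void> }

async function backupIfChanged (
  lastTxId: string,
  getLastTx: () => Promise<Tx | undefined>,
  getConnection: () => Promise<Connection>
): Promise<boolean> {
  const lastTx = await getLastTx() // cheap: one indexed query, no pipeline
  if (lastTx !== undefined && lastTx._id === lastTxId) {
    return false // nothing changed: no connection, hierarchy, or model in memory
  }
  const connection = await getConnection() // the expensive part, now conditional
  try {
    // ... perform the actual dump ...
    return true
  } finally {
    await connection.close()
  }
}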

@@ -1006,11 +1034,15 @@ export async function backup (
       backupInfo.lastTxId = lastTx?._id ?? '0' // We could store last tx, since full backup is complete
       await storage.writeFile(infoFile, gzipSync(JSON.stringify(backupInfo, undefined, 2), { level: defaultLevel }))
     }
+    return true
   } catch (err: any) {
     ctx.error('backup error', { err, workspace: workspaceId.name })
+    return false
   } finally {
-    ctx.info('end backup', { workspace: workspaceId.name })
-    if (options.connection === undefined) {
+    if (printEnd) {
+      ctx.info('end backup', { workspace: workspaceId.name, totalTime: Date.now() - st })
+    }
+    if (options.getConnection === undefined && connection !== undefined) {
       await connection.close()
     }
     ctx.end()

@@ -15,11 +15,17 @@
 import {
   BaseWorkspaceInfo,
+  DOMAIN_TX,
   getWorkspaceId,
+  Hierarchy,
+  ModelDb,
+  SortingOrder,
   systemAccountEmail,
   type BackupClient,
+  type Branding,
   type Client,
   type MeasureContext,
+  type Tx,
   type WorkspaceIdWithUrl
 } from '@hcengineering/core'
 import { PlatformError, unknownError } from '@hcengineering/platform'

@@ -27,6 +33,7 @@ import { listAccountWorkspaces } from '@hcengineering/server-client'
 import {
   BackupClientOps,
   SessionDataImpl,
+  type DbConfiguration,
   type Pipeline,
   type PipelineFactory,
   type StorageAdapter

@@ -48,7 +55,13 @@ class BackupWorker {
     readonly storageAdapter: StorageAdapter,
     readonly config: BackupConfig,
     readonly pipelineFactory: PipelineFactory,
-    readonly workspaceStorageAdapter: StorageAdapter
+    readonly workspaceStorageAdapter: StorageAdapter,
+    readonly getConfig: (
+      ctx: MeasureContext,
+      workspace: WorkspaceIdWithUrl,
+      branding: Branding | null,
+      externalStorage: StorageAdapter
+    ) => DbConfiguration
   ) {}

   canceled = false

@@ -61,11 +74,26 @@ class BackupWorker {
   backupPromise: Promise<void> | undefined

+  printStats (
+    ctx: MeasureContext,
+    stats: { failedWorkspaces: BaseWorkspaceInfo[], processed: number, skipped: number }
+  ): void {
+    ctx.warn(
+      `****************************************
+      backup statistics:`,
+      {
+        backuped: stats.processed,
+        notChanges: stats.skipped,
+        failed: stats.failedWorkspaces.length
+      }
+    )
+  }
+
   async triggerBackup (ctx: MeasureContext): Promise<void> {
-    const failed = await this.backup(ctx)
-    if (failed.length > 0) {
-      ctx.info('Failed to backup workspaces, Retry failed workspaces once.', { failed: failed.length })
-      await this.doBackup(ctx, failed)
+    const { failedWorkspaces } = await this.backup(ctx)
+    if (failedWorkspaces.length > 0) {
+      ctx.info('Failed to backup workspaces, Retry failed workspaces once.', { failed: failedWorkspaces.length })
+      this.printStats(ctx, await this.doBackup(ctx, failedWorkspaces))
     }
   }

@@ -81,7 +109,9 @@ class BackupWorker {
     }, this.config.Interval * 1000)
   }

-  async backup (ctx: MeasureContext): Promise<BaseWorkspaceInfo[]> {
+  async backup (
+    ctx: MeasureContext
+  ): Promise<{ failedWorkspaces: BaseWorkspaceInfo[], processed: number, skipped: number }> {
     const workspacesIgnore = new Set(this.config.SkipWorkspaces.split(';'))
     const workspaces = (await listAccountWorkspaces(this.config.Token)).filter((it) => {
       return !workspacesIgnore.has(it.workspace)

@@ -90,16 +120,20 @@ class BackupWorker {
     return await this.doBackup(ctx, workspaces)
   }

-  async doBackup (rootCtx: MeasureContext, workspaces: BaseWorkspaceInfo[]): Promise<BaseWorkspaceInfo[]> {
+  async doBackup (
+    rootCtx: MeasureContext,
+    workspaces: BaseWorkspaceInfo[]
+  ): Promise<{ failedWorkspaces: BaseWorkspaceInfo[], processed: number, skipped: number }> {
     let index = 0

     const failedWorkspaces: BaseWorkspaceInfo[] = []
+    let processed = 0
     for (const ws of workspaces) {
       if (this.canceled) {
-        return failedWorkspaces
+        return { failedWorkspaces, processed, skipped: workspaces.length - processed }
       }
       index++
-      rootCtx.info('\n\nBACKUP WORKSPACE ', {
+      rootCtx.warn('\n\nBACKUP WORKSPACE ', {
         workspace: ws.workspace,
         index,
         total: workspaces.length

@@ -109,6 +143,7 @@ class BackupWorker {
         enableConsole: 'true'
       })
       const ctx = rootCtx.newChild(ws.workspace, { workspace: ws.workspace }, {}, childLogger)
+      let pipeline: Pipeline | undefined
       try {
         const storage = await createStorageBackupStorage(
           ctx,

@@ -121,28 +156,67 @@ class BackupWorker {
           workspaceName: ws.workspaceName ?? '',
           workspaceUrl: ws.workspaceUrl ?? ''
         }
-        const pipeline = await this.pipelineFactory(ctx, wsUrl, true, () => {}, null)
-
-        await ctx.with('backup', { workspace: ws.workspace }, async (ctx) => {
-          await backup(ctx, '', getWorkspaceId(ws.workspace), storage, {
-            skipDomains: [],
-            force: false,
-            recheck: false,
-            timeout: this.config.Timeout * 1000,
-            connectTimeout: 5 * 60 * 1000, // 5 minutes to,
-            blobDownloadLimit: 100,
-            skipBlobContentTypes: [],
-            storageAdapter: this.workspaceStorageAdapter,
-            connection: this.wrapPipeline(ctx, pipeline, wsUrl)
-          })
-        })
+        processed += (await ctx.with(
+          'backup',
+          { workspace: ws.workspace },
+          async (ctx) =>
+            await backup(ctx, '', getWorkspaceId(ws.workspace), storage, {
+              skipDomains: [],
+              force: false,
+              recheck: false,
+              timeout: this.config.Timeout * 1000,
+              connectTimeout: 5 * 60 * 1000, // 5 minutes to,
+              blobDownloadLimit: 100,
+              skipBlobContentTypes: [],
+              storageAdapter: this.workspaceStorageAdapter,
+              getLastTx: async (): Promise<Tx | undefined> => {
+                const config = this.getConfig(ctx, wsUrl, null, this.workspaceStorageAdapter)
+                const adapterConf = config.adapters[config.domains[DOMAIN_TX]]
+                const hierarchy = new Hierarchy()
+                const modelDb = new ModelDb(hierarchy)
+                const txAdapter = await adapterConf.factory(
+                  ctx,
+                  hierarchy,
+                  adapterConf.url,
+                  wsUrl,
+                  modelDb,
+                  this.workspaceStorageAdapter
+                )
+                try {
+                  await txAdapter.init?.()

+                  return (
+                    await txAdapter.rawFindAll<Tx>(
+                      DOMAIN_TX,
+                      {},
+                      { limit: 1, sort: { modifiedOn: SortingOrder.Descending } }
+                    )
+                  ).shift()
+                } finally {
+                  await txAdapter.close()
+                }
+              },
+              getConnection: async () => {
+                if (pipeline === undefined) {
+                  pipeline = await this.pipelineFactory(ctx, wsUrl, true, () => {}, null)
+                }
+                return this.wrapPipeline(ctx, pipeline, wsUrl)
+              }
+            })
+        ))
+          ? 1
+          : 0
       } catch (err: any) {
         rootCtx.error('\n\nFAILED to BACKUP', { workspace: ws.workspace, err })
         failedWorkspaces.push(ws)
         await childLogger?.close()
+      } finally {
+        if (pipeline !== undefined) {
+          await pipeline.close()
+        }
       }
     }
-    return failedWorkspaces
+    return { failedWorkspaces, processed, skipped: workspaces.length - processed }
   }

   wrapPipeline (ctx: MeasureContext, pipeline: Pipeline, wsUrl: WorkspaceIdWithUrl): Client & BackupClient {
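
The getLastTx callback above is what keeps idle workspaces cheap: rather than constructing the full pipeline, the worker instantiates only the transaction-domain adapter from the DbConfiguration, reads the newest tx, and tears the adapter down again. The same open-query-close discipline in a generic sketch (RawAdapter is a local stand-in, not the platform's adapter interface):

interface RawAdapter<T> {
  init?: () => Promise<void>
  findLatest: () => Promise<T | undefined>
  close: () => Promise<void>
}

// Generic "probe one row, then release everything" helper.
async function probeLatest<T> (open: () => Promise<RawAdapter<T>>): Promise<T | undefined> {
  const adapter = await open()
  try {
    await adapter.init?.()
    return await adapter.findLatest()
  } finally {
    await adapter.close() // nothing stays resident between polls
  }
}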

@@ -213,15 +287,22 @@ export function backupService (
   storage: StorageAdapter,
   config: BackupConfig,
   pipelineFactory: PipelineFactory,
-  workspaceStorageAdapter: StorageAdapter
+  workspaceStorageAdapter: StorageAdapter,
+  getConfig: (
+    ctx: MeasureContext,
+    workspace: WorkspaceIdWithUrl,
+    branding: Branding | null,
+    externalStorage: StorageAdapter
+  ) => DbConfiguration
 ): () => void {
-  const backupWorker = new BackupWorker(storage, config, pipelineFactory, workspaceStorageAdapter)
+  const backupWorker = new BackupWorker(storage, config, pipelineFactory, workspaceStorageAdapter, getConfig)

   const shutdown = (): void => {
     void backupWorker.close()
   }

-  void backupWorker.backup(ctx).then(() => {
+  void backupWorker.backup(ctx).then((res) => {
+    backupWorker.printStats(ctx, res)
     void backupWorker.schedule(ctx)
   })
   return shutdown

@@ -82,7 +82,7 @@ class AdapterStorage implements BackupStorage {
   async exists (name: string): Promise<boolean> {
     try {
       return (await this.client.stat(this.ctx, this.workspaceId, join(this.root, name))) !== undefined
-    } catch (err) {
+    } catch (err: any) {
       return false
     }
   }

@@ -286,6 +286,15 @@ export class MinioService implements StorageAdapter {
         version: result.versionId ?? null
       }
     } catch (err: any) {
+      if (
+        err?.code === 'NoSuchKey' ||
+        err?.code === 'NotFound' ||
+        err?.message === 'No such key' ||
+        err?.Code === 'NoSuchKey'
+      ) {
+        // Do not print error in this case
+        return
+      }
       ctx.error('no object found', { error: err, objectName, workspaceId: workspaceId.name })
     }
   }
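
A missing object is an expected outcome of a stat call, and S3-compatible clients report it inconsistently: an err.code of 'NoSuchKey' or 'NotFound', an err.message of 'No such key', or the capitalized err.Code seen in raw S3 XML errors. The same check factored into a reusable predicate (a sketch, not code from this commit):

// True when the error means "object does not exist" rather than a real failure.
function isNoSuchKeyError (err: any): boolean {
  return (
    err?.code === 'NoSuchKey' ||
    err?.code === 'NotFound' ||
    err?.message === 'No such key' ||
    err?.Code === 'NoSuchKey'
  )
}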

@@ -49,6 +49,7 @@ import {
   createBenchmarkAdapter,
   createInMemoryAdapter,
   createPipeline,
+  DummyDbAdapter,
   DummyFullTextAdapter,
   FullTextMiddleware,
   type AggregatorStorageAdapter,

@@ -154,6 +155,74 @@ export function createServerPipeline (
     }
   }
 }

+/**
+ * @public
+ */
+export function createBackupPipeline (
+  metrics: MeasureContext,
+  dbUrls: string,
+  opt: {
+    usePassedCtx?: boolean
+    adapterSecurity?: boolean
+
+    externalStorage: StorageAdapter
+  }
+): PipelineFactory {
+  return (ctx, workspace, upgrade, broadcast, branding) => {
+    const metricsCtx = opt.usePassedCtx === true ? ctx : metrics
+    const wsMetrics = metricsCtx.newChild('🧲 backup', {})
+    const conf = getConfig(
+      metrics,
+      dbUrls,
+      workspace,
+      branding,
+      wsMetrics,
+      {
+        ...opt,
+        fullTextUrl: '',
+        indexParallel: 0,
+        indexProcessing: 0,
+        rekoniUrl: '',
+        disableTriggers: true
+      },
+      {
+        adapters: {
+          FullTextBlob: {
+            factory: async () => new DummyDbAdapter(),
+            url: ''
+          }
+        },
+        fulltextAdapter: {
+          factory: async () => new DummyFullTextAdapter(),
+          stages: () => [],
+          url: ''
+        }
+      }
+    )
+
+    const middlewares: MiddlewareCreator[] = [
+      LowLevelMiddleware.create,
+      ContextNameMiddleware.create,
+      DomainFindMiddleware.create,
+      DBAdapterInitMiddleware.create,
+      ModelMiddleware.create,
+      DBAdapterMiddleware.create(conf)
+    ]
+
+    const hierarchy = new Hierarchy()
+    const modelDb = new ModelDb(hierarchy)
+    const context: PipelineContext = {
+      workspace,
+      branding,
+      modelDb,
+      hierarchy,
+      storageAdapter: opt.externalStorage
+    }
+    return createPipeline(ctx, middlewares, context)
+  }
+}
+
 export async function getServerPipeline (
   ctx: MeasureContext,
   mongodbUri: string,
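
createBackupPipeline assembles a deliberately minimal middleware stack: low-level access, context naming, domain finds, adapter initialization, and the model and DB adapters, with the fulltext pieces stubbed out by dummies, since backup only needs to stream raw domain documents. Its intended call shape mirrors the pods/backup change earlier in this commit (a sketch; mongoUrl and storageAdapter are placeholders from that context):

const factory: PipelineFactory = createBackupPipeline(metricsContext, mongoUrl, {
  externalStorage: storageAdapter, // workspace blob storage adapter
  usePassedCtx: true // measure against the caller's context, not the global one
})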

@@ -211,7 +280,7 @@ export async function getServerPipeline (
     }
   }

-function getConfig (
+export function getConfig (
   metrics: MeasureContext,
   dbUrls: string,
   workspace: WorkspaceIdWithUrl,
|
@ -865,16 +865,22 @@ class TSessionManager implements SessionManager {
|
|||||||
request: Request<any>,
|
request: Request<any>,
|
||||||
workspace: string // wsId, toWorkspaceString()
|
workspace: string // wsId, toWorkspaceString()
|
||||||
): void {
|
): void {
|
||||||
const userCtx = requestCtx.newChild('📞 client', {
|
const backupMode = service.getMode() === 'backup'
|
||||||
workspace: '🧲 ' + workspace
|
|
||||||
})
|
const userCtx = requestCtx.newChild(
|
||||||
|
'📞 client',
|
||||||
|
!backupMode
|
||||||
|
? {
|
||||||
|
workspace: '🧲 ' + workspace
|
||||||
|
}
|
||||||
|
: {}
|
||||||
|
)
|
||||||
|
|
||||||
// Calculate total number of clients
|
// Calculate total number of clients
|
||||||
const reqId = generateId()
|
const reqId = generateId()
|
||||||
|
|
||||||
const st = Date.now()
|
const st = Date.now()
|
||||||
try {
|
try {
|
||||||
const backupMode = 'loadChunk' in service
|
|
||||||
void userCtx.with(`🧭 ${backupMode ? 'handleBackup' : 'handleRequest'}`, {}, async (ctx) => {
|
void userCtx.with(`🧭 ${backupMode ? 'handleBackup' : 'handleRequest'}`, {}, async (ctx) => {
|
||||||
if (request.time != null) {
|
if (request.time != null) {
|
||||||
const delta = Date.now() - request.time
|
const delta = Date.now() - request.time
|
||||||
|