Mirror of https://github.com/hcengineering/platform.git (synced 2024-12-22 19:11:33 +03:00)

Add file moving tool (#6223)

This commit is contained in:
parent 74d76d34a4
commit 29b082fc7b
@@ -1,6 +1,6 @@
 //
 // Copyright © 2020, 2021 Anticrm Platform Contributors.
-// Copyright © 2021 Hardcore Engineering Inc.
+// Copyright © 2021, 2024 Hardcore Engineering Inc.
 //
 // Licensed under the Eclipse Public License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License. You may
@@ -74,7 +74,7 @@ import { consoleModelLogger, type MigrateOperation } from '@hcengineering/model'
 import contact from '@hcengineering/model-contact'
 import { getMongoClient, getWorkspaceDB } from '@hcengineering/mongo'
 import { openAIConfigDefaults } from '@hcengineering/openai'
-import type { StorageAdapter } from '@hcengineering/server-core'
+import type { StorageAdapter, StorageAdapterEx } from '@hcengineering/server-core'
 import { deepEqual } from 'fast-equals'
 import { createWriteStream, readFileSync } from 'fs'
 import { benchmark, benchmarkWorker } from './benchmark'
@@ -95,6 +95,7 @@ import { fixJsonMarkup } from './markup'
 import { fixMixinForeignAttributes, showMixinForeignAttributes } from './mixin'
 import { openAIConfig } from './openai'
 import { fixAccountEmails, renameAccount } from './renameAccount'
+import { moveFiles } from './storage'
 
 const colorConstants = {
   colorRed: '\u001b[31m',
@@ -1040,6 +1041,37 @@ export function devTool (
       })
     })
 
+  program
+    .command('move-files')
+    .option('-w, --workspace <workspace>', 'Selected workspace only', '')
+    .action(async (cmd: { workspace: string }) => {
+      const { mongodbUri } = prepareTools()
+      await withDatabase(mongodbUri, async (db, client) => {
+        await withStorage(mongodbUri, async (adapter) => {
+          try {
+            const exAdapter = adapter as StorageAdapterEx
+            if (exAdapter.adapters === undefined || exAdapter.adapters.size < 2) {
+              throw new Error('bad storage config, at least two storage providers are required')
+            }
+
+            console.log('moving files to storage provider', exAdapter.defaultAdapter)
+
+            const workspaces = await listWorkspacesPure(db, productId)
+            for (const workspace of workspaces) {
+              if (cmd.workspace !== '' && workspace.workspace !== cmd.workspace) {
+                continue
+              }
+
+              const wsId = getWorkspaceId(workspace.workspace, productId)
+              await moveFiles(toolCtx, wsId, exAdapter)
+            }
+          } catch (err: any) {
+            console.error(err)
+          }
+        })
+      })
+    })
+
   program.command('fix-bw-workspace <workspace>').action(async (workspace: string) => {
     const { mongodbUri } = prepareTools()
     await withStorage(mongodbUri, async (adapter) => {
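The action above casts the generic StorageAdapter returned by withStorage to the aggregated StorageAdapterEx and refuses to run with fewer than two configured providers. A minimal sketch of that guard as a standalone helper (illustrative only, not part of the commit; the helper name is hypothetical):

import type { StorageAdapter, StorageAdapterEx } from '@hcengineering/server-core'

// Hypothetical helper mirroring the cast + guard inside the move-files action.
function asAggregator (adapter: StorageAdapter): StorageAdapterEx {
  const exAdapter = adapter as StorageAdapterEx
  // Moving files only makes sense with at least two providers configured:
  // a source to drain and the default target that receives the blobs.
  if (exAdapter.adapters === undefined || exAdapter.adapters.size < 2) {
    throw new Error('bad storage config, at least two storage providers are required')
  }
  return exAdapter
}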
dev/tool/src/storage.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
+//
+// Copyright © 2024 Hardcore Engineering Inc.
+//
+// Licensed under the Eclipse Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License. You may
+// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+import { type MeasureContext, type WorkspaceId } from '@hcengineering/core'
+import { type StorageAdapterEx } from '@hcengineering/server-core'
+import { PassThrough } from 'stream'
+
+export async function moveFiles (
+  ctx: MeasureContext,
+  workspaceId: WorkspaceId,
+  exAdapter: StorageAdapterEx
+): Promise<void> {
+  if (exAdapter.adapters === undefined) return
+
+  let count = 0
+
+  console.log('start', workspaceId.name)
+
+  // We assume that the adapter moves all new files to the default adapter
+  const target = exAdapter.defaultAdapter
+  await exAdapter.adapters.get(target)?.make(ctx, workspaceId)
+
+  for (const [name, adapter] of exAdapter.adapters.entries()) {
+    if (name === target) continue
+
+    const iterator = await adapter.listStream(ctx, workspaceId)
+    while (true) {
+      const data = await iterator.next()
+      if (data === undefined) break
+
+      const blob = await exAdapter.stat(ctx, workspaceId, data._id)
+      if (blob === undefined) continue
+      if (blob.provider === target) continue
+
+      const readable = await exAdapter.get(ctx, workspaceId, data._id)
+      const stream = readable.pipe(new PassThrough())
+      await exAdapter.put(ctx, workspaceId, data._id, stream, blob.contentType, blob.size)
+
+      count += 1
+      if (count % 100 === 0) {
+        console.log('...moved: ', count)
+      }
+    }
+    await iterator.close()
+  }
+
+  console.log('...done', workspaceId.name, count)
+}
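For reference, a minimal sketch of driving moveFiles for a single workspace outside the CLI command, assuming a MeasureContext and an already-configured aggregated StorageAdapterEx are available. The wrapper name and the import of getWorkspaceId from '@hcengineering/core' are assumptions, not part of this commit:

import { getWorkspaceId, type MeasureContext } from '@hcengineering/core'
import { type StorageAdapterEx } from '@hcengineering/server-core'
import { moveFiles } from './storage'

// Hypothetical wrapper: resolve the workspace id the same way the CLI command
// does, then copy every blob from the non-default providers into the default one.
async function moveSingleWorkspace (
  ctx: MeasureContext,
  exAdapter: StorageAdapterEx,
  workspace: string,
  productId: string
): Promise<void> {
  const wsId = getWorkspaceId(workspace, productId)
  await moveFiles(ctx, wsId, exAdapter)
}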
@@ -24,7 +24,7 @@ describe('aggregator tests', () => {
     const ws1: WorkspaceId = { name: 'ws1', productId: '' }
     return { mem1, mem2, aggr, ws1, testCtx }
   }
-  it('reuse existing storage', async () => {
+  it('not reuse existing storage', async () => {
     const { mem1, aggr, ws1, testCtx } = prepare1()
 
     // Test default provider
@@ -37,7 +37,7 @@ describe('aggregator tests', () => {
     // Test content typed provider
     await aggr.put(testCtx, ws1, 'test', 'data2', 'text/plain')
     const stat2 = await aggr.stat(testCtx, ws1, 'test')
-    expect(stat2?.provider).toEqual('mem1')
+    expect(stat2?.provider).toEqual('mem2')
 
     const dta = Buffer.concat(await aggr.read(testCtx, ws1, 'test')).toString()
     expect(dta).toEqual('data2')
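Restated in one place, the renamed test now asserts that a re-upload with a content type mapped to 'mem2' re-routes the blob instead of keeping it on the provider that already holds it. A sketch of the full scenario with inline comments (illustrative only; the hunks above show just the changed lines, and the content type used for the default-provider step is an assumption):

it('not reuse existing storage', async () => {
  const { aggr, ws1, testCtx } = prepare1()

  // Default provider: the first upload lands on 'mem1' (content type assumed).
  await aggr.put(testCtx, ws1, 'test', 'data', 'application/octet-stream')

  // Content typed provider: re-uploading as text/plain is now routed to 'mem2'
  // rather than being kept on 'mem1', and the data is readable from there.
  await aggr.put(testCtx, ws1, 'test', 'data2', 'text/plain')
  const stat2 = await aggr.stat(testCtx, ws1, 'test')
  expect(stat2?.provider).toEqual('mem2')

  const dta = Buffer.concat(await aggr.read(testCtx, ws1, 'test')).toString()
  expect(dta).toEqual('data2')
})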
@@ -317,12 +317,11 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterEx
     contentType: string,
     size?: number | undefined
   ): Promise<UploadedObjectInfo> {
-    // We need to reuse same provider for existing documents.
     const stat = (
       await this.dbAdapter.find<Blob>(ctx, workspaceId, DOMAIN_BLOB, { _id: objectName as Ref<Blob> }, { limit: 1 })
     ).shift()
 
-    const { provider, adapter } = this.selectProvider(stat?.provider, contentType)
+    const { provider, adapter } = this.selectProvider(undefined, contentType)
 
     const result = await adapter.put(ctx, workspaceId, objectName, stream, contentType, size)
 
@@ -351,6 +350,13 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterEx
     }
 
     await this.dbAdapter.upload<Blob>(ctx, workspaceId, DOMAIN_BLOB, [blobDoc])
 
+    // If the file is already stored in different provider, we need to remove it.
+    if (stat !== undefined && stat.provider !== provider) {
+      const adapter = this.adapters.get(stat.provider)
+      await adapter?.remove(ctx, workspaceId, [stat._id])
+    }
+
     return result
   }
 }