UBERF-6161: Storage configuration (#5109)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in:
Andrey Sobolev 2024-04-02 14:05:16 +07:00 committed by GitHub
parent 5da63b70f4
commit 0803bb4ea2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
50 changed files with 1254 additions and 713 deletions

File diff suppressed because it is too large Load Diff

View File

@ -75,6 +75,7 @@ services:
environment: environment:
- SERVER_PORT=8080 - SERVER_PORT=8080
- SERVER_SECRET=secret - SERVER_SECRET=secret
- MONGO_URL=mongodb://mongodb:27017
- ACCOUNTS_URL=http://localhost:3000 - ACCOUNTS_URL=http://localhost:3000
- REKONI_URL=http://localhost:4004 - REKONI_URL=http://localhost:4004
- CALENDAR_URL=http://localhost:8095 - CALENDAR_URL=http://localhost:8095

View File

@ -69,6 +69,7 @@ class InMemoryTxAdapter extends DummyDbAdapter implements TxAdapter {
* @public * @public
*/ */
export async function createInMemoryTxAdapter ( export async function createInMemoryTxAdapter (
ctx: MeasureContext,
hierarchy: Hierarchy, hierarchy: Hierarchy,
url: string, url: string,
workspace: WorkspaceId workspace: WorkspaceId

View File

@ -18,6 +18,7 @@ import chunter, { type ChatMessage } from '@hcengineering/chunter'
import contact from '@hcengineering/contact' import contact from '@hcengineering/contact'
import core, { import core, {
DOMAIN_TX, DOMAIN_TX,
type MeasureContext,
SortingOrder, SortingOrder,
TxOperations, TxOperations,
TxProcessor, TxProcessor,
@ -43,6 +44,7 @@ import { MongoClient } from 'mongodb'
export const DOMAIN_ACTIVITY = 'activity' as Domain export const DOMAIN_ACTIVITY = 'activity' as Domain
export async function cleanWorkspace ( export async function cleanWorkspace (
ctx: MeasureContext,
mongoUrl: string, mongoUrl: string,
workspaceId: WorkspaceId, workspaceId: WorkspaceId,
storageAdapter: StorageAdapter, storageAdapter: StorageAdapter,
@ -67,14 +69,14 @@ export async function cleanWorkspace (
attachments.map((it) => it.file).concat(contacts.map((it) => it.avatar).filter((it) => it) as string[]) attachments.map((it) => it.file).concat(contacts.map((it) => it.avatar).filter((it) => it) as string[])
) )
const minioList = await storageAdapter.list(workspaceId) const minioList = await storageAdapter.list(ctx, workspaceId)
const toClean: string[] = [] const toClean: string[] = []
for (const mv of minioList) { for (const mv of minioList) {
if (!files.has(mv.name)) { if (!files.has(mv._id)) {
toClean.push(mv.name) toClean.push(mv._id)
} }
} }
await storageAdapter.remove(workspaceId, toClean) await storageAdapter.remove(ctx, workspaceId, toClean)
// connection.loadChunk(DOMAIN_BLOB, idx = ) // connection.loadChunk(DOMAIN_BLOB, idx = )
if (opt.recruit) { if (opt.recruit) {
@ -145,16 +147,20 @@ export async function cleanWorkspace (
} }
} }
export async function fixMinioBW (workspaceId: WorkspaceId, storageService: StorageAdapter): Promise<void> { export async function fixMinioBW (
ctx: MeasureContext,
workspaceId: WorkspaceId,
storageService: StorageAdapter
): Promise<void> {
console.log('try clean bw miniature for ', workspaceId.name) console.log('try clean bw miniature for ', workspaceId.name)
const from = new Date(new Date().setDate(new Date().getDate() - 7)) const from = new Date(new Date().setDate(new Date().getDate() - 7)).getTime()
const list = await storageService.list(workspaceId) const list = await storageService.list(ctx, workspaceId)
console.log('found', list.length) console.log('found', list.length)
let removed = 0 let removed = 0
for (const obj of list) { for (const obj of list) {
if (obj.lastModified < from) continue if (obj.modifiedOn < from) continue
if (obj.name.includes('%size%')) { if ((obj._id as string).includes('%size%')) {
await storageService.remove(workspaceId, [obj.name]) await storageService.remove(ctx, workspaceId, [obj._id])
removed++ removed++
if (removed % 100 === 0) { if (removed % 100 === 0) {
console.log('removed: ', removed) console.log('removed: ', removed)

View File

@ -1,5 +1,5 @@
import { dropWorkspace, setWorkspaceDisabled, type Workspace } from '@hcengineering/account' import { dropWorkspace, setWorkspaceDisabled, type Workspace } from '@hcengineering/account'
import core, { AccountRole, MeasureMetricsContext, SortingOrder } from '@hcengineering/core' import core, { AccountRole, type MeasureContext, MeasureMetricsContext, SortingOrder } from '@hcengineering/core'
import contact from '@hcengineering/model-contact' import contact from '@hcengineering/model-contact'
import { getWorkspaceDB } from '@hcengineering/mongo' import { getWorkspaceDB } from '@hcengineering/mongo'
import { type StorageAdapter } from '@hcengineering/server-core' import { type StorageAdapter } from '@hcengineering/server-core'
@ -7,6 +7,7 @@ import { connect } from '@hcengineering/server-tool'
import { type Db, type MongoClient } from 'mongodb' import { type Db, type MongoClient } from 'mongodb'
export async function checkOrphanWorkspaces ( export async function checkOrphanWorkspaces (
ctx: MeasureContext,
workspaces: Workspace[], workspaces: Workspace[],
transactorUrl: string, transactorUrl: string,
productId: string, productId: string,
@ -40,7 +41,7 @@ export async function checkOrphanWorkspaces (
// Find last transaction index: // Find last transaction index:
const wspace = { name: ws.workspace, productId } const wspace = { name: ws.workspace, productId }
const hasBucket = await storageAdapter.exists(wspace) const hasBucket = await storageAdapter.exists(ctx, wspace)
const [lastTx] = await connection.findAll( const [lastTx] = await connection.findAll(
core.class.Tx, core.class.Tx,
{ {
@ -69,12 +70,13 @@ export async function checkOrphanWorkspaces (
const workspaceDb = getWorkspaceDB(client, { name: ws.workspace, productId }) const workspaceDb = getWorkspaceDB(client, { name: ws.workspace, productId })
await workspaceDb.dropDatabase() await workspaceDb.dropDatabase()
if (storageAdapter !== undefined && hasBucket) { if (storageAdapter !== undefined && hasBucket) {
const docs = await storageAdapter.list(wspace) const docs = await storageAdapter.list(ctx, wspace)
await storageAdapter.remove( await storageAdapter.remove(
ctx,
wspace, wspace,
docs.map((it) => it.name) docs.map((it) => it._id)
) )
await storageAdapter?.delete(wspace) await storageAdapter.delete(ctx, wspace)
} }
} }
} }

View File

@ -410,6 +410,7 @@ export function devTool (
// We need to update workspaces with missing workspaceUrl // We need to update workspaces with missing workspaceUrl
await checkOrphanWorkspaces( await checkOrphanWorkspaces(
toolCtx,
workspaces, workspaces,
transactorUrl, transactorUrl,
productId, productId,
@ -482,7 +483,6 @@ export function devTool (
program program
.command('backup <dirName> <workspace>') .command('backup <dirName> <workspace>')
.description('dump workspace transactions and minio resources') .description('dump workspace transactions and minio resources')
.requiredOption('-i --index <index>', 'Index name for elastic')
.option('-s, --skip <skip>', 'A list of ; separated domain names to skip during backup', '') .option('-s, --skip <skip>', 'A list of ; separated domain names to skip during backup', '')
.option('-f, --force', 'Force backup', false) .option('-f, --force', 'Force backup', false)
.action(async (dirName: string, workspace: string, cmd: { skip: string, force: boolean }) => { .action(async (dirName: string, workspace: string, cmd: { skip: string, force: boolean }) => {
@ -518,7 +518,12 @@ export function devTool (
.description('dump workspace transactions and minio resources') .description('dump workspace transactions and minio resources')
.action(async (bucketName: string, dirName: string, workspace: string, cmd) => { .action(async (bucketName: string, dirName: string, workspace: string, cmd) => {
const { storageAdapter } = prepareTools() const { storageAdapter } = prepareTools()
const storage = await createStorageBackupStorage(storageAdapter, getWorkspaceId(bucketName, productId), dirName) const storage = await createStorageBackupStorage(
toolCtx,
storageAdapter,
getWorkspaceId(bucketName, productId),
dirName
)
await backup(transactorUrl, getWorkspaceId(workspace, productId), storage) await backup(transactorUrl, getWorkspaceId(workspace, productId), storage)
}) })
program program
@ -526,7 +531,7 @@ export function devTool (
.description('dump workspace transactions and minio resources') .description('dump workspace transactions and minio resources')
.action(async (bucketName: string, dirName: string, workspace: string, date, cmd) => { .action(async (bucketName: string, dirName: string, workspace: string, date, cmd) => {
const { storageAdapter } = prepareTools() const { storageAdapter } = prepareTools()
const storage = await createStorageBackupStorage(storageAdapter, getWorkspaceId(bucketName), dirName) const storage = await createStorageBackupStorage(toolCtx, storageAdapter, getWorkspaceId(bucketName), dirName)
await restore(transactorUrl, getWorkspaceId(workspace, productId), storage, parseInt(date ?? '-1')) await restore(transactorUrl, getWorkspaceId(workspace, productId), storage, parseInt(date ?? '-1'))
}) })
program program
@ -535,7 +540,12 @@ export function devTool (
.action(async (bucketName: string, dirName: string, cmd) => { .action(async (bucketName: string, dirName: string, cmd) => {
const { storageAdapter } = prepareTools() const { storageAdapter } = prepareTools()
const storage = await createStorageBackupStorage(storageAdapter, getWorkspaceId(bucketName, productId), dirName) const storage = await createStorageBackupStorage(
toolCtx,
storageAdapter,
getWorkspaceId(bucketName, productId),
dirName
)
await backupList(storage) await backupList(storage)
}) })
@ -576,7 +586,7 @@ export function devTool (
} }
console.log(`clearing ${workspace} history:`) console.log(`clearing ${workspace} history:`)
await clearTelegramHistory(mongodbUri, getWorkspaceId(workspace, productId), telegramDB, minio) await clearTelegramHistory(toolCtx, mongodbUri, getWorkspaceId(workspace, productId), telegramDB, minio)
}) })
}) })
@ -596,7 +606,7 @@ export function devTool (
for (const w of workspaces) { for (const w of workspaces) {
console.log(`clearing ${w.workspace} history:`) console.log(`clearing ${w.workspace} history:`)
await clearTelegramHistory(mongodbUri, getWorkspaceId(w.workspace, productId), telegramDB, minio) await clearTelegramHistory(toolCtx, mongodbUri, getWorkspaceId(w.workspace, productId), telegramDB, minio)
} }
}) })
}) })
@ -624,6 +634,7 @@ export function devTool (
const { mongodbUri, storageAdapter: minio } = prepareTools() const { mongodbUri, storageAdapter: minio } = prepareTools()
await withDatabase(mongodbUri, async (db) => { await withDatabase(mongodbUri, async (db) => {
await cleanWorkspace( await cleanWorkspace(
toolCtx,
mongodbUri, mongodbUri,
getWorkspaceId(workspace, productId), getWorkspaceId(workspace, productId),
minio, minio,
@ -636,7 +647,7 @@ export function devTool (
program.command('fix-bw-workspace <workspace>').action(async (workspace: string) => { program.command('fix-bw-workspace <workspace>').action(async (workspace: string) => {
const { storageAdapter: minio } = prepareTools() const { storageAdapter: minio } = prepareTools()
await fixMinioBW(getWorkspaceId(workspace, productId), minio) await fixMinioBW(toolCtx, getWorkspaceId(workspace, productId), minio)
}) })
program program

View File

@ -14,7 +14,7 @@
// limitations under the License. // limitations under the License.
// //
import { DOMAIN_TX, type Ref, type WorkspaceId } from '@hcengineering/core' import { DOMAIN_TX, type MeasureContext, type Ref, type WorkspaceId } from '@hcengineering/core'
import { type StorageAdapter } from '@hcengineering/server-core' import { type StorageAdapter } from '@hcengineering/server-core'
import { DOMAIN_ATTACHMENT } from '@hcengineering/model-attachment' import { DOMAIN_ATTACHMENT } from '@hcengineering/model-attachment'
import contact, { DOMAIN_CHANNEL } from '@hcengineering/model-contact' import contact, { DOMAIN_CHANNEL } from '@hcengineering/model-contact'
@ -29,6 +29,7 @@ const LastMessages = 'last-msgs'
* @public * @public
*/ */
export async function clearTelegramHistory ( export async function clearTelegramHistory (
ctx: MeasureContext,
mongoUrl: string, mongoUrl: string,
workspaceId: WorkspaceId, workspaceId: WorkspaceId,
tgDb: string, tgDb: string,
@ -90,7 +91,7 @@ export async function clearTelegramHistory (
workspaceDB.collection(DOMAIN_ATTACHMENT).deleteMany({ workspaceDB.collection(DOMAIN_ATTACHMENT).deleteMany({
attachedToClass: telegram.class.Message attachedToClass: telegram.class.Message
}), }),
storageAdapter.remove(workspaceId, Array.from(attachments)) storageAdapter.remove(ctx, workspaceId, Array.from(attachments))
]) ])
console.log('clearing telegram service data...') console.log('clearing telegram service data...')

View File

@ -21,6 +21,12 @@ export default mergeIds(coreId, core, {
Archived: '' as IntlString, Archived: '' as IntlString,
ClassLabel: '' as IntlString, ClassLabel: '' as IntlString,
ClassPropertyLabel: '' as IntlString, ClassPropertyLabel: '' as IntlString,
Members: '' as IntlString Members: '' as IntlString,
Blob: '' as IntlString,
BlobContentType: '' as IntlString,
BlobEtag: '' as IntlString,
BlobVersion: '' as IntlString,
BlobStorageId: '' as IntlString,
BlobSize: '' as IntlString
} }
}) })

View File

@ -14,7 +14,9 @@
// //
import { import {
type Blob,
DOMAIN_BLOB, DOMAIN_BLOB,
DOMAIN_BLOB_DATA,
DOMAIN_CONFIGURATION, DOMAIN_CONFIGURATION,
DOMAIN_DOC_INDEX_STATE, DOMAIN_DOC_INDEX_STATE,
DOMAIN_FULLTEXT_BLOB, DOMAIN_FULLTEXT_BLOB,
@ -63,6 +65,7 @@ import {
ReadOnly, ReadOnly,
TypeBoolean, TypeBoolean,
TypeIntlString, TypeIntlString,
TypeNumber,
TypeRecord, TypeRecord,
TypeRef, TypeRef,
TypeString, TypeString,
@ -129,6 +132,34 @@ export class TAttachedDoc extends TDoc implements AttachedDoc {
collection!: string collection!: string
} }
@Model(core.class.Blob, core.class.Doc, DOMAIN_BLOB_DATA)
@UX(core.string.Object)
export class TBlob extends TDoc implements Blob {
@Prop(TypeString(), core.string.Blob)
@ReadOnly()
provider!: string
@Prop(TypeString(), core.string.BlobContentType)
@ReadOnly()
contentType!: string
@Prop(TypeString(), core.string.BlobStorageId)
@ReadOnly()
storageId!: string
@Prop(TypeString(), core.string.BlobEtag)
@ReadOnly()
etag!: string
@Prop(TypeString(), core.string.BlobVersion)
@ReadOnly()
version!: string
@Prop(TypeNumber(), core.string.BlobSize)
@ReadOnly()
size!: number
}
@UX(core.string.ClassLabel) @UX(core.string.ClassLabel)
@Model(core.class.Class, core.class.Doc, DOMAIN_MODEL) @Model(core.class.Class, core.class.Doc, DOMAIN_MODEL)
export class TClass extends TDoc implements Class<Obj> { export class TClass extends TDoc implements Class<Obj> {

View File

@ -29,6 +29,7 @@ import {
TArrOf, TArrOf,
TAttachedDoc, TAttachedDoc,
TAttribute, TAttribute,
TBlob,
TBlobData, TBlobData,
TClass, TClass,
TCollection, TCollection,
@ -151,7 +152,8 @@ export function createModel (builder: Builder): void {
TIndexConfiguration, TIndexConfiguration,
TStatus, TStatus,
TStatusCategory, TStatusCategory,
TMigrationState TMigrationState,
TBlob
) )
builder.createDoc( builder.createDoc(

View File

@ -337,6 +337,12 @@ export const DOMAIN_TRANSIENT = 'transient' as Domain
*/ */
export const DOMAIN_BLOB = 'blob' as Domain export const DOMAIN_BLOB = 'blob' as Domain
/**
* Special domain to access s3 blob data.
* @public
*/
export const DOMAIN_BLOB_DATA = 'blob-data' as Domain
/** /**
* Special domain to access s3 blob data. * Special domain to access s3 blob data.
* @public * @public
@ -535,6 +541,29 @@ export interface IndexStageState extends Doc {
attributes: Record<string, any> attributes: Record<string, any>
} }
/**
* @public
*
* A blob document to manage blob attached documents.
*
* _id: is a platform ID and it created using our regular generateId(),
* and storageId is a provider specified storage id.
*/
export interface Blob extends Doc {
// Provider
provider: string
// A provider specific id
storageId: string
// A content type for blob
contentType: string
// A etag for blob
etag: string
// Document version if supported by provider
version: string | null
// A document size
size: number
}
/** /**
* @public * @public
* *

View File

@ -20,6 +20,7 @@ import type {
AnyAttribute, AnyAttribute,
ArrOf, ArrOf,
AttachedDoc, AttachedDoc,
Blob,
BlobData, BlobData,
Class, Class,
Collection, Collection,
@ -82,6 +83,7 @@ export default plugin(coreId, {
class: { class: {
Obj: '' as Ref<Class<Obj>>, Obj: '' as Ref<Class<Obj>>,
Doc: '' as Ref<Class<Doc>>, Doc: '' as Ref<Class<Doc>>,
Blob: '' as Ref<Class<Blob>>,
AttachedDoc: '' as Ref<Class<AttachedDoc>>, AttachedDoc: '' as Ref<Class<AttachedDoc>>,
Class: '' as Ref<Class<Class<Obj>>>, Class: '' as Ref<Class<Class<Obj>>>,
Mixin: '' as Ref<Class<Mixin<Doc>>>, Mixin: '' as Ref<Class<Mixin<Doc>>>,

View File

@ -13,7 +13,7 @@
// limitations under the License. // limitations under the License.
// //
import { getWorkspaceId } from '@hcengineering/core' import { getWorkspaceId, MeasureMetricsContext } from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio' import { MinioService } from '@hcengineering/minio'
import { setMetadata } from '@hcengineering/platform' import { setMetadata } from '@hcengineering/platform'
import { backup, createStorageBackupStorage } from '@hcengineering/server-backup' import { backup, createStorageBackupStorage } from '@hcengineering/server-backup'
@ -92,10 +92,12 @@ export class PlatformWorker {
async backup (): Promise<void> { async backup (): Promise<void> {
const workspaces = await getWorkspaces() const workspaces = await getWorkspaces()
const ctx = new MeasureMetricsContext('backup', {})
for (const ws of workspaces) { for (const ws of workspaces) {
console.log('\n\nBACKUP WORKSPACE ', ws.workspace, ws.productId) console.log('\n\nBACKUP WORKSPACE ', ws.workspace, ws.productId)
try { try {
const storage = await createStorageBackupStorage( const storage = await createStorageBackupStorage(
ctx,
this.storageAdapter, this.storageAdapter,
getWorkspaceId('backups', ws.productId), getWorkspaceId('backups', ws.productId),
ws.workspace ws.workspace

View File

@ -15,87 +15,29 @@
// //
// Add this to the VERY top of the first file loaded in your app // Add this to the VERY top of the first file loaded in your app
import { setMetadata } from '@hcengineering/platform'
import serverCore from '@hcengineering/server-core'
import serverToken from '@hcengineering/server-token'
import { serverFactories } from '@hcengineering/server-ws'
import { start } from '.'
import serverNotification from '@hcengineering/server-notification'
import contactPlugin from '@hcengineering/contact' import contactPlugin from '@hcengineering/contact'
import { setMetadata } from '@hcengineering/platform'
import { serverConfigFromEnv, storageConfigFromEnv } from '@hcengineering/server'
import serverCore, { type StorageConfiguration } from '@hcengineering/server-core'
import serverNotification from '@hcengineering/server-notification'
import serverToken from '@hcengineering/server-token'
import { start } from '.'
const serverPort = parseInt(process.env.SERVER_PORT ?? '3333') const {
url,
frontUrl,
serverSecret,
sesUrl,
elasticUrl,
elasticIndexName,
accountsUrl,
rekoniUrl,
serverFactory,
serverPort,
enableCompression
} = serverConfigFromEnv()
const storageConfig: StorageConfiguration = storageConfigFromEnv()
const serverFactory = serverFactories[(process.env.SERVER_PROVIDER as string) ?? 'ws'] ?? serverFactories.ws
const enableCompression = (process.env.ENABLE_COMPRESSION ?? 'true') === 'true'
const url = process.env.MONGO_URL
if (url === undefined) {
console.error('please provide mongodb url')
process.exit(1)
}
const elasticUrl = process.env.ELASTIC_URL
if (elasticUrl === undefined) {
console.error('please provide elastic url')
process.exit(1)
}
const minioEndpoint = process.env.MINIO_ENDPOINT
if (minioEndpoint === undefined) {
console.error('MINIO_ENDPOINT is required')
process.exit(1)
}
const minioAccessKey = process.env.MINIO_ACCESS_KEY
if (minioAccessKey === undefined) {
console.error('MINIO_ACCESS_KEY is required')
process.exit(1)
}
const minioSecretKey = process.env.MINIO_SECRET_KEY
if (minioSecretKey === undefined) {
console.error('MINIO_SECRET_KEY is required')
process.exit(1)
}
const minioConf = {
endPoint: minioEndpoint,
accessKey: minioAccessKey,
secretKey: minioSecretKey
}
const serverSecret = process.env.SERVER_SECRET
if (serverSecret === undefined) {
console.log('Please provide server secret')
process.exit(1)
}
const rekoniUrl = process.env.REKONI_URL
if (rekoniUrl === undefined) {
console.log('Please provide REKONI_URL url')
process.exit(1)
}
const frontUrl = process.env.FRONT_URL
if (frontUrl === undefined) {
console.log('Please provide FRONT_URL url')
process.exit(1)
}
const accountsUrl = process.env.ACCOUNTS_URL
if (accountsUrl === undefined) {
console.log('Please provide ACCOUNTS_URL url')
process.exit(1)
}
const elasticIndexName = process.env.ELASTIC_INDEX_NAME
if (elasticIndexName === undefined) {
console.log('Please provide ELASTIC_INDEX_NAME')
process.exit(1)
}
const sesUrl = process.env.SES_URL
const cursorMaxTime = process.env.SERVER_CURSOR_MAXTIMEMS const cursorMaxTime = process.env.SERVER_CURSOR_MAXTIMEMS
const lastNameFirst = process.env.LAST_NAME_FIRST === 'true' const lastNameFirst = process.env.LAST_NAME_FIRST === 'true'
@ -114,7 +56,7 @@ console.log(
) )
const shutdown = start(url, { const shutdown = start(url, {
fullTextUrl: elasticUrl, fullTextUrl: elasticUrl,
minioConf, storageConfig,
rekoniUrl, rekoniUrl,
port: serverPort, port: serverPort,
serverFactory, serverFactory,

View File

@ -25,27 +25,26 @@ import {
type ServerStorage, type ServerStorage,
type WorkspaceId type WorkspaceId
} from '@hcengineering/core' } from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import { createElasticAdapter, createElasticBackupDataAdapter } from '@hcengineering/elastic' import { createElasticAdapter, createElasticBackupDataAdapter } from '@hcengineering/elastic'
import { import {
ConfigurationMiddleware, ConfigurationMiddleware,
ModifiedMiddleware, ModifiedMiddleware,
PrivateMiddleware, PrivateMiddleware,
QueryJoinMiddleware, QueryJoinMiddleware,
SpaceSecurityMiddleware, SpacePermissionsMiddleware,
SpacePermissionsMiddleware SpaceSecurityMiddleware
} from '@hcengineering/middleware' } from '@hcengineering/middleware'
import { createMongoAdapter, createMongoTxAdapter } from '@hcengineering/mongo' import { createMongoAdapter, createMongoTxAdapter } from '@hcengineering/mongo'
import { OpenAIEmbeddingsStage, openAIId, openAIPluginImpl } from '@hcengineering/openai' import { OpenAIEmbeddingsStage, openAIId, openAIPluginImpl } from '@hcengineering/openai'
import { addLocation, addStringsLoader } from '@hcengineering/platform' import { addLocation, addStringsLoader } from '@hcengineering/platform'
import { import {
BackupClientSession, BackupClientSession,
buildStorageFromConfig,
createNullAdapter, createNullAdapter,
createRekoniAdapter, createRekoniAdapter,
createStorageDataAdapter, createStorageDataAdapter,
createYDocAdapter, createYDocAdapter,
getMetricsContext, getMetricsContext
type MinioConfig
} from '@hcengineering/server' } from '@hcengineering/server'
import { serverActivityId } from '@hcengineering/server-activity' import { serverActivityId } from '@hcengineering/server-activity'
import { serverAttachmentId } from '@hcengineering/server-attachment' import { serverAttachmentId } from '@hcengineering/server-attachment'
@ -61,13 +60,14 @@ import {
FullTextPushStage, FullTextPushStage,
globalIndexer, globalIndexer,
IndexedFieldStage, IndexedFieldStage,
type StorageAdapter, type StorageConfiguration,
type ContentTextAdapter, type ContentTextAdapter,
type DbConfiguration, type DbConfiguration,
type FullTextAdapter, type FullTextAdapter,
type FullTextPipelineStage, type FullTextPipelineStage,
type MiddlewareCreator, type MiddlewareCreator,
type Pipeline type Pipeline,
type StorageAdapter
} from '@hcengineering/server-core' } from '@hcengineering/server-core'
import { serverDocumentId } from '@hcengineering/server-document' import { serverDocumentId } from '@hcengineering/server-document'
import { serverGmailId } from '@hcengineering/server-gmail' import { serverGmailId } from '@hcengineering/server-gmail'
@ -188,7 +188,7 @@ export function start (
dbUrl: string, dbUrl: string,
opt: { opt: {
fullTextUrl: string fullTextUrl: string
minioConf: MinioConfig storageConfig: StorageConfiguration
rekoniUrl: string rekoniUrl: string
port: number port: number
productId: string productId: string
@ -236,6 +236,9 @@ export function start (
] ]
const metrics = getMetricsContext() const metrics = getMetricsContext()
const externalStorage = buildStorageFromConfig(opt.storageConfig, dbUrl)
function createIndexStages ( function createIndexStages (
fullText: MeasureContext, fullText: MeasureContext,
workspace: WorkspaceId, workspace: WorkspaceId,
@ -361,12 +364,7 @@ export function start (
}, },
serviceAdapters: {}, serviceAdapters: {},
defaultContentAdapter: 'Rekoni', defaultContentAdapter: 'Rekoni',
storageFactory: () => storageFactory: () => externalStorage,
new MinioService({
...opt.minioConf,
port: 9000,
useSSL: false
}),
workspace workspace
} }
return createPipeline(ctx, conf, middlewares, upgrade, broadcast) return createPipeline(ctx, conf, middlewares, upgrade, broadcast)

View File

@ -24,7 +24,7 @@ import type { TriggerControl } from '@hcengineering/server-core'
*/ */
export async function OnAttachmentDelete ( export async function OnAttachmentDelete (
tx: Tx, tx: Tx,
{ findAll, hierarchy, fulltextFx, storageFx, removedMap }: TriggerControl { findAll, hierarchy, fulltextFx, storageFx, removedMap, ctx }: TriggerControl
): Promise<Tx[]> { ): Promise<Tx[]> {
const rmTx = TxProcessor.extractTx(tx) as TxRemoveDoc<Attachment> const rmTx = TxProcessor.extractTx(tx) as TxRemoveDoc<Attachment>
@ -39,7 +39,7 @@ export async function OnAttachmentDelete (
}) })
storageFx(async (adapter, bucket) => { storageFx(async (adapter, bucket) => {
await adapter.remove(bucket, [attach.file]) await adapter.remove(ctx, bucket, [attach.file])
}) })
return [] return []

View File

@ -106,12 +106,7 @@ export class CollaborativeContentRetrievalStage implements FullTextPipelineStage
if (collaborativeDoc !== undefined && collaborativeDoc !== '') { if (collaborativeDoc !== undefined && collaborativeDoc !== '') {
const { documentId } = parseCollaborativeDoc(collaborativeDoc) const { documentId } = parseCollaborativeDoc(collaborativeDoc)
let docInfo: any | undefined const docInfo: any | undefined = await this.storageAdapter?.stat(this.metrics, this.workspace, documentId)
try {
docInfo = await this.storageAdapter?.stat(this.workspace, documentId)
} catch (err: any) {
// not found.
}
if (docInfo !== undefined) { if (docInfo !== undefined) {
const digest = docInfo.etag const digest = docInfo.etag
@ -120,7 +115,7 @@ export class CollaborativeContentRetrievalStage implements FullTextPipelineStage
;(update as any)[docUpdKey(digestKey)] = digest ;(update as any)[docUpdKey(digestKey)] = digest
const contentType = ((docInfo.metaData['content-type'] as string) ?? '').split(';')[0] const contentType = ((docInfo.metaData['content-type'] as string) ?? '').split(';')[0]
const readable = await this.storageAdapter?.get(this.workspace, documentId) const readable = await this.storageAdapter?.get(this.metrics, this.workspace, documentId)
if (readable !== undefined) { if (readable !== undefined) {
let textContent = await this.metrics.with( let textContent = await this.metrics.with(

View File

@ -38,7 +38,7 @@ import { workbenchId } from '@hcengineering/workbench'
*/ */
export async function OnContactDelete ( export async function OnContactDelete (
tx: Tx, tx: Tx,
{ findAll, hierarchy, storageFx, removedMap, txFactory }: TriggerControl { findAll, hierarchy, storageFx, removedMap, txFactory, ctx }: TriggerControl
): Promise<Tx[]> { ): Promise<Tx[]> {
const rmTx = tx as TxRemoveDoc<Contact> const rmTx = tx as TxRemoveDoc<Contact>
@ -61,14 +61,15 @@ export async function OnContactDelete (
} }
storageFx(async (adapter, bucket) => { storageFx(async (adapter, bucket) => {
await adapter.remove(bucket, [avatar]) await adapter.remove(ctx, bucket, [avatar])
if (avatar != null) { if (avatar != null) {
const extra = await adapter.list(bucket, avatar) const extra = await adapter.list(ctx, bucket, avatar)
if (extra.length > 0) { if (extra.length > 0) {
await adapter.remove( await adapter.remove(
ctx,
bucket, bucket,
Array.from(extra.entries()).map((it) => it[1].name) Array.from(extra.entries()).map((it) => it[1]._id)
) )
} }
} }

View File

@ -837,7 +837,7 @@ export async function createWorkspace (
const initWS = getMetadata(toolPlugin.metadata.InitWorkspace) const initWS = getMetadata(toolPlugin.metadata.InitWorkspace)
const wsId = getWorkspaceId(workspaceInfo.workspace, productId) const wsId = getWorkspaceId(workspaceInfo.workspace, productId)
if (initWS !== undefined && (await getWorkspaceById(db, productId, initWS)) !== null) { if (initWS !== undefined && (await getWorkspaceById(db, productId, initWS)) !== null) {
client = await initModel(getTransactor(), wsId, txes, [], ctxModellogger) client = await initModel(ctx, getTransactor(), wsId, txes, [], ctxModellogger)
await client.close() await client.close()
await cloneWorkspace( await cloneWorkspace(
getTransactor(), getTransactor(),
@ -846,7 +846,7 @@ export async function createWorkspace (
) )
client = await upgradeModel(getTransactor(), wsId, txes, migrationOperation, ctxModellogger) client = await upgradeModel(getTransactor(), wsId, txes, migrationOperation, ctxModellogger)
} else { } else {
client = await initModel(getTransactor(), wsId, txes, migrationOperation, ctxModellogger) client = await initModel(ctx, getTransactor(), wsId, txes, migrationOperation, ctxModellogger)
} }
} catch (err: any) { } catch (err: any) {
return { workspaceInfo, err, client: {} as any } return { workspaceInfo, err, client: {} as any }

View File

@ -1,4 +1,4 @@
import { WorkspaceId } from '@hcengineering/core' import { MeasureContext, WorkspaceId } from '@hcengineering/core'
import { StorageAdapter } from '@hcengineering/server-core' import { StorageAdapter } from '@hcengineering/server-core'
import { createReadStream, createWriteStream, existsSync } from 'fs' import { createReadStream, createWriteStream, existsSync } from 'fs'
import { mkdir, readFile, writeFile } from 'fs/promises' import { mkdir, readFile, writeFile } from 'fs/promises'
@ -55,35 +55,43 @@ class AdapterStorage implements BackupStorage {
constructor ( constructor (
readonly client: StorageAdapter, readonly client: StorageAdapter,
readonly workspaceId: WorkspaceId, readonly workspaceId: WorkspaceId,
readonly root: string readonly root: string,
readonly ctx: MeasureContext
) {} ) {}
async loadFile (name: string): Promise<Buffer> { async loadFile (name: string): Promise<Buffer> {
const data = await this.client.read(this.workspaceId, join(this.root, name)) const data = await this.client.read(this.ctx, this.workspaceId, join(this.root, name))
return Buffer.concat(data) return Buffer.concat(data)
} }
async write (name: string): Promise<Writable> { async write (name: string): Promise<Writable> {
const wr = new PassThrough() const wr = new PassThrough()
void this.client.put(this.workspaceId, join(this.root, name), wr) void this.client.put(this.ctx, this.workspaceId, join(this.root, name), wr, 'application/octet-stream')
return wr return wr
} }
async load (name: string): Promise<Readable> { async load (name: string): Promise<Readable> {
return await this.client.get(this.workspaceId, join(this.root, name)) return await this.client.get(this.ctx, this.workspaceId, join(this.root, name))
} }
async exists (name: string): Promise<boolean> { async exists (name: string): Promise<boolean> {
try { try {
await this.client.stat(this.workspaceId, join(this.root, name)) return (await this.client.stat(this.ctx, this.workspaceId, join(this.root, name))) !== undefined
return true
} catch (err) { } catch (err) {
return false return false
} }
} }
async writeFile (name: string, data: string | Buffer): Promise<void> { async writeFile (name: string, data: string | Buffer): Promise<void> {
void this.client.put(this.workspaceId, join(this.root, name), data, data.length) // TODO: add mime type detection here.
await this.client.put(
this.ctx,
this.workspaceId,
join(this.root, name),
data,
'application/octet-stream',
data.length
)
} }
} }
@ -101,12 +109,13 @@ export async function createFileBackupStorage (fileName: string): Promise<Backup
* @public * @public
*/ */
export async function createStorageBackupStorage ( export async function createStorageBackupStorage (
ctx: MeasureContext,
client: StorageAdapter, client: StorageAdapter,
workspaceId: WorkspaceId, workspaceId: WorkspaceId,
root: string root: string
): Promise<BackupStorage> { ): Promise<BackupStorage> {
if (!(await client.exists(workspaceId))) { if (!(await client.exists(ctx, workspaceId))) {
await client.make(workspaceId) await client.make(ctx, workspaceId)
} }
return new AdapterStorage(client, workspaceId, root) return new AdapterStorage(client, workspaceId, root, ctx)
} }

View File

@ -47,7 +47,7 @@ export async function loadCollaborativeDoc (
return await ctx.with('loadCollaborativeDoc', { type: 'content' }, async (ctx) => { return await ctx.with('loadCollaborativeDoc', { type: 'content' }, async (ctx) => {
const yContent = await ctx.with('yDocFromMinio', { type: 'content' }, async () => { const yContent = await ctx.with('yDocFromMinio', { type: 'content' }, async () => {
return await yDocFromStorage(storageAdapter, workspace, documentId, new YDoc({ gc: false })) return await yDocFromStorage(ctx, storageAdapter, workspace, documentId, new YDoc({ gc: false }))
}) })
// the document does not exist // the document does not exist
@ -60,7 +60,7 @@ export async function loadCollaborativeDoc (
} }
const yHistory = await ctx.with('yDocFromMinio', { type: 'history' }, async () => { const yHistory = await ctx.with('yDocFromMinio', { type: 'history' }, async () => {
return await yDocFromStorage(storageAdapter, workspace, historyDocumentId, new YDoc()) return await yDocFromStorage(ctx, storageAdapter, workspace, historyDocumentId, new YDoc())
}) })
// the history document does not exist // the history document does not exist
@ -98,7 +98,7 @@ export async function saveCollaborativeDocVersion (
await ctx.with('saveCollaborativeDoc', {}, async (ctx) => { await ctx.with('saveCollaborativeDoc', {}, async (ctx) => {
if (versionId === 'HEAD') { if (versionId === 'HEAD') {
await ctx.with('yDocToMinio', {}, async () => { await ctx.with('yDocToMinio', {}, async () => {
await yDocToStorage(storageAdapter, workspace, documentId, ydoc) await yDocToStorage(ctx, storageAdapter, workspace, documentId, ydoc)
}) })
} else { } else {
console.warn('Cannot save non HEAD document version', documentId, versionId) console.warn('Cannot save non HEAD document version', documentId, versionId)
@ -125,7 +125,7 @@ export async function removeCollaborativeDoc (
} }
if (toRemove.length > 0) { if (toRemove.length > 0) {
await ctx.with('remove', {}, async () => { await ctx.with('remove', {}, async () => {
await storageAdapter.remove(workspace, toRemove) await storageAdapter.remove(ctx, workspace, toRemove)
}) })
} }
}) })
@ -181,7 +181,7 @@ export async function takeCollaborativeDocSnapshot (
await ctx.with('takeCollaborativeDocSnapshot', {}, async (ctx) => { await ctx.with('takeCollaborativeDocSnapshot', {}, async (ctx) => {
const yHistory = const yHistory =
(await ctx.with('yDocFromMinio', { type: 'history' }, async () => { (await ctx.with('yDocFromMinio', { type: 'history' }, async () => {
return await yDocFromStorage(storageAdapter, workspace, historyDocumentId, new YDoc({ gc: false })) return await yDocFromStorage(ctx, storageAdapter, workspace, historyDocumentId, new YDoc({ gc: false }))
})) ?? new YDoc() })) ?? new YDoc()
await ctx.with('createYdocSnapshot', {}, async () => { await ctx.with('createYdocSnapshot', {}, async () => {
@ -189,7 +189,7 @@ export async function takeCollaborativeDocSnapshot (
}) })
await ctx.with('yDocToMinio', { type: 'history' }, async () => { await ctx.with('yDocToMinio', { type: 'history' }, async () => {
await yDocToStorage(storageAdapter, workspace, historyDocumentId, yHistory) await yDocToStorage(ctx, storageAdapter, workspace, historyDocumentId, yHistory)
}) })
}) })
} }

View File

@ -13,7 +13,7 @@
// limitations under the License. // limitations under the License.
// //
import { WorkspaceId } from '@hcengineering/core' import { MeasureContext, WorkspaceId } from '@hcengineering/core'
import { StorageAdapter } from '@hcengineering/server-core' import { StorageAdapter } from '@hcengineering/server-core'
import { Doc as YDoc } from 'yjs' import { Doc as YDoc } from 'yjs'
@ -21,6 +21,7 @@ import { yDocFromBuffer, yDocToBuffer } from './ydoc'
/** @public */ /** @public */
export async function yDocFromStorage ( export async function yDocFromStorage (
ctx: MeasureContext,
storageAdapter: StorageAdapter, storageAdapter: StorageAdapter,
workspace: WorkspaceId, workspace: WorkspaceId,
minioDocumentId: string, minioDocumentId: string,
@ -31,7 +32,7 @@ export async function yDocFromStorage (
ydoc ??= new YDoc({ gc: false }) ydoc ??= new YDoc({ gc: false })
try { try {
const buffer = await storageAdapter.read(workspace, minioDocumentId) const buffer = await storageAdapter.read(ctx, workspace, minioDocumentId)
return yDocFromBuffer(Buffer.concat(buffer), ydoc) return yDocFromBuffer(Buffer.concat(buffer), ydoc)
} catch (err: any) { } catch (err: any) {
if (err?.code === 'NoSuchKey' || err?.code === 'NotFound') { if (err?.code === 'NoSuchKey' || err?.code === 'NotFound') {
@ -43,12 +44,12 @@ export async function yDocFromStorage (
/** @public */ /** @public */
export async function yDocToStorage ( export async function yDocToStorage (
ctx: MeasureContext,
storageAdapter: StorageAdapter, storageAdapter: StorageAdapter,
workspace: WorkspaceId, workspace: WorkspaceId,
minioDocumentId: string, minioDocumentId: string,
ydoc: YDoc ydoc: YDoc
): Promise<void> { ): Promise<void> {
const buffer = yDocToBuffer(ydoc) const buffer = yDocToBuffer(ydoc)
const metadata = { 'content-type': 'application/ydoc' } await storageAdapter.put(ctx, workspace, minioDocumentId, buffer, 'application/ydoc', buffer.length)
await storageAdapter.put(workspace, minioDocumentId, buffer, buffer.length, metadata)
} }

View File

@ -39,7 +39,7 @@ export async function removeDocument (
const historyDocumentId = collaborativeHistoryDocId(minioDocumentId) const historyDocumentId = collaborativeHistoryDocId(minioDocumentId)
try { try {
await minio.remove(workspaceId, [minioDocumentId, historyDocumentId]) await minio.remove(ctx, workspaceId, [minioDocumentId, historyDocumentId])
} catch (err) { } catch (err) {
await ctx.error('failed to remove document', { documentId, error: err }) await ctx.error('failed to remove document', { documentId, error: err })
} }

View File

@ -57,7 +57,7 @@ export async function takeSnapshot (
const historyDocumentId = collaborativeHistoryDocId(minioDocumentId) const historyDocumentId = collaborativeHistoryDocId(minioDocumentId)
const yHistory = const yHistory =
(await ctx.with('yDocFromMinio', {}, async () => { (await ctx.with('yDocFromMinio', {}, async () => {
return await yDocFromStorage(minio, workspaceId, historyDocumentId) return await yDocFromStorage(ctx, minio, workspaceId, historyDocumentId)
})) ?? new YDoc() })) ?? new YDoc()
await ctx.with('createYdocSnapshot', {}, async () => { await ctx.with('createYdocSnapshot', {}, async () => {
@ -67,7 +67,7 @@ export async function takeSnapshot (
}) })
await ctx.with('yDocToMinio', {}, async () => { await ctx.with('yDocToMinio', {}, async () => {
await yDocToStorage(minio, workspaceId, historyDocumentId, yHistory) await yDocToStorage(ctx, minio, workspaceId, historyDocumentId, yHistory)
}) })
return { ...version } return { ...version }

View File

@ -32,13 +32,15 @@
"@types/html-to-text": "^8.1.1", "@types/html-to-text": "^8.1.1",
"jest": "^29.7.0", "jest": "^29.7.0",
"ts-jest": "^29.1.1", "ts-jest": "^29.1.1",
"@types/jest": "^29.5.5" "@types/jest": "^29.5.5",
"@types/uuid": "^8.3.1"
}, },
"dependencies": { "dependencies": {
"@hcengineering/core": "^0.6.28", "@hcengineering/core": "^0.6.28",
"@hcengineering/platform": "^0.6.9", "@hcengineering/platform": "^0.6.9",
"@hcengineering/query": "^0.6.8", "@hcengineering/query": "^0.6.8",
"fast-equals": "^2.0.3", "fast-equals": "^2.0.3",
"html-to-text": "^9.0.3" "html-to-text": "^9.0.3",
"uuid": "^8.3.2"
} }
} }

View File

@ -33,6 +33,19 @@ import {
} from '@hcengineering/core' } from '@hcengineering/core'
import { type StorageAdapter } from './storage' import { type StorageAdapter } from './storage'
/**
* @public
*/
export interface RawDBAdapter {
find: <T extends Doc>(
workspace: WorkspaceId,
domain: Domain,
query: DocumentQuery<T>,
options?: Omit<FindOptions<T>, 'projection' | 'lookup'>
) => Promise<FindResult<T>>
upload: <T extends Doc>(workspace: WorkspaceId, domain: Domain, docs: T[]) => Promise<void>
}
/** /**
* @public * @public
*/ */
@ -77,6 +90,7 @@ export interface TxAdapter extends DbAdapter {
* @public * @public
*/ */
export type DbAdapterFactory = ( export type DbAdapterFactory = (
ctx: MeasureContext,
hierarchy: Hierarchy, hierarchy: Hierarchy,
url: string, url: string,
workspaceId: WorkspaceId, workspaceId: WorkspaceId,

View File

@ -100,12 +100,7 @@ export class ContentRetrievalStage implements FullTextPipelineStage {
// We need retrieve value of attached document content. // We need retrieve value of attached document content.
const ref = doc.attributes[docKey(val.name, { _class: val.attributeOf })] as Ref<Doc> const ref = doc.attributes[docKey(val.name, { _class: val.attributeOf })] as Ref<Doc>
if (ref !== undefined && ref !== '') { if (ref !== undefined && ref !== '') {
let docInfo: any | undefined const docInfo: any | undefined = await this.storageAdapter?.stat(this.metrics, this.workspace, ref)
try {
docInfo = await this.storageAdapter?.stat(this.workspace, ref)
} catch (err: any) {
// not found.
}
if (docInfo !== undefined && docInfo.size < 30 * 1024 * 1024) { if (docInfo !== undefined && docInfo.size < 30 * 1024 * 1024) {
// We have blob, we need to decode it to string. // We have blob, we need to decode it to string.
const contentType = ((docInfo.metaData['content-type'] as string) ?? '').split(';')[0] const contentType = ((docInfo.metaData['content-type'] as string) ?? '').split(';')[0]
@ -116,7 +111,7 @@ export class ContentRetrievalStage implements FullTextPipelineStage {
if (doc.attributes[digestKey] !== digest) { if (doc.attributes[digestKey] !== digest) {
;(update as any)[docUpdKey(digestKey)] = digest ;(update as any)[docUpdKey(digestKey)] = digest
const readable = await this.storageAdapter?.get(this.workspace, ref) const readable = await this.storageAdapter?.get(this.metrics, this.workspace, ref)
if (readable !== undefined) { if (readable !== undefined) {
let textContent = await this.metrics.with( let textContent = await this.metrics.with(

View File

@ -115,6 +115,7 @@ class InMemoryAdapter extends DummyDbAdapter implements DbAdapter {
* @public * @public
*/ */
export async function createInMemoryAdapter ( export async function createInMemoryAdapter (
ctx: MeasureContext,
hierarchy: Hierarchy, hierarchy: Hierarchy,
url: string, url: string,
workspaceId: WorkspaceId workspaceId: WorkspaceId

View File

@ -0,0 +1,197 @@
import core, {
DOMAIN_BLOB_DATA,
generateId,
groupByArray,
type Blob,
type MeasureContext,
type Ref,
type WorkspaceId
} from '@hcengineering/core'
import { type Readable } from 'stream'
import { type RawDBAdapter } from '../adapter'
import { type ListBlobResult, type StorageAdapter, type UploadedObjectInfo } from '../storage'
import { v4 as uuid } from 'uuid'
import { type StorageConfig, type StorageConfiguration } from '../types'
/**
 * Perform operations on storage adapter and map required information into BinaryDocument into provided DbAdapter storage.
 *
 * Blobs are physically stored in one of the configured providers under a freshly
 * generated `storageId`; the logical `objectName` -> (provider, storageId) mapping
 * is persisted as a `Blob` document (keyed by `objectName`) in DOMAIN_BLOB_DATA.
 */
export class AggregatorStorageAdapter implements StorageAdapter {
  constructor (
    readonly adapters: Map<string, StorageAdapter>,
    readonly defaultAdapter: string, // Adapter will be used to put new documents into
    readonly dbAdapter: RawDBAdapter
  ) {}

  async initialize (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {
    // We need to initialize internal table if it miss documents.
  }

  /**
   * A workspace "exists" only when it exists in every configured provider.
   */
  async exists (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<boolean> {
    for (const a of this.adapters.values()) {
      if (!(await a.exists(ctx, workspaceId))) {
        return false
      }
    }
    return true
  }

  /**
   * Create the workspace bucket in every provider that does not yet have it.
   */
  async make (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {
    for (const a of this.adapters.values()) {
      if (!(await a.exists(ctx, workspaceId))) {
        await a.make(ctx, workspaceId)
      }
    }
  }

  /**
   * Delete the workspace bucket from every provider that currently has it.
   */
  async delete (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {
    for (const a of this.adapters.values()) {
      if (await a.exists(ctx, workspaceId)) {
        await a.delete(ctx, workspaceId)
      }
    }
  }

  /**
   * Remove blobs by their logical object names: resolve each to its Blob record,
   * group by provider, and delete the underlying objects in each provider.
   */
  async remove (ctx: MeasureContext, workspaceId: WorkspaceId, objectNames: string[]): Promise<void> {
    const docs = await this.dbAdapter.find<Blob>(workspaceId, DOMAIN_BLOB_DATA, {
      _class: core.class.Blob,
      _id: { $in: objectNames as Ref<Blob>[] }
    })

    // Group by provider and delegate into it.
    const byProvider = groupByArray(docs, (item) => item.provider)
    for (const [k, docs] of byProvider) {
      const adapter = this.adapters.get(k)
      if (adapter !== undefined) {
        // The provider stored the object under storageId (see put), not under the
        // logical _id/objectName — delete by storageId or nothing is removed.
        await adapter.remove(
          ctx,
          workspaceId,
          docs.map((it) => it.storageId)
        )
      }
    }
  }

  /**
   * List blob records whose object name starts with the given prefix.
   */
  async list (ctx: MeasureContext, workspaceId: WorkspaceId, prefix?: string | undefined): Promise<ListBlobResult[]> {
    // NOTE: a string $regex must be a bare pattern. Including the /.../i delimiters
    // and flag makes them literal characters and the filter never matches.
    return await this.dbAdapter.find<Blob>(workspaceId, DOMAIN_BLOB_DATA, {
      _class: core.class.Blob,
      _id: { $regex: `^${prefix ?? ''}` }
    })
  }

  /**
   * Return the Blob record for an object name, or undefined when unknown.
   */
  async stat (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise<Blob | undefined> {
    return (
      await this.dbAdapter.find<Blob>(
        workspaceId,
        DOMAIN_BLOB_DATA,
        { _class: core.class.Blob, _id: name as Ref<Blob> },
        { limit: 1 }
      )
    ).shift()
  }

  /**
   * Open a readable stream for the blob, delegating to its owning provider.
   */
  async get (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise<Readable> {
    const { provider, stat } = await this.findProvider(workspaceId, ctx, name)
    return await provider.get(ctx, workspaceId, stat.storageId)
  }

  /**
   * Resolve an object name to its Blob record and the provider adapter that holds
   * the data. Throws when either the record or the provider is missing.
   */
  private async findProvider (
    workspaceId: WorkspaceId,
    ctx: MeasureContext,
    objectName: string
  ): Promise<{ provider: StorageAdapter, stat: Blob }> {
    const stat = (
      await this.dbAdapter.find<Blob>(
        workspaceId,
        DOMAIN_BLOB_DATA,
        { _class: core.class.Blob, _id: objectName as Ref<Blob> },
        { limit: 1 }
      )
    ).shift()
    if (stat === undefined) {
      throw new Error('No such object found')
    }
    const provider = this.adapters.get(stat.provider)
    if (provider === undefined) {
      throw new Error('No such provider found')
    }
    return { provider, stat }
  }

  /**
   * Read a byte range of the blob from its owning provider.
   */
  async partial (
    ctx: MeasureContext,
    workspaceId: WorkspaceId,
    objectName: string,
    offset: number,
    length?: number | undefined
  ): Promise<Readable> {
    const { provider, stat } = await this.findProvider(workspaceId, ctx, objectName)
    return await provider.partial(ctx, workspaceId, stat.storageId, offset, length)
  }

  /**
   * Read the whole blob into buffers from its owning provider.
   */
  async read (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise<Buffer[]> {
    const { provider, stat } = await this.findProvider(workspaceId, ctx, name)
    return await provider.read(ctx, workspaceId, stat.storageId)
  }

  /**
   * Upload data into the default provider and record the Blob mapping document.
   *
   * The physical object key is a fresh uuid (storageId); the logical key is
   * `objectName`, which the Blob document uses as its _id so later stat/get/
   * read/partial/remove calls (all keyed by objectName) can resolve it.
   */
  async put (
    ctx: MeasureContext,
    workspaceId: WorkspaceId,
    objectName: string,
    stream: string | Readable | Buffer,
    contentType: string,
    size?: number | undefined
  ): Promise<UploadedObjectInfo> {
    const provider = this.adapters.get(this.defaultAdapter)
    if (provider === undefined) {
      throw new Error('No such provider found')
    }
    const storageId = uuid()
    const result = await provider.put(ctx, workspaceId, storageId, stream, contentType, size)

    if (size === undefined || size === 0) {
      // Size/content type were not known up front — ask the provider for real values.
      const docStats = await provider.stat(ctx, workspaceId, storageId)
      if (docStats !== undefined) {
        if (contentType !== docStats.contentType) {
          contentType = docStats.contentType
        }
        size = docStats.size
      }
    }

    const blobDoc: Blob = {
      _class: core.class.Blob,
      // Key the record by the logical object name; a random generateId() here would
      // orphan the blob — all lookup methods query by objectName.
      _id: objectName as Ref<Blob>,
      modifiedBy: core.account.System,
      modifiedOn: Date.now(),
      space: core.space.Configuration,
      provider: this.defaultAdapter,
      storageId,
      size: size ?? 0,
      contentType,
      etag: result.etag,
      version: result.versionId ?? null
    }

    await this.dbAdapter.upload<Blob>(workspaceId, DOMAIN_BLOB_DATA, [blobDoc])
    return result
  }
}
/**
 * @public
 *
 * Build an aggregating storage adapter from a declarative configuration:
 * every configured storage is instantiated via the supplied factory and
 * registered under its name; `config.default` selects where new blobs go.
 */
export function buildStorage (
  config: StorageConfiguration,
  dbAdapter: RawDBAdapter,
  storageFactory: (kind: string, config: StorageConfig) => StorageAdapter
): StorageAdapter {
  // One concrete adapter per configured storage, keyed by its name.
  const adapters = new Map<string, StorageAdapter>(
    config.storages.map((cfg): [string, StorageAdapter] => [cfg.name, storageFactory(cfg.kind, cfg)])
  )
  return new AggregatorStorageAdapter(adapters, config.default, dbAdapter)
}

View File

@ -35,11 +35,11 @@ import { type DbConfiguration } from '../configuration'
import { createContentAdapter } from '../content' import { createContentAdapter } from '../content'
import { FullTextIndex } from '../fulltext' import { FullTextIndex } from '../fulltext'
import { FullTextIndexPipeline } from '../indexer' import { FullTextIndexPipeline } from '../indexer'
import { createServiceAdaptersManager } from '../service'
import { type StorageAdapter } from '../storage' import { type StorageAdapter } from '../storage'
import { Triggers } from '../triggers' import { Triggers } from '../triggers'
import { type ServerStorageOptions } from '../types' import { type ServerStorageOptions } from '../types'
import { TServerStorage } from './storage' import { TServerStorage } from './storage'
import { createServiceAdaptersManager } from '../service'
/** /**
* @public * @public
@ -58,7 +58,10 @@ export async function createServerStorage (
for (const key in conf.adapters) { for (const key in conf.adapters) {
const adapterConf = conf.adapters[key] const adapterConf = conf.adapters[key]
adapters.set(key, await adapterConf.factory(hierarchy, adapterConf.url, conf.workspace, modelDb, storageAdapter)) adapters.set(
key,
await adapterConf.factory(ctx, hierarchy, adapterConf.url, conf.workspace, modelDb, storageAdapter)
)
} }
const txAdapter = adapters.get(conf.domains[DOMAIN_TX]) as TxAdapter const txAdapter = adapters.get(conf.domains[DOMAIN_TX]) as TxAdapter
@ -187,17 +190,21 @@ export async function createServerStorage (
*/ */
export function createNullStorageFactory (): StorageAdapter { export function createNullStorageFactory (): StorageAdapter {
return { return {
exists: async (workspaceId: WorkspaceId) => { initialize: async (ctx, workspaceId) => {},
exists: async (ctx, workspaceId: WorkspaceId) => {
return false return false
}, },
make: async (workspaceId: WorkspaceId) => {}, make: async (ctx, workspaceId: WorkspaceId) => {},
remove: async (workspaceId: WorkspaceId, objectNames: string[]) => {}, remove: async (ctx, workspaceId: WorkspaceId, objectNames: string[]) => {},
delete: async (workspaceId: WorkspaceId) => {}, delete: async (ctx, workspaceId: WorkspaceId) => {},
list: async (workspaceId: WorkspaceId, prefix?: string) => [], list: async (ctx, workspaceId: WorkspaceId, prefix?: string) => [],
stat: async (workspaceId: WorkspaceId, objectName: string) => ({}) as any, stat: async (ctx, workspaceId: WorkspaceId, objectName: string) => ({}) as any,
get: async (workspaceId: WorkspaceId, objectName: string) => ({}) as any, get: async (ctx, workspaceId: WorkspaceId, objectName: string) => ({}) as any,
put: async (workspaceId: WorkspaceId, objectName: string, stream: any, size?: number, qwe?: any) => ({}) as any, put: async (ctx, workspaceId: WorkspaceId, objectName: string, stream: any, contentType: string, size?: number) =>
read: async (workspaceId: WorkspaceId, name: string) => ({}) as any, ({}) as any,
partial: async (workspaceId: WorkspaceId, objectName: string, offset: number, length?: number) => ({}) as any read: async (ctx, workspaceId: WorkspaceId, name: string) => ({}) as any,
partial: async (ctx, workspaceId: WorkspaceId, objectName: string, offset: number, length?: number) => ({}) as any
} }
} }
export { AggregatorStorageAdapter, buildStorage } from './aggregator'

View File

@ -1,51 +1,11 @@
import { type WorkspaceId, toWorkspaceString } from '@hcengineering/core' import { type Blob, type MeasureContext, type WorkspaceId, toWorkspaceString } from '@hcengineering/core'
import { type Readable } from 'stream' import { type Readable } from 'stream'
export interface MetadataItem {
Key: string
Value: string
}
export type BucketItem =
| {
name: string
size: number
etag: string
prefix?: never
lastModified: Date
}
| {
name?: never
etag?: never
lastModified?: never
prefix: string
size: 0
}
export interface BucketItemStat {
size: number
etag: string
lastModified: Date
metaData: ItemBucketMetadata
versionId?: string | null
}
export interface UploadedObjectInfo { export interface UploadedObjectInfo {
etag: string etag: string
versionId: string | null versionId: string | null
} }
export interface ItemBucketMetadataList {
Items: MetadataItem[]
}
export type ItemBucketMetadata = Record<string, any>
export type BucketItemWithMetadata = BucketItem & {
metadata?: ItemBucketMetadata | ItemBucketMetadataList
}
/**
* @public
*/
export type WorkspaceItem = Required<BucketItem> & { metaData: ItemBucketMetadata }
/** /**
* @public * @public
*/ */
@ -53,22 +13,33 @@ export function getBucketId (workspaceId: WorkspaceId): string {
return toWorkspaceString(workspaceId, '.') return toWorkspaceString(workspaceId, '.')
} }
export interface StorageAdapter { export type ListBlobResult = Omit<Blob, 'contentType' | 'version'>
exists: (workspaceId: WorkspaceId) => Promise<boolean>
make: (workspaceId: WorkspaceId) => Promise<void> export interface StorageAdapter {
remove: (workspaceId: WorkspaceId, objectNames: string[]) => Promise<void> initialize: (ctx: MeasureContext, workspaceId: WorkspaceId) => Promise<void>
delete: (workspaceId: WorkspaceId) => Promise<void>
list: (workspaceId: WorkspaceId, prefix?: string) => Promise<WorkspaceItem[]> exists: (ctx: MeasureContext, workspaceId: WorkspaceId) => Promise<boolean>
stat: (workspaceId: WorkspaceId, objectName: string) => Promise<BucketItemStat> make: (ctx: MeasureContext, workspaceId: WorkspaceId) => Promise<void>
get: (workspaceId: WorkspaceId, objectName: string) => Promise<Readable> delete: (ctx: MeasureContext, workspaceId: WorkspaceId) => Promise<void>
remove: (ctx: MeasureContext, workspaceId: WorkspaceId, objectNames: string[]) => Promise<void>
list: (ctx: MeasureContext, workspaceId: WorkspaceId, prefix?: string) => Promise<ListBlobResult[]>
stat: (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string) => Promise<Blob | undefined>
get: (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string) => Promise<Readable>
put: ( put: (
ctx: MeasureContext,
workspaceId: WorkspaceId, workspaceId: WorkspaceId,
objectName: string, objectName: string,
stream: Readable | Buffer | string, stream: Readable | Buffer | string,
size?: number, contentType: string,
metaData?: ItemBucketMetadata size?: number
) => Promise<UploadedObjectInfo> ) => Promise<UploadedObjectInfo>
read: (workspaceId: WorkspaceId, name: string) => Promise<Buffer[]> read: (ctx: MeasureContext, workspaceId: WorkspaceId, name: string) => Promise<Buffer[]>
partial: (workspaceId: WorkspaceId, objectName: string, offset: number, length?: number) => Promise<Readable> partial: (
ctx: MeasureContext,
workspaceId: WorkspaceId,
objectName: string,
offset: number,
length?: number
) => Promise<Readable>
} }

View File

@ -41,9 +41,9 @@ import {
type WorkspaceIdWithUrl type WorkspaceIdWithUrl
} from '@hcengineering/core' } from '@hcengineering/core'
import type { Asset, Resource } from '@hcengineering/platform' import type { Asset, Resource } from '@hcengineering/platform'
import { type StorageAdapter } from './storage'
import { type Readable } from 'stream' import { type Readable } from 'stream'
import { type ServiceAdaptersManager } from './service' import { type ServiceAdaptersManager } from './service'
import { type StorageAdapter } from './storage'
/** /**
* @public * @public
@ -426,3 +426,13 @@ export interface ServiceAdapterConfig {
db: string db: string
url: string url: string
} }
export interface StorageConfig {
name: string
kind: string
}
export interface StorageConfiguration {
default: string
storages: StorageConfig[]
}

View File

@ -1,5 +1,5 @@
import { DbAdapter } from '@hcengineering/server-core' import { DbAdapter } from '@hcengineering/server-core'
import { Domain, getWorkspaceId, Hierarchy } from '@hcengineering/core' import { Domain, getWorkspaceId, Hierarchy, MeasureMetricsContext } from '@hcengineering/core'
import { createElasticBackupDataAdapter } from '../backup' import { createElasticBackupDataAdapter } from '../backup'
import { Client } from '@elastic/elasticsearch' import { Client } from '@elastic/elasticsearch'
@ -11,7 +11,12 @@ describe('Elastic Data Adapter', () => {
let adapter: DbAdapter let adapter: DbAdapter
beforeEach(async () => { beforeEach(async () => {
adapter = await createElasticBackupDataAdapter(new Hierarchy(), url, getWorkspaceId('ws1', '')) adapter = await createElasticBackupDataAdapter(
new MeasureMetricsContext('test', {}),
new Hierarchy(),
url,
getWorkspaceId('ws1', '')
)
}) })
afterEach(async () => { afterEach(async () => {

View File

@ -298,6 +298,7 @@ class ElasticDataAdapter implements DbAdapter {
* @public * @public
*/ */
export async function createElasticBackupDataAdapter ( export async function createElasticBackupDataAdapter (
ctx: MeasureContext,
hierarchy: Hierarchy, hierarchy: Hierarchy,
url: string, url: string,
workspaceId: WorkspaceId workspaceId: WorkspaceId

View File

@ -51,11 +51,13 @@
"cors": "^2.8.5", "cors": "^2.8.5",
"@hcengineering/elastic": "^0.6.0", "@hcengineering/elastic": "^0.6.0",
"@hcengineering/server-core": "^0.6.1", "@hcengineering/server-core": "^0.6.1",
"@hcengineering/server": "^0.6.4",
"@hcengineering/server-token": "^0.6.7", "@hcengineering/server-token": "^0.6.7",
"@hcengineering/attachment": "^0.6.9", "@hcengineering/attachment": "^0.6.9",
"body-parser": "^1.20.2", "body-parser": "^1.20.2",
"sharp": "~0.32.0", "sharp": "~0.32.0",
"@hcengineering/minio": "^0.6.0", "@hcengineering/minio": "^0.6.0",
"@hcengineering/mongo": "^0.6.1",
"morgan": "^1.10.0" "morgan": "^1.10.0"
} }
} }

View File

@ -15,7 +15,7 @@
// //
import { MeasureContext, WorkspaceId, metricsAggregate } from '@hcengineering/core' import { MeasureContext, WorkspaceId, metricsAggregate } from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio' import { StorageAdapter } from '@hcengineering/server-core'
import { Token, decodeToken } from '@hcengineering/server-token' import { Token, decodeToken } from '@hcengineering/server-token'
import bp from 'body-parser' import bp from 'body-parser'
import cors from 'cors' import cors from 'cors'
@ -35,19 +35,16 @@ const cacheControlNoCache = 'max-age=1d, no-cache, must-revalidate'
async function minioUpload ( async function minioUpload (
ctx: MeasureContext, ctx: MeasureContext,
minio: MinioService, storageAdapter: StorageAdapter,
workspace: WorkspaceId, workspace: WorkspaceId,
file: UploadedFile file: UploadedFile
): Promise<string> { ): Promise<string> {
const id = uuid() const id = uuid()
const meta: any = {
'Content-Type': file.mimetype
}
const resp = await ctx.with( const resp = await ctx.with(
'storage upload', 'storage upload',
{ workspace: workspace.name }, { workspace: workspace.name },
async () => await minio.put(workspace, id, file.data, file.size, meta), async () => await storageAdapter.put(ctx, workspace, id, file.data, file.mimetype, file.size),
{ file: file.name, contentType: file.mimetype } { file: file.name, contentType: file.mimetype }
) )
@ -76,13 +73,17 @@ function getRange (range: string, size: number): [number, number] {
async function getFileRange ( async function getFileRange (
ctx: MeasureContext, ctx: MeasureContext,
range: string, range: string,
client: MinioService, client: StorageAdapter,
workspace: WorkspaceId, workspace: WorkspaceId,
uuid: string, uuid: string,
res: Response res: Response
): Promise<void> { ): Promise<void> {
const stat = await ctx.with('stats', {}, async () => await client.stat(workspace, uuid)) const stat = await ctx.with('stats', {}, async () => await client.stat(ctx, workspace, uuid))
if (stat === undefined) {
await ctx.error('No such key', { file: uuid })
res.status(404).send()
return
}
const size: number = stat.size const size: number = stat.size
const [start, end] = getRange(range, size) const [start, end] = getRange(range, size)
@ -97,13 +98,13 @@ async function getFileRange (
await ctx.with( await ctx.with(
'write', 'write',
{ contentType: stat.metaData['content-type'] }, { contentType: stat.contentType },
async (ctx) => { async (ctx) => {
try { try {
const dataStream = await ctx.with( const dataStream = await ctx.with(
'partial', 'partial',
{}, {},
async () => await client.partial(workspace, uuid, start, end - start + 1), async () => await client.partial(ctx, workspace, uuid, start, end - start + 1),
{} {}
) )
res.writeHead(206, { res.writeHead(206, {
@ -111,9 +112,9 @@ async function getFileRange (
'Content-Range': `bytes ${start}-${end}/${size}`, 'Content-Range': `bytes ${start}-${end}/${size}`,
'Accept-Ranges': 'bytes', 'Accept-Ranges': 'bytes',
'Content-Length': end - start + 1, 'Content-Length': end - start + 1,
'Content-Type': stat.metaData['content-type'], 'Content-Type': stat.contentType,
Etag: stat.etag, Etag: stat.etag,
'Last-Modified': stat.lastModified.toISOString() 'Last-Modified': new Date(stat.modifiedOn).toISOString()
}) })
dataStream.pipe(res) dataStream.pipe(res)
@ -144,33 +145,38 @@ async function getFileRange (
res.status(500).send() res.status(500).send()
} }
}, },
{ ...stat.metaData, uuid, start, end: end - start + 1, ...stat.metaData } { uuid, start, end: end - start + 1 }
) )
} }
async function getFile ( async function getFile (
ctx: MeasureContext, ctx: MeasureContext,
client: MinioService, client: StorageAdapter,
workspace: WorkspaceId, workspace: WorkspaceId,
uuid: string, uuid: string,
req: Request, req: Request,
res: Response res: Response
): Promise<void> { ): Promise<void> {
const stat = await ctx.with('stat', {}, async () => await client.stat(workspace, uuid)) const stat = await ctx.with('stat', {}, async () => await client.stat(ctx, workspace, uuid))
if (stat === undefined) {
await ctx.error('No such key', { file: req.query.file })
res.status(404).send()
return
}
const etag = stat.etag const etag = stat.etag
if ( if (
preConditions.IfNoneMatch(req.headers, { etag }) === 'notModified' || preConditions.IfNoneMatch(req.headers, { etag }) === 'notModified' ||
preConditions.IfMatch(req.headers, { etag }) === 'notModified' || preConditions.IfMatch(req.headers, { etag }) === 'notModified' ||
preConditions.IfModifiedSince(req.headers, { lastModified: stat.lastModified }) === 'notModified' preConditions.IfModifiedSince(req.headers, { lastModified: new Date(stat.modifiedOn) }) === 'notModified'
) { ) {
// Matched, return not modified // Matched, return not modified
res.statusCode = 304 res.statusCode = 304
res.end() res.end()
return return
} }
if (preConditions.IfUnmodifiedSince(req.headers, { lastModified: stat.lastModified }) === 'failed') { if (preConditions.IfUnmodifiedSince(req.headers, { lastModified: new Date(stat.modifiedOn) }) === 'failed') {
// Send 412 (Precondition Failed) // Send 412 (Precondition Failed)
res.statusCode = 412 res.statusCode = 412
res.end() res.end()
@ -179,14 +185,14 @@ async function getFile (
await ctx.with( await ctx.with(
'write', 'write',
{ contentType: stat.metaData['content-type'] }, { contentType: stat.contentType },
async (ctx) => { async (ctx) => {
try { try {
const dataStream = await ctx.with('readable', {}, async () => await client.get(workspace, uuid)) const dataStream = await ctx.with('readable', {}, async () => await client.get(ctx, workspace, uuid))
res.writeHead(200, { res.writeHead(200, {
'Content-Type': stat.metaData['content-type'], 'Content-Type': stat.contentType,
Etag: stat.etag, Etag: stat.etag,
'Last-Modified': stat.lastModified.toISOString(), 'Last-Modified': new Date(stat.modifiedOn).toISOString(),
'Cache-Control': cacheControlValue 'Cache-Control': cacheControlValue
}) })
@ -210,7 +216,7 @@ async function getFile (
res.status(500).send() res.status(500).send()
} }
}, },
{ ...stat.metaData } {}
) )
} }
@ -223,7 +229,7 @@ export function start (
config: { config: {
transactorEndpoint: string transactorEndpoint: string
elasticUrl: string elasticUrl: string
minio: MinioService storageAdapter: StorageAdapter
accountsUrl: string accountsUrl: string
uploadUrl: string uploadUrl: string
modelVersion: string modelVersion: string
@ -325,8 +331,13 @@ export function start (
let uuid = req.query.file as string let uuid = req.query.file as string
const size = req.query.size as 'inline' | 'tiny' | 'x-small' | 'small' | 'medium' | 'large' | 'x-large' | 'full' const size = req.query.size as 'inline' | 'tiny' | 'x-small' | 'small' | 'medium' | 'large' | 'x-large' | 'full'
uuid = await getResizeID(size, uuid, config, payload) uuid = await getResizeID(ctx, size, uuid, config, payload)
const stat = await config.minio.stat(payload.workspace, uuid) const stat = await config.storageAdapter.stat(ctx, payload.workspace, uuid)
if (stat === undefined) {
await ctx.error('No such key', { file: req.query.file })
res.status(404).send()
return
}
const fileSize = stat.size const fileSize = stat.size
@ -334,14 +345,14 @@ export function start (
'accept-ranges': 'bytes', 'accept-ranges': 'bytes',
'content-length': fileSize, 'content-length': fileSize,
Etag: stat.etag, Etag: stat.etag,
'Last-Modified': stat.lastModified.toISOString() 'Last-Modified': new Date(stat.modifiedOn).toISOString()
}) })
res.status(200) res.status(200)
res.end() res.end()
} catch (error: any) { } catch (error: any) {
if (error?.code === 'NoSuchKey' || error?.code === 'NotFound') { if (error?.code === 'NoSuchKey' || error?.code === 'NotFound') {
console.log('No such key', req.query.file) await ctx.error('No such key', { file: req.query.file })
res.status(404).send() res.status(404).send()
return return
} else { } else {
@ -390,9 +401,9 @@ export function start (
const d = await ctx.with( const d = await ctx.with(
'notoken-stat', 'notoken-stat',
{ workspace: payload.workspace.name }, { workspace: payload.workspace.name },
async () => await config.minio.stat(payload.workspace, uuid) async () => await config.storageAdapter.stat(ctx, payload.workspace, uuid)
) )
if (!((d.metaData['content-type'] as string) ?? '').includes('image')) { if (d !== undefined && !(d.contentType ?? '').includes('image')) {
// Do not allow to return non images with no token. // Do not allow to return non images with no token.
if (token === undefined) { if (token === undefined) {
res.status(403).send() res.status(403).send()
@ -404,19 +415,19 @@ export function start (
const size = req.query.size as 'inline' | 'tiny' | 'x-small' | 'small' | 'medium' | 'large' | 'x-large' | 'full' const size = req.query.size as 'inline' | 'tiny' | 'x-small' | 'small' | 'medium' | 'large' | 'x-large' | 'full'
uuid = await ctx.with('resize', {}, async () => await getResizeID(size, uuid, config, payload)) uuid = await ctx.with('resize', {}, async () => await getResizeID(ctx, size, uuid, config, payload))
const range = req.headers.range const range = req.headers.range
if (range !== undefined) { if (range !== undefined) {
await ctx.with('file-range', { workspace: payload.workspace.name }, async (ctx) => { await ctx.with('file-range', { workspace: payload.workspace.name }, async (ctx) => {
await getFileRange(ctx, range, config.minio, payload.workspace, uuid, res) await getFileRange(ctx, range, config.storageAdapter, payload.workspace, uuid, res)
}) })
} else { } else {
await ctx.with( await ctx.with(
'file', 'file',
{ workspace: payload.workspace.name }, { workspace: payload.workspace.name },
async (ctx) => { async (ctx) => {
await getFile(ctx, config.minio, payload.workspace, uuid, req, res) await getFile(ctx, config.storageAdapter, payload.workspace, uuid, req, res)
}, },
{ uuid } { uuid }
) )
@ -479,7 +490,7 @@ export function start (
try { try {
const token = authHeader.split(' ')[1] const token = authHeader.split(' ')[1]
const payload = decodeToken(token) const payload = decodeToken(token)
const uuid = await minioUpload(ctx, config.minio, payload.workspace, file) const uuid = await minioUpload(ctx, config.storageAdapter, payload.workspace, file)
res.status(200).send(uuid) res.status(200).send(uuid)
} catch (error: any) { } catch (error: any) {
@ -508,13 +519,16 @@ export function start (
} }
// TODO: We need to allow delete only of user attached documents. (https://front.hc.engineering/workbench/platform/tracker/TSK-1081) // TODO: We need to allow delete only of user attached documents. (https://front.hc.engineering/workbench/platform/tracker/TSK-1081)
await config.minio.remove(payload.workspace, [uuid]) await config.storageAdapter.remove(ctx, payload.workspace, [uuid])
const extra = await config.minio.list(payload.workspace, uuid) // TODO: Add support for related documents.
// TODO: Move support of image resize/format change to separate place.
const extra = await config.storageAdapter.list(ctx, payload.workspace, uuid)
if (extra.length > 0) { if (extra.length > 0) {
await config.minio.remove( await config.storageAdapter.remove(
ctx,
payload.workspace, payload.workspace,
Array.from(extra.entries()).map((it) => it[1].name) Array.from(extra.entries()).map((it) => it[1]._id)
) )
} }
@ -570,10 +584,7 @@ export function start (
return return
} }
const id = uuid() const id = uuid()
const contentType = response.headers['content-type'] const contentType = response.headers['content-type'] ?? 'application/octet-stream'
const meta = {
'Content-Type': contentType
}
const data: Buffer[] = [] const data: Buffer[] = []
response response
.on('data', function (chunk) { .on('data', function (chunk) {
@ -581,8 +592,8 @@ export function start (
}) })
.on('end', function () { .on('end', function () {
const buffer = Buffer.concat(data) const buffer = Buffer.concat(data)
config.minio config.storageAdapter
.put(payload.workspace, id, buffer, 0, meta) .put(ctx, payload.workspace, id, buffer, contentType, buffer.length)
.then(async (objInfo) => { .then(async (objInfo) => {
console.log('uploaded uuid', id, objInfo.etag) console.log('uploaded uuid', id, objInfo.etag)
@ -649,9 +660,6 @@ export function start (
} }
const id = uuid() const id = uuid()
const contentType = response.headers['content-type'] const contentType = response.headers['content-type']
const meta = {
'Content-Type': contentType
}
const data: Buffer[] = [] const data: Buffer[] = []
response response
.on('data', function (chunk) { .on('data', function (chunk) {
@ -660,8 +668,8 @@ export function start (
.on('end', function () { .on('end', function () {
const buffer = Buffer.concat(data) const buffer = Buffer.concat(data)
// eslint-disable-next-line @typescript-eslint/no-misused-promises // eslint-disable-next-line @typescript-eslint/no-misused-promises
config.minio config.storageAdapter
.put(payload.workspace, id, buffer, 0, meta) .put(ctx, payload.workspace, id, buffer, contentType ?? 'application/octet-stream', buffer.length)
.then(async () => { .then(async () => {
console.log('uploaded uuid', id) console.log('uploaded uuid', id)
@ -753,9 +761,10 @@ export function start (
// | '2x-large' // | '2x-large'
// | 'full' // | 'full'
async function getResizeID ( async function getResizeID (
ctx: MeasureContext,
size: string, size: string,
uuid: string, uuid: string,
config: { minio: MinioService }, config: { storageAdapter: StorageAdapter },
payload: Token payload: Token
): Promise<string> { ): Promise<string> {
if (size !== undefined && size !== 'full') { if (size !== undefined && size !== 'full') {
@ -784,7 +793,7 @@ async function getResizeID (
let hasSmall = false let hasSmall = false
const sizeId = uuid + `%size%${width}` const sizeId = uuid + `%size%${width}`
try { try {
const d = await config.minio.stat(payload.workspace, sizeId) const d = await config.storageAdapter.stat(ctx, payload.workspace, sizeId)
hasSmall = d !== undefined && d.size > 0 hasSmall = d !== undefined && d.size > 0
} catch (err: any) { } catch (err: any) {
if (err.code !== 'NotFound') { if (err.code !== 'NotFound') {
@ -796,7 +805,7 @@ async function getResizeID (
uuid = sizeId uuid = sizeId
} else { } else {
// Let's get data and resize it // Let's get data and resize it
const data = Buffer.concat(await config.minio.read(payload.workspace, uuid)) const data = Buffer.concat(await config.storageAdapter.read(ctx, payload.workspace, uuid))
const dataBuff = await sharp(data) const dataBuff = await sharp(data)
.resize({ .resize({
@ -804,9 +813,9 @@ async function getResizeID (
}) })
.jpeg() .jpeg()
.toBuffer() .toBuffer()
await config.minio.put(payload.workspace, sizeId, dataBuff, dataBuff.length, {
'Content-Type': 'image/jpeg' // Add support of avif as well.
}) await config.storageAdapter.put(ctx, payload.workspace, sizeId, dataBuff, 'image/jpeg', dataBuff.length)
uuid = sizeId uuid = sizeId
} }
} }

View File

@ -15,8 +15,9 @@
// //
import { MeasureContext } from '@hcengineering/core' import { MeasureContext } from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import { setMetadata } from '@hcengineering/platform' import { setMetadata } from '@hcengineering/platform'
import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server'
import { StorageConfiguration } from '@hcengineering/server-core'
import serverToken from '@hcengineering/server-token' import serverToken from '@hcengineering/server-token'
import { start } from '.' import { start } from '.'
@ -31,37 +32,20 @@ export function startFront (ctx: MeasureContext, extraConfig?: Record<string, st
process.exit(1) process.exit(1)
} }
const url = process.env.MONGO_URL
if (url === undefined) {
console.error('please provide mongodb url')
process.exit(1)
}
const elasticUrl = process.env.ELASTIC_URL const elasticUrl = process.env.ELASTIC_URL
if (elasticUrl === undefined) { if (elasticUrl === undefined) {
console.error('please provide elastic url') console.error('please provide elastic url')
process.exit(1) process.exit(1)
} }
const minioEndpoint = process.env.MINIO_ENDPOINT const storageConfig: StorageConfiguration = storageConfigFromEnv()
if (minioEndpoint === undefined) { const storageAdapter = buildStorageFromConfig(storageConfig, url)
console.error('please provide minio endpoint')
process.exit(1)
}
const minioAccessKey = process.env.MINIO_ACCESS_KEY
if (minioAccessKey === undefined) {
console.error('please provide minio access key')
process.exit(1)
}
const minioSecretKey = process.env.MINIO_SECRET_KEY
if (minioSecretKey === undefined) {
console.error('please provide minio secret key')
process.exit(1)
}
const minio = new MinioService({
endPoint: minioEndpoint,
port: 9000,
useSSL: false,
accessKey: minioAccessKey,
secretKey: minioSecretKey
})
const accountsUrl = process.env.ACCOUNTS_URL const accountsUrl = process.env.ACCOUNTS_URL
if (accountsUrl === undefined) { if (accountsUrl === undefined) {
@ -132,7 +116,7 @@ export function startFront (ctx: MeasureContext, extraConfig?: Record<string, st
const config = { const config = {
transactorEndpoint, transactorEndpoint,
elasticUrl, elasticUrl,
minio, storageAdapter,
accountsUrl, accountsUrl,
uploadUrl, uploadUrl,
modelVersion, modelVersion,

View File

@ -13,11 +13,17 @@
// limitations under the License. // limitations under the License.
// //
import { Client, type BucketItemStat, type ItemBucketMetadata, type UploadedObjectInfo } from 'minio' import { Client, type UploadedObjectInfo } from 'minio'
import { toWorkspaceString, type WorkspaceId } from '@hcengineering/core' import core, {
toWorkspaceString,
type Blob,
type MeasureContext,
type Ref,
type WorkspaceId
} from '@hcengineering/core'
import { type StorageAdapter, type WorkspaceItem } from '@hcengineering/server-core' import { type ListBlobResult, type StorageAdapter, type StorageConfig } from '@hcengineering/server-core'
import { type Readable } from 'stream' import { type Readable } from 'stream'
/** /**
@ -27,39 +33,63 @@ export function getBucketId (workspaceId: WorkspaceId): string {
return toWorkspaceString(workspaceId, '.') return toWorkspaceString(workspaceId, '.')
} }
export interface MinioConfig extends StorageConfig {
kind: 'minio'
region: string
endpoint: string
accessKeyId: string
secretAccessKey: string
port: number
useSSL: boolean
}
/** /**
* @public * @public
*/ */
export class MinioService implements StorageAdapter { export class MinioService implements StorageAdapter {
static config = 'minio'
client: Client client: Client
constructor (opt: { endPoint: string, port: number, accessKey: string, secretKey: string, useSSL: boolean }) { constructor (opt: { endPoint: string, port: number, accessKey: string, secretKey: string, useSSL: boolean }) {
this.client = new Client(opt) this.client = new Client(opt)
} }
async exists (workspaceId: WorkspaceId): Promise<boolean> { async initialize (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {}
async exists (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<boolean> {
return await this.client.bucketExists(getBucketId(workspaceId)) return await this.client.bucketExists(getBucketId(workspaceId))
} }
async make (workspaceId: WorkspaceId): Promise<void> { async make (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {
await this.client.makeBucket(getBucketId(workspaceId), 'k8s') await this.client.makeBucket(getBucketId(workspaceId), 'k8s')
} }
async remove (workspaceId: WorkspaceId, objectNames: string[]): Promise<void> { async remove (ctx: MeasureContext, workspaceId: WorkspaceId, objectNames: string[]): Promise<void> {
await this.client.removeObjects(getBucketId(workspaceId), objectNames) await this.client.removeObjects(getBucketId(workspaceId), objectNames)
} }
async delete (workspaceId: WorkspaceId): Promise<void> { async delete (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {
await this.client.removeBucket(getBucketId(workspaceId)) await this.client.removeBucket(getBucketId(workspaceId))
} }
async list (workspaceId: WorkspaceId, prefix?: string): Promise<WorkspaceItem[]> { async list (ctx: MeasureContext, workspaceId: WorkspaceId, prefix?: string): Promise<ListBlobResult[]> {
try { try {
const items = new Map<string, WorkspaceItem>() const items = new Map<string, ListBlobResult>()
const list = this.client.listObjects(getBucketId(workspaceId), prefix, true) const list = this.client.listObjects(getBucketId(workspaceId), prefix, true)
await new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
list.on('data', (data) => { list.on('data', (data) => {
if (data.name !== undefined) { if (data.name !== undefined) {
items.set(data.name, { metaData: {}, ...data } as any) items.set(data.name, {
_id: data.name as Ref<Blob>,
_class: core.class.Blob,
etag: data.etag,
size: data.size,
provider: '',
space: core.space.Configuration,
modifiedBy: core.account.ConfigUser,
modifiedOn: data.lastModified.getTime(),
storageId: data.name
})
} }
}) })
list.on('end', () => { list.on('end', () => {
@ -67,6 +97,7 @@ export class MinioService implements StorageAdapter {
resolve(null) resolve(null)
}) })
list.on('error', (err) => { list.on('error', (err) => {
list.destroy()
reject(err) reject(err)
}) })
}) })
@ -80,25 +111,41 @@ export class MinioService implements StorageAdapter {
} }
} }
async stat (workspaceId: WorkspaceId, objectName: string): Promise<BucketItemStat> { async stat (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Blob> {
return await this.client.statObject(getBucketId(workspaceId), objectName) const result = await this.client.statObject(getBucketId(workspaceId), objectName)
return {
provider: '',
_class: core.class.Blob,
_id: objectName as Ref<Blob>,
storageId: objectName,
contentType: result.metaData['content-type'],
size: result.size,
etag: result.etag,
space: core.space.Configuration,
modifiedBy: core.account.System,
modifiedOn: result.lastModified.getTime(),
version: result.versionId ?? null
}
} }
async get (workspaceId: WorkspaceId, objectName: string): Promise<Readable> { async get (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Readable> {
return await this.client.getObject(getBucketId(workspaceId), objectName) return await this.client.getObject(getBucketId(workspaceId), objectName)
} }
async put ( async put (
ctx: MeasureContext,
workspaceId: WorkspaceId, workspaceId: WorkspaceId,
objectName: string, objectName: string,
stream: Readable | Buffer | string, stream: Readable | Buffer | string,
size?: number, contentType: string,
metaData?: ItemBucketMetadata size?: number
): Promise<UploadedObjectInfo> { ): Promise<UploadedObjectInfo> {
return await this.client.putObject(getBucketId(workspaceId), objectName, stream, size, metaData) return await this.client.putObject(getBucketId(workspaceId), objectName, stream, size, {
'Content-Type': contentType
})
} }
async read (workspaceId: WorkspaceId, name: string): Promise<Buffer[]> { async read (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise<Buffer[]> {
const data = await this.client.getObject(getBucketId(workspaceId), name) const data = await this.client.getObject(getBucketId(workspaceId), name)
const chunks: Buffer[] = [] const chunks: Buffer[] = []
@ -122,7 +169,13 @@ export class MinioService implements StorageAdapter {
return chunks return chunks
} }
async partial (workspaceId: WorkspaceId, objectName: string, offset: number, length?: number): Promise<Readable> { async partial (
ctx: MeasureContext,
workspaceId: WorkspaceId,
objectName: string,
offset: number,
length?: number
): Promise<Readable> {
return await this.client.getPartialObject(getBucketId(workspaceId), objectName, offset, length) return await this.client.getPartialObject(getBucketId(workspaceId), objectName, offset, length)
} }
} }

View File

@ -54,6 +54,7 @@ const txes = genMinModel()
createTaskModel(txes) createTaskModel(txes)
async function createNullAdapter ( async function createNullAdapter (
ctx: MeasureContext,
hierarchy: Hierarchy, hierarchy: Hierarchy,
url: string, url: string,
db: WorkspaceId, db: WorkspaceId,
@ -116,7 +117,13 @@ describe('mongo operations', () => {
} }
const mctx = new MeasureMetricsContext('', {}) const mctx = new MeasureMetricsContext('', {})
const txStorage = await createMongoTxAdapter(hierarchy, mongodbUri, getWorkspaceId(dbId, ''), model) const txStorage = await createMongoTxAdapter(
new MeasureMetricsContext('', {}),
hierarchy,
mongodbUri,
getWorkspaceId(dbId, ''),
model
)
// Put all transactions to Tx // Put all transactions to Tx
for (const t of txes) { for (const t of txes) {

View File

@ -14,5 +14,6 @@
// limitations under the License. // limitations under the License.
// //
export * from './rawAdapter'
export * from './storage' export * from './storage'
export * from './utils' export * from './utils'

View File

@ -0,0 +1,84 @@
import {
SortingOrder,
cutObjectArray,
toFindResult,
type Doc,
type DocumentQuery,
type Domain,
type FindOptions,
type FindResult,
type WorkspaceId
} from '@hcengineering/core'
import type { RawDBAdapter } from '@hcengineering/server-core'
import { type Document, type Filter, type Sort } from 'mongodb'
import { toArray, uploadDocuments } from './storage'
import { getMongoClient, getWorkspaceDB } from './utils'
/**
 * Build a minimal raw DB adapter on top of a shared mongo client.
 *
 * @param url - mongodb connection string; the underlying client is shared via getMongoClient.
 * @returns adapter exposing `find` (query with optional sort/limit/total) and `upload` (bulk upsert).
 */
export function createRawMongoDBAdapter (url: string): RawDBAdapter {
  const client = getMongoClient(url)

  // Convert core FindOptions.sort into a mongodb Sort document.
  // Returns undefined when no sort keys are present so the cursor keeps natural order.
  const collectSort = (options: FindOptions<Doc>): Sort | undefined => {
    if (options?.sort === undefined) {
      return undefined
    }
    const sort: Sort = {}
    let count = 0
    for (const key in options.sort) {
      sort[key] = options.sort[key] === SortingOrder.Ascending ? 1 : -1
      count++
    }
    if (count === 0) {
      return undefined
    }
    return sort
  }

  return {
    find: async function <T extends Doc>(
      workspace: WorkspaceId,
      domain: Domain,
      query: DocumentQuery<T>,
      options?: Omit<FindOptions<T>, 'projection' | 'lookup'>
    ): Promise<FindResult<T>> {
      const db = getWorkspaceDB(await client.getClient(), workspace)
      const coll = db.collection(domain)
      let cursor = coll.find<T>(query as Filter<Document>, {
        checkKeys: false,
        enableUtf8Validation: false
      })

      // total === -1 means "not requested / not yet computed".
      let total: number = -1
      if (options != null) {
        if (options.sort !== undefined) {
          const sort = collectSort(options)
          if (sort !== undefined) {
            cursor = cursor.sort(sort)
          }
        }
        if (options.limit !== undefined || typeof query._id === 'string') {
          if (options.total === true) {
            // Count before limiting so the total reflects the full match set.
            total = await coll.countDocuments(query)
          }
          // A lookup by concrete _id can match at most one document.
          cursor = cursor.limit(options.limit ?? 1)
        }
      }

      // Error in case of timeout
      try {
        const res = await toArray<T>(cursor)
        // Derive total from the result set only when it was not already
        // computed via countDocuments above — otherwise a limited cursor
        // (e.g. the _id fast path) would clobber the real total.
        if (options?.total === true && total === -1) {
          total = res.length
        }
        return toFindResult(res, total)
      } catch (e) {
        console.error('error during executing cursor in findAll', cutObjectArray(query), options, e)
        throw e
      }
    },
    upload: async (workspace, domain, docs) => {
      const db = getWorkspaceDB(await client.getClient(), workspace)
      const coll = db.collection(domain)
      await uploadDocuments(docs, coll)
    }
  }
}

View File

@ -100,6 +100,67 @@ interface LookupStep {
pipeline?: any pipeline?: any
} }
/**
 * Drain a mongodb cursor into an array and make sure the cursor is closed.
 *
 * The cursor is closed in a finally block, so server-side resources are
 * released even when toArray() rejects (e.g. on a query timeout) — the
 * previous version leaked the cursor on failure.
 */
export async function toArray<T> (cursor: AbstractCursor<T>): Promise<T[]> {
  try {
    return await cursor.toArray()
  } finally {
    await cursor.close()
  }
}
/**
 * Rough byte-size estimate of an object.
 *
 * Walks enumerable properties (including inherited ones) and sums key
 * lengths plus per-type value estimates, recursing into arrays and plain
 * objects. This is a heuristic used for change digests, not an exact
 * serialized size. Functions and undefined inputs count as 0.
 */
function calcSize (obj: any): number {
  if (typeof obj === 'undefined' || typeof obj === 'function') {
    return 0
  }

  let total = 0
  for (const prop in obj) {
    // include prototype properties
    const val = obj[prop]
    total += prop.length
    const kind = getTypeOf(val)
    if (kind === 'Array') {
      total += 4 + calcSize(val)
    } else if (kind === 'Object') {
      total += calcSize(val)
    } else if (kind === 'Date') {
      total += 24 // Some value
    } else if (kind === 'string') {
      total += (val as string).length
    } else if (kind === 'number') {
      total += 8
    } else if (kind === 'boolean') {
      total += 1
    } else if (kind === 'symbol') {
      total += (val as symbol).toString().length
    } else if (kind === 'bigint') {
      total += (val as bigint).toString().length
    } else if (kind === 'undefined' || kind === 'null') {
      total += 1
    } else {
      total += val.toString().length
    }
  }
  return total
}
abstract class MongoAdapterBase implements DbAdapter { abstract class MongoAdapterBase implements DbAdapter {
constructor ( constructor (
protected readonly db: Db, protected readonly db: Db,
@ -110,12 +171,6 @@ abstract class MongoAdapterBase implements DbAdapter {
async init (): Promise<void> {} async init (): Promise<void> {}
async toArray<T>(cursor: AbstractCursor<T>): Promise<T[]> {
const data = await cursor.toArray()
await cursor.close()
return data
}
async createIndexes (domain: Domain, config: Pick<IndexingConfiguration<Doc>, 'indexes'>): Promise<void> { async createIndexes (domain: Domain, config: Pick<IndexingConfiguration<Doc>, 'indexes'>): Promise<void> {
for (const vv of config.indexes) { for (const vv of config.indexes) {
try { try {
@ -469,7 +524,7 @@ abstract class MongoAdapterBase implements DbAdapter {
let result: WithLookup<T>[] = [] let result: WithLookup<T>[] = []
let total = options?.total === true ? 0 : -1 let total = options?.total === true ? 0 : -1
try { try {
result = (await ctx.with('toArray', {}, async (ctx) => await this.toArray(cursor), { result = (await ctx.with('toArray', {}, async (ctx) => await toArray(cursor), {
domain, domain,
pipeline pipeline
})) as any[] })) as any[]
@ -489,7 +544,7 @@ abstract class MongoAdapterBase implements DbAdapter {
checkKeys: false, checkKeys: false,
enableUtf8Validation: false enableUtf8Validation: false
}) })
const arr = await this.toArray(totalCursor) const arr = await toArray(totalCursor)
total = arr?.[0]?.total ?? 0 total = arr?.[0]?.total ?? 0
} }
return toFindResult(this.stripHash(result), total) return toFindResult(this.stripHash(result), total)
@ -620,7 +675,7 @@ abstract class MongoAdapterBase implements DbAdapter {
// Error in case of timeout // Error in case of timeout
try { try {
const res: T[] = await ctx.with('toArray', {}, async (ctx) => await this.toArray(cursor), { const res: T[] = await ctx.with('toArray', {}, async (ctx) => await toArray(cursor), {
mongoQuery, mongoQuery,
options, options,
domain domain
@ -729,7 +784,7 @@ abstract class MongoAdapterBase implements DbAdapter {
} }
const pos = (digest ?? '').indexOf('|') const pos = (digest ?? '').indexOf('|')
if (digest == null || digest === '' || pos === -1) { if (digest == null || digest === '' || pos === -1) {
const size = this.calcSize(d) const size = calcSize(d)
digest = hashID // we just need some random value digest = hashID // we just need some random value
bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`) bulkUpdate.set(d._id, `${digest}|${size.toString(16)}`)
@ -755,95 +810,19 @@ abstract class MongoAdapterBase implements DbAdapter {
} }
} }
/**
* Return some estimation for object size
*/
calcSize (obj: any): number {
if (typeof obj === 'undefined') {
return 0
}
if (typeof obj === 'function') {
return 0
}
let result = 0
for (const key in obj) {
// include prototype properties
const value = obj[key]
const type = getTypeOf(value)
result += key.length
switch (type) {
case 'Array':
result += 4 + this.calcSize(value)
break
case 'Object':
result += this.calcSize(value)
break
case 'Date':
result += 24 // Some value
break
case 'string':
result += (value as string).length
break
case 'number':
result += 8
break
case 'boolean':
result += 1
break
case 'symbol':
result += (value as symbol).toString().length
break
case 'bigint':
result += (value as bigint).toString().length
break
case 'undefined':
result += 1
break
case 'null':
result += 1
break
default:
result += value.toString().length
}
}
return result
}
async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> { async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
if (docs.length === 0) { if (docs.length === 0) {
return [] return []
} }
const cursor = this.db.collection<Doc>(domain).find<Doc>({ _id: { $in: docs } }, { limit: docs.length }) const cursor = this.db.collection<Doc>(domain).find<Doc>({ _id: { $in: docs } }, { limit: docs.length })
const result = await this.toArray(cursor) const result = await toArray(cursor)
return this.stripHash(this.stripHash(result)) return this.stripHash(this.stripHash(result))
} }
async upload (domain: Domain, docs: Doc[]): Promise<void> { async upload (domain: Domain, docs: Doc[]): Promise<void> {
const coll = this.db.collection(domain) const coll = this.db.collection(domain)
const ops = Array.from(docs) await uploadDocuments(docs, coll)
while (ops.length > 0) {
const part = ops.splice(0, 500)
await coll.bulkWrite(
part.map((it) => {
const digest: string | null = (it as any)['%hash%']
if ('%hash%' in it) {
delete it['%hash%']
}
const size = this.calcSize(it)
return {
replaceOne: {
filter: { _id: it._id },
replacement: { ...it, '%hash%': digest == null ? null : `${digest}|${size.toString(16)}` },
upsert: true
}
}
})
)
}
} }
async update (domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>): Promise<void> { async update (domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>): Promise<void> {
@ -1290,7 +1269,7 @@ class MongoTxAdapter extends MongoAdapterBase implements TxAdapter {
.collection(DOMAIN_TX) .collection(DOMAIN_TX)
.find<Tx>({ objectSpace: core.space.Model }) .find<Tx>({ objectSpace: core.space.Model })
.sort({ _id: 1, modifiedOn: 1 }) .sort({ _id: 1, modifiedOn: 1 })
const model = await this.toArray(cursor) const model = await toArray(cursor)
// We need to put all core.account.System transactions first // We need to put all core.account.System transactions first
const systemTx: Tx[] = [] const systemTx: Tx[] = []
const userTx: Tx[] = [] const userTx: Tx[] = []
@ -1311,6 +1290,31 @@ class MongoTxAdapter extends MongoAdapterBase implements TxAdapter {
} }
} }
/**
 * Upsert documents into a collection in batches of 500 via bulkWrite.
 *
 * Each document is replaced by `_id`. An existing '%hash%' marker is
 * stripped from the document and re-attached to the stored replacement as
 * `${digest}|${sizeHex}` (null when no digest was present), where sizeHex
 * is the calcSize() estimate in hexadecimal.
 */
export async function uploadDocuments (docs: Doc[], coll: Collection<Document>): Promise<void> {
  const batchSize = 500
  const pending = Array.from(docs)
  while (pending.length > 0) {
    const batch = pending.splice(0, batchSize)
    const operations = batch.map((doc) => {
      const digest: string | null = (doc as any)['%hash%']
      if ('%hash%' in doc) {
        delete (doc as any)['%hash%']
      }
      const size = calcSize(doc)
      const hash = digest == null ? null : `${digest}|${size.toString(16)}`
      return {
        replaceOne: {
          filter: { _id: doc._id },
          replacement: { ...doc, '%hash%': hash },
          upsert: true
        }
      }
    })
    await coll.bulkWrite(operations)
  }
}
function fillEnumSort ( function fillEnumSort (
enumOf: Enum, enumOf: Enum,
key: string, key: string,
@ -1402,6 +1406,7 @@ function translateLikeQuery (pattern: string): { $regex: string, $options: strin
* @public * @public
*/ */
export async function createMongoAdapter ( export async function createMongoAdapter (
ctx: MeasureContext,
hierarchy: Hierarchy, hierarchy: Hierarchy,
url: string, url: string,
workspaceId: WorkspaceId, workspaceId: WorkspaceId,
@ -1417,6 +1422,7 @@ export async function createMongoAdapter (
* @public * @public
*/ */
export async function createMongoTxAdapter ( export async function createMongoTxAdapter (
ctx: MeasureContext,
hierarchy: Hierarchy, hierarchy: Hierarchy,
url: string, url: string,
workspaceId: WorkspaceId, workspaceId: WorkspaceId,

View File

@ -41,6 +41,7 @@
"@hcengineering/server-core": "^0.6.1", "@hcengineering/server-core": "^0.6.1",
"@hcengineering/server-ws": "^0.6.11", "@hcengineering/server-ws": "^0.6.11",
"@hcengineering/mongo": "^0.6.1", "@hcengineering/mongo": "^0.6.1",
"@hcengineering/minio": "^0.6.0",
"@hcengineering/elastic": "^0.6.0", "@hcengineering/elastic": "^0.6.0",
"elastic-apm-node": "~3.26.0", "elastic-apm-node": "~3.26.0",
"@hcengineering/server-token": "^0.6.7", "@hcengineering/server-token": "^0.6.7",

View File

@ -21,3 +21,4 @@ export * from './backup'
export * from './metrics' export * from './metrics'
export * from './rekoni' export * from './rekoni'
export * from './ydoc' export * from './ydoc'
export * from './starter'

View File

@ -33,12 +33,13 @@ import core, {
TxResult, TxResult,
WorkspaceId WorkspaceId
} from '@hcengineering/core' } from '@hcengineering/core'
import { DbAdapter, StorageAdapter, WorkspaceItem } from '@hcengineering/server-core' import { DbAdapter, ListBlobResult, StorageAdapter } from '@hcengineering/server-core'
class StorageBlobAdapter implements DbAdapter { class StorageBlobAdapter implements DbAdapter {
constructor ( constructor (
readonly workspaceId: WorkspaceId, readonly workspaceId: WorkspaceId,
readonly client: StorageAdapter readonly client: StorageAdapter,
readonly ctx: MeasureContext
) {} ) {}
async findAll<T extends Doc>( async findAll<T extends Doc>(
@ -63,18 +64,18 @@ class StorageBlobAdapter implements DbAdapter {
find (domain: Domain): StorageIterator { find (domain: Domain): StorageIterator {
let listReceived = false let listReceived = false
let items: WorkspaceItem[] = [] let items: ListBlobResult[] = []
let pos = 0 let pos = 0
return { return {
next: async () => { next: async () => {
if (!listReceived) { if (!listReceived) {
items = await this.client.list(this.workspaceId) items = await this.client.list(this.ctx, this.workspaceId)
listReceived = true listReceived = true
} }
if (pos < items?.length) { if (pos < items?.length) {
const item = items[pos] const item = items[pos]
const result = { const result = {
id: item.name, id: item._id,
hash: item.etag, hash: item.etag,
size: item.size size: item.size
} }
@ -89,17 +90,20 @@ class StorageBlobAdapter implements DbAdapter {
async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> { async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
const result: Doc[] = [] const result: Doc[] = []
for (const item of docs) { for (const item of docs) {
const stat = await this.client.stat(this.workspaceId, item) const stat = await this.client.stat(this.ctx, this.workspaceId, item)
const chunks: Buffer[] = await this.client.read(this.workspaceId, item) if (stat === undefined) {
throw new Error(`Could not find blob ${item}`)
}
const chunks: Buffer[] = await this.client.read(this.ctx, this.workspaceId, item)
const final = Buffer.concat(chunks) const final = Buffer.concat(chunks)
const dta: BlobData = { const dta: BlobData = {
_id: item as Ref<BlobData>, _id: item as Ref<BlobData>,
_class: core.class.BlobData, _class: core.class.BlobData,
name: item as string, name: item as string,
size: stat.size, size: stat.size,
type: stat.metaData['content-type'], type: stat.contentType,
space: 'blob' as Ref<Space>, space: 'blob' as Ref<Space>,
modifiedOn: stat.lastModified.getTime(), modifiedOn: stat.modifiedOn,
modifiedBy: core.account.System, modifiedBy: core.account.System,
base64Data: final.toString('base64') base64Data: final.toString('base64')
} }
@ -118,20 +122,19 @@ class StorageBlobAdapter implements DbAdapter {
const blob = d as unknown as BlobData const blob = d as unknown as BlobData
// Remove existing document // Remove existing document
try { try {
await this.client.remove(this.workspaceId, [blob._id]) await this.client.remove(this.ctx, this.workspaceId, [blob._id])
} catch (ee) { } catch (ee) {
// ignore error // ignore error
} }
const buffer = Buffer.from(blob.base64Data, 'base64') const buffer = Buffer.from(blob.base64Data, 'base64')
await this.client.put(this.workspaceId, blob._id, buffer, buffer.length, { // TODO: Add support of
'Content-Type': blob.type, /// lastModified: new Date(blob.modifiedOn)
lastModified: new Date(blob.modifiedOn) await this.client.put(this.ctx, this.workspaceId, blob._id, buffer, blob.type, buffer.length)
})
} }
} }
async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> { async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {
await this.client.remove(this.workspaceId, docs) await this.client.remove(this.ctx, this.workspaceId, docs)
} }
async update (domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>): Promise<void> { async update (domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>): Promise<void> {
@ -143,6 +146,7 @@ class StorageBlobAdapter implements DbAdapter {
* @public * @public
*/ */
export async function createStorageDataAdapter ( export async function createStorageDataAdapter (
ctx: MeasureContext,
hierarchy: Hierarchy, hierarchy: Hierarchy,
url: string, url: string,
workspaceId: WorkspaceId, workspaceId: WorkspaceId,
@ -154,9 +158,9 @@ export async function createStorageDataAdapter (
} }
// We need to create bucket if it doesn't exist // We need to create bucket if it doesn't exist
if (storage !== undefined) { if (storage !== undefined) {
if (!(await storage.exists(workspaceId))) { if (!(await storage.exists(ctx, workspaceId))) {
await storage.make(workspaceId) await storage.make(ctx, workspaceId)
} }
} }
return new StorageBlobAdapter(workspaceId, storage) return new StorageBlobAdapter(workspaceId, storage, ctx)
} }

View File

@ -13,13 +13,14 @@
// limitations under the License. // limitations under the License.
// //
import { Hierarchy, ModelDb, WorkspaceId } from '@hcengineering/core' import { Hierarchy, MeasureContext, ModelDb, WorkspaceId } from '@hcengineering/core'
import { DbAdapter, DummyDbAdapter } from '@hcengineering/server-core' import { DbAdapter, DummyDbAdapter } from '@hcengineering/server-core'
/** /**
* @public * @public
*/ */
export async function createNullAdapter ( export async function createNullAdapter (
ctx: MeasureContext,
hierarchy: Hierarchy, hierarchy: Hierarchy,
url: string, url: string,
workspaceId: WorkspaceId, workspaceId: WorkspaceId,
@ -27,12 +28,3 @@ export async function createNullAdapter (
): Promise<DbAdapter> { ): Promise<DbAdapter> {
return new DummyDbAdapter() return new DummyDbAdapter()
} }
/**
* @public
*/
export interface MinioConfig {
endPoint: string
accessKey: string
secretKey: string
}

View File

@ -0,0 +1,159 @@
import { MinioConfig, MinioService } from '@hcengineering/minio'
import { createRawMongoDBAdapter } from '@hcengineering/mongo'
import { buildStorage, StorageAdapter, StorageConfiguration } from '@hcengineering/server-core'
import { serverFactories, ServerFactory } from '@hcengineering/server-ws'
/**
 * Build a StorageConfiguration from the environment.
 *
 * Reads STORAGE_CONFIG (JSON). When it is absent or does not define a
 * default storage, falls back to a single MinIO storage assembled from
 * MINIO_ENDPOINT / MINIO_ACCESS_KEY / MINIO_SECRET_KEY, exiting the
 * process with code 1 when a required variable is missing.
 */
export function storageConfigFromEnv (): StorageConfiguration {
  const rawConfig = process.env.STORAGE_CONFIG ?? '{ "default": "", "storages": []}'
  const cfg: StorageConfiguration = JSON.parse(rawConfig)

  // A usable config names a default storage and lists at least one storage.
  if (cfg.storages.length !== 0 && cfg.default !== '') {
    return cfg
  }

  console.info('STORAGE_CONFIG is required for complex configuration, fallback to minio config')

  let endpoint = process.env.MINIO_ENDPOINT
  if (endpoint === undefined) {
    console.error('MINIO_ENDPOINT is required')
    process.exit(1)
  }
  const accessKey = process.env.MINIO_ACCESS_KEY
  if (accessKey === undefined) {
    console.error('MINIO_ACCESS_KEY is required')
    process.exit(1)
  }

  // MINIO_ENDPOINT may carry an explicit port as "host:port"; default is 9000.
  let port = 9000
  const hostParts = endpoint.split(':')
  if (hostParts.length > 1) {
    endpoint = hostParts[0]
    port = parseInt(hostParts[1])
  }

  const secretKey = process.env.MINIO_SECRET_KEY
  if (secretKey === undefined) {
    console.error('MINIO_SECRET_KEY is required')
    process.exit(1)
  }

  const fallback: MinioConfig = {
    kind: 'minio',
    name: 'minio',
    port,
    region: 'us-east-1',
    useSSL: false,
    endpoint,
    accessKeyId: accessKey,
    secretAccessKey: secretKey
  }
  cfg.storages.push(fallback)
  cfg.default = 'minio'
  return cfg
}
/**
 * Collect server configuration from the environment.
 *
 * Every required variable that is missing is reported and terminates the
 * process with exit code 1. SES_URL is the only optional value.
 *
 * Fix: missing-configuration diagnostics were inconsistently written with
 * console.log (stdout); all of them now go to stderr via console.error,
 * matching the MONGO_URL/ELASTIC_URL checks. parseInt is given an explicit
 * radix.
 */
export function serverConfigFromEnv (): {
  url: string
  elasticUrl: string
  serverSecret: string
  rekoniUrl: string
  frontUrl: string
  sesUrl: string | undefined
  accountsUrl: string
  serverPort: number
  serverFactory: ServerFactory
  enableCompression: boolean
  elasticIndexName: string
} {
  const serverPort = parseInt(process.env.SERVER_PORT ?? '3333', 10)
  // Unknown SERVER_PROVIDER values fall back to the plain websocket factory.
  const serverFactory = serverFactories[(process.env.SERVER_PROVIDER as string) ?? 'ws'] ?? serverFactories.ws
  const enableCompression = (process.env.ENABLE_COMPRESSION ?? 'true') === 'true'

  const url = process.env.MONGO_URL
  if (url === undefined) {
    console.error('please provide mongodb url')
    process.exit(1)
  }
  const elasticUrl = process.env.ELASTIC_URL
  if (elasticUrl === undefined) {
    console.error('please provide elastic url')
    process.exit(1)
  }
  const elasticIndexName = process.env.ELASTIC_INDEX_NAME
  if (elasticIndexName === undefined) {
    console.error('Please provide ELASTIC_INDEX_NAME')
    process.exit(1)
  }
  const serverSecret = process.env.SERVER_SECRET
  if (serverSecret === undefined) {
    console.error('Please provide server secret')
    process.exit(1)
  }
  const rekoniUrl = process.env.REKONI_URL
  if (rekoniUrl === undefined) {
    console.error('Please provide REKONI_URL url')
    process.exit(1)
  }
  const frontUrl = process.env.FRONT_URL
  if (frontUrl === undefined) {
    console.error('Please provide FRONT_URL url')
    process.exit(1)
  }
  const sesUrl = process.env.SES_URL // optional
  const accountsUrl = process.env.ACCOUNTS_URL
  if (accountsUrl === undefined) {
    console.error('Please provide ACCOUNTS_URL url')
    process.exit(1)
  }
  return {
    url,
    elasticUrl,
    elasticIndexName,
    serverSecret,
    rekoniUrl,
    frontUrl,
    sesUrl,
    accountsUrl,
    serverPort,
    serverFactory,
    enableCompression
  }
}
// Temporary solution, until migration will be implemented.
const ONLY_MINIO = true

/**
 * Build a StorageAdapter from the given storage configuration.
 *
 * While ONLY_MINIO is set, only the 'minio' entry of the configuration is
 * honored and a bare MinioService is returned; otherwise a composite
 * adapter is built via buildStorage, backed by a raw MongoDB adapter for
 * blob metadata.
 *
 * Fix: both construction sites passed minioConfig.accessKeyId /
 * c.accessKeyId as the MinIO secretKey (copy-paste bug); they now pass
 * secretAccessKey, so authentication uses the actual secret.
 *
 * @param config storage configuration (see storageConfigFromEnv)
 * @param dbUrl MongoDB connection URL used by the composite adapter
 * @throws Error when no 'minio' storage is configured (ONLY_MINIO path)
 *         or an unsupported storage kind is requested.
 */
export function buildStorageFromConfig (config: StorageConfiguration, dbUrl: string): StorageAdapter {
  if (ONLY_MINIO) {
    const minioConfig = config.storages.find((it) => it.kind === 'minio') as MinioConfig
    if (minioConfig === undefined) {
      throw new Error('minio config is required')
    }
    return new MinioService({
      accessKey: minioConfig.accessKeyId,
      secretKey: minioConfig.secretAccessKey, // was accessKeyId — wrong credential
      endPoint: minioConfig.endpoint,
      port: minioConfig.port,
      useSSL: minioConfig.useSSL
    })
  }
  return buildStorage(config, createRawMongoDBAdapter(dbUrl), (kind, config) => {
    if (kind === MinioService.config) {
      const c = config as MinioConfig
      return new MinioService({
        accessKey: c.accessKeyId,
        secretKey: c.secretAccessKey, // was accessKeyId — wrong credential
        endPoint: c.endpoint,
        port: c.port,
        useSSL: c.useSSL
      })
    } else {
      throw new Error('Unsupported storage kind:' + kind)
    }
  })
}

View File

@ -43,6 +43,7 @@
"@hcengineering/model": "^0.6.7", "@hcengineering/model": "^0.6.7",
"@hcengineering/server-token": "^0.6.7", "@hcengineering/server-token": "^0.6.7",
"@hcengineering/server-core": "^0.6.1", "@hcengineering/server-core": "^0.6.1",
"@hcengineering/server": "^0.6.4",
"@hcengineering/mongo": "^0.6.1", "@hcengineering/mongo": "^0.6.1",
"@hcengineering/minio": "^0.6.0" "@hcengineering/minio": "^0.6.0"
} }

View File

@ -25,14 +25,15 @@ import core, {
Hierarchy, Hierarchy,
IndexKind, IndexKind,
IndexOrder, IndexOrder,
MeasureContext,
ModelDb, ModelDb,
Tx, Tx,
WorkspaceId WorkspaceId
} from '@hcengineering/core' } from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import { consoleModelLogger, MigrateOperation, ModelLogger } from '@hcengineering/model' import { consoleModelLogger, MigrateOperation, ModelLogger } from '@hcengineering/model'
import { getWorkspaceDB } from '@hcengineering/mongo' import { getWorkspaceDB } from '@hcengineering/mongo'
import { StorageAdapter } from '@hcengineering/server-core' import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server'
import { StorageAdapter, StorageConfiguration } from '@hcengineering/server-core'
import { Db, Document, MongoClient } from 'mongodb' import { Db, Document, MongoClient } from 'mongodb'
import { connect } from './connect' import { connect } from './connect'
import toolPlugin from './plugin' import toolPlugin from './plugin'
@ -70,7 +71,7 @@ export class FileModelLogger implements ModelLogger {
* @public * @public
*/ */
export function prepareTools (rawTxes: Tx[]): { mongodbUri: string, storageAdapter: StorageAdapter, txes: Tx[] } { export function prepareTools (rawTxes: Tx[]): { mongodbUri: string, storageAdapter: StorageAdapter, txes: Tx[] } {
let minioEndpoint = process.env.MINIO_ENDPOINT const minioEndpoint = process.env.MINIO_ENDPOINT
if (minioEndpoint === undefined) { if (minioEndpoint === undefined) {
console.error('please provide minio endpoint') console.error('please provide minio endpoint')
process.exit(1) process.exit(1)
@ -94,28 +95,18 @@ export function prepareTools (rawTxes: Tx[]): { mongodbUri: string, storageAdapt
process.exit(1) process.exit(1)
} }
let minioPort = 9000 const storageConfig: StorageConfiguration = storageConfigFromEnv()
const sp = minioEndpoint.split(':')
if (sp.length > 1) {
minioEndpoint = sp[0]
minioPort = parseInt(sp[1])
}
const minio = new MinioService({ const storageAdapter = buildStorageFromConfig(storageConfig, mongodbUri)
endPoint: minioEndpoint,
port: minioPort,
useSSL: false,
accessKey: minioAccessKey,
secretKey: minioSecretKey
})
return { mongodbUri, storageAdapter: minio, txes: JSON.parse(JSON.stringify(rawTxes)) as Tx[] } return { mongodbUri, storageAdapter, txes: JSON.parse(JSON.stringify(rawTxes)) as Tx[] }
} }
/** /**
* @public * @public
*/ */
export async function initModel ( export async function initModel (
ctx: MeasureContext,
transactorUrl: string, transactorUrl: string,
workspaceId: WorkspaceId, workspaceId: WorkspaceId,
rawTxes: Tx[], rawTxes: Tx[],
@ -154,8 +145,8 @@ export async function initModel (
await createUpdateIndexes(connection, db, logger) await createUpdateIndexes(connection, db, logger)
logger.log('create minio bucket', { workspaceId }) logger.log('create minio bucket', { workspaceId })
if (!(await minio.exists(workspaceId))) { if (!(await minio.exists(ctx, workspaceId))) {
await minio.make(workspaceId) await minio.make(ctx, workspaceId)
} }
} catch (e: any) { } catch (e: any) {
logger.error('error', { error: e }) logger.error('error', { error: e })

View File

@ -65,6 +65,7 @@ services:
- SERVER_PORT=8083 - SERVER_PORT=8083
- SERVER_SECRET=secret - SERVER_SECRET=secret
- ACCOUNTS_URL=http://localhost:3003 - ACCOUNTS_URL=http://localhost:3003
- MONGO_URL=mongodb://mongodb:27018
- UPLOAD_URL=/files - UPLOAD_URL=/files
- TRANSACTOR_URL=ws://localhost:3334 - TRANSACTOR_URL=ws://localhost:3334
- ELASTIC_URL=http://elastic:9200 - ELASTIC_URL=http://elastic:9200