Mirror of https://github.com/hcengineering/platform.git, synced 2024-11-22 03:14:40 +03:00
UBERF-7126: Content type based storage configuration (#5781)
Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in:
parent fb18d708ce, commit 863e9032cf
(One file diff is suppressed because it is too large.)
@@ -18,7 +18,7 @@ import { plugin } from '@hcengineering/platform'
 import type { Arr, Class, Data, Doc, Interface, Mixin, Obj, Ref } from '../classes'
 import { AttachedDoc, ClassifierKind, DOMAIN_MODEL } from '../classes'
 import core from '../component'
-import type { TxCreateDoc, TxCUD } from '../tx'
+import type { TxCUD, TxCreateDoc } from '../tx'
 import { DOMAIN_TX, TxFactory } from '../tx'

 const txFactory = new TxFactory(core.account.System)

@@ -184,6 +184,14 @@ export function genMinModel (): TxCUD<Doc>[] {
     })
   )

+  txes.push(
+    createClass(core.class.Blob, {
+      label: 'Blob' as IntlString,
+      extends: core.class.Blob,
+      kind: ClassifierKind.CLASS
+    })
+  )
+
   txes.push(
     createClass(test.mixin.TestMixin, {
      label: 'TestMixin' as IntlString,
@@ -5,44 +5,94 @@ import { getBlobHref, getClient, getCurrentWorkspaceUrl, getFileUrl } from '.'
 import presentation from './plugin'

 type SupportedFormat = string
-const defaultSupportedFormats = 'avif,webp,heif, jpeg'
+const defaultSupportedFormats = 'avif,webp,heif,jpeg'

 export interface ProviderPreviewConfig {
+  // Identifier of the provider.
+  // If set to '', it can be applied to any provider, for example to exclude content types such as 'image/gif' from being processed by providers.
   providerId: string
+  // Preview URL.
+  // If '', preview is disabled for this config.
   previewUrl: string
+  // Supported file formats.
   formats: SupportedFormat[]

+  // Content type markers, checked by "contains". If passed, the config is only used with matching content types.
+  contentTypes?: string[]
 }

 export interface PreviewConfig {
   default?: ProviderPreviewConfig
-  previewers: Record<string, ProviderPreviewConfig>
+  previewers: Record<string, ProviderPreviewConfig[]>
 }

 const defaultPreview = (): ProviderPreviewConfig => ({
+  providerId: '',
   formats: ['avif', 'webp', 'jpg'],
   previewUrl: `/files/${getCurrentWorkspaceUrl()}?file=:blobId.:format&size=:size`
 })

 /**
  *
  * PREVIEW_CONFIG env variable format.
  * A `;` separated list of triples, providerName|previewUrl|supportedFormats.
  *
  * - providerName - a provider name, should be the same as in the storage configuration.
+ *   It could be empty, in which case matching is done by content type.
  * - previewUrl - a URL with :workspace, :blobId, :downloadFile, :size and :format placeholders; they will be replaced in the UI with the appropriate blob values.
  * - supportedFormats - a `,` separated list of file extensions.
+ * - contentTypes - a `,` separated list of content type patterns.
  */
 export function parsePreviewConfig (config?: string): PreviewConfig {
   if (config === undefined) {
     // TODO: Remove after all migrated
     return {
       default: defaultPreview(),
-      previewers: {}
+      previewers: {
+        '': [
+          {
+            providerId: '',
+            contentTypes: ['image/gif', 'image/apng', 'image/svg'], // Disable gif and apng format preview.
+            formats: [],
+            previewUrl: ''
+          }
+        ]
+      }
     }
   }
   const result: PreviewConfig = { previewers: {} }
-  const configs = config.split(';')
+  const nolineData = config
+    .split('\n')
+    .map((it) => it.trim())
+    .join(';')
+  const configs = nolineData.split(';')
   for (const c of configs) {
-    let [provider, url, formats] = c.split('|')
+    if (c === '') {
+      continue // Skip empty lines
+    }
+    let [provider, url, formats, contentTypes] = c.split('|').map((it) => it.trim())
     if (formats === undefined) {
       formats = defaultSupportedFormats
     }
-    const p = { previewUrl: url, formats: formats.split(',') }
+    const p: ProviderPreviewConfig = {
+      providerId: provider,
+      previewUrl: url,
+      formats: formats.split(',').map((it) => it.trim()),
+      // Allow preview only for images by default
+      contentTypes:
+        contentTypes !== undefined
+          ? contentTypes
+              .split(',')
+              .map((it) => it.trim())
+              .filter((it) => it !== '')
+          : ['image/']
+    }

     if (provider === '*') {
       result.default = p
     } else {
-      result.previewers[provider] = p
+      result.previewers[provider] = [...(result.previewers[provider] ?? []), p]
    }
  }
  return result
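For illustration, a minimal sketch of how parsePreviewConfig above handles a combined value; the config string and URL shape below are hypothetical, not taken from this commit:

// Hypothetical input: one default image previewer plus a gif-disabling entry.
const cfg = parsePreviewConfig(
  '*|/files/:workspace?file=:blobId.:format&size=:size|avif,webp|image/;' +
  '|||image/gif'
)
// cfg.default => { providerId: '*', previewUrl: '/files/...', formats: ['avif', 'webp'], contentTypes: ['image/'] }
// cfg.previewers[''] => [{ providerId: '', previewUrl: '', formats: [''], contentTypes: ['image/gif'] }]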
@@ -78,16 +128,37 @@ export async function getBlobSrcSet (_blob: Blob | undefined, file: Ref<Blob>, w
   return _blob !== undefined ? getSrcSet(_blob, width) : ''
 }

-export function getSrcSet (_blob: Blob, width?: number): string {
-  const blob = _blob as BlobLookup
-  if (blob.contentType === 'image/gif') {
-    return ''
+/**
+ * Select a content provider based on the content type.
+ */
+export function selectProvider (
+  blob: Blob,
+  providers: Array<ProviderPreviewConfig | undefined>
+): ProviderPreviewConfig | undefined {
+  const isMatched = (it: ProviderPreviewConfig): boolean =>
+    it.contentTypes === undefined || it.contentTypes.some((e) => blob.contentType === e || blob.contentType.includes(e))
+
+  let candidate: ProviderPreviewConfig | undefined
+  for (const p of providers) {
+    if (p !== undefined && isMatched(p)) {
+      if (p.previewUrl === '') {
+        // We found a disabling config entry, so return it.
+        return p
+      }
+      candidate = p
+    }
+  }
+
+  return candidate
+}
+
+export function getSrcSet (_blob: Blob, width?: number): string {
+  const blob = _blob as BlobLookup
   const c = getPreviewConfig()

-  const cfg = c.previewers[_blob.provider] ?? c.default
-  if (cfg === undefined) {
+  // Select a provider from the blob's own provider entries, the generic '' entries and the default.
+  const cfg = selectProvider(blob, [...(c.previewers[_blob.provider] ?? []), ...(c.previewers[''] ?? []), c.default])
+  if (cfg === undefined || cfg.previewUrl === '') {
    return '' // No previewer is available for blob
  }
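Given such a config, the selection order used by getSrcSet can be sketched as follows (the blob stub is a hypothetical value):

// Assuming the hypothetical cfg from the earlier sketch:
const gifBlob = { contentType: 'image/gif' } as Blob // hypothetical stub
const picked = selectProvider(gifBlob, [...(cfg.previewers[''] ?? []), cfg.default])
// picked?.previewUrl === '', so the disabling entry wins and getSrcSet returns ''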
@@ -120,10 +191,11 @@ function blobToSrcSet (
   if (width !== undefined) {
     result +=
       fu.replaceAll(':size', `${width}`) +
-      ', ' +
+      ' 1x , ' +
       fu.replaceAll(':size', `${width * 2}`) +
-      ', ' +
-      fu.replaceAll(':size', `${width * 3}`)
+      ' 2x, ' +
+      fu.replaceAll(':size', `${width * 3}`) +
+      ' 3x'
   } else {
     result += fu.replaceAll(':size', `${-1}`)
   }
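For reference, with width = 100 the corrected concatenation now yields density descriptors of the following shape (the URL is illustrative):

// Illustrative result for width = 100, with the previously missing 1x/2x/3x descriptors:
// '/files/ws?file=b.avif&size=100 1x , /files/ws?file=b.avif&size=200 2x, /files/ws?file=b.avif&size=300 3x'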
@@ -384,7 +384,7 @@ export function getCurrentWorkspaceUrl (): string {
 /**
  * @public
  */
-export function getFileUrl (file: Ref<PlatformBlob>, filename?: string): string {
+export function getFileUrl (file: Ref<PlatformBlob>, filename?: string, useToken?: boolean): string {
   if (file.includes('://')) {
     return file
   }
@@ -393,7 +393,8 @@ export function getFileUrl (file: Ref<PlatformBlob>, filename?: string): string
   if (!uploadUrl.includes('://')) {
     uploadUrl = concatLink(frontUrl ?? '', uploadUrl)
   }
-  return `${uploadUrl}/${getCurrentWorkspaceUrl()}${filename !== undefined ? '/' + encodeURIComponent(filename) : ''}?file=${file}`
+  const token = getMetadata(plugin.metadata.Token) ?? ''
+  return `${uploadUrl}/${getCurrentWorkspaceUrl()}${filename !== undefined ? '/' + encodeURIComponent(filename) : ''}?file=${file}${useToken === true ? `&token=${token}` : ''}`
 }

 export function sizeToWidth (size: string): number | undefined {
@@ -48,6 +48,11 @@ export interface BucketInfo {
 }

 export interface StorageAdapter {
+  // A set of content type patterns supported by this storage provider.
+  // If specified, it will limit the blobs that can be put into the selected provider.
+  // If not defined, the adapter is suited for any content type.
+  contentTypes?: string[]
+
   initialize: (ctx: MeasureContext, workspaceId: WorkspaceId) => Promise<void>

   close: () => Promise<void>
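A short sketch of declaring the new field; it mirrors the new aggregator unit test later in this diff (MemStorageAdapter is the in-memory test adapter added there):

// Scope a test adapter to collaborative documents only:
const ydocAdapter = new MemStorageAdapter()
ydocAdapter.contentTypes = ['application/ydoc']
// Adapters without contentTypes remain candidates for any content type.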
@@ -86,7 +91,12 @@ export interface StorageAdapterEx extends StorageAdapter {
   defaultAdapter: string
   adapters?: Map<string, StorageAdapter>

-  syncBlobFromStorage: (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string) => Promise<void>
+  syncBlobFromStorage: (
+    ctx: MeasureContext,
+    workspaceId: WorkspaceId,
+    objectName: string,
+    provider?: string
+  ) => Promise<void>
 }

 /**
@@ -199,3 +209,23 @@ export async function removeAllObjects (
   }
   await iterator.close()
 }
+
+export async function objectsToArray (
+  ctx: MeasureContext,
+  storage: StorageAdapter,
+  workspaceId: WorkspaceId,
+  prefix?: string
+): Promise<ListBlobResult[]> {
+  // List all files into an array.
+  const iterator = await storage.listStream(ctx, workspaceId, prefix)
+  const bulk: ListBlobResult[] = []
+  while (true) {
+    const obj = await iterator.next()
+    if (obj === undefined) {
+      break
+    }
+    bulk.push(obj)
+  }
+  await iterator.close()
+  return bulk
+}
server/core/src/__tests__/aggregator.spec.ts (new file, 59 lines)
@@ -0,0 +1,59 @@
+import { MeasureMetricsContext, type MeasureContext, type WorkspaceId } from '@hcengineering/core'
+import type { StorageAdapter } from '@hcengineering/storage'
+import { AggregatorStorageAdapter } from '../server'
+import { MemRawDBAdapter, MemStorageAdapter } from './memAdapters'
+
+describe('aggregator tests', () => {
+  function prepare1 (): {
+    mem1: MemStorageAdapter
+    mem2: MemStorageAdapter
+    aggr: AggregatorStorageAdapter
+    testCtx: MeasureContext
+    ws1: WorkspaceId
+  } {
+    const mem1 = new MemStorageAdapter()
+    mem1.contentTypes = ['application/ydoc']
+
+    const mem2 = new MemStorageAdapter()
+    const adapters = new Map<string, StorageAdapter>()
+    adapters.set('mem1', mem1)
+    adapters.set('mem2', mem2)
+    const blobs = new MemRawDBAdapter()
+    const aggr = new AggregatorStorageAdapter(adapters, 'mem2', blobs)
+
+    const testCtx = new MeasureMetricsContext('test', {})
+    const ws1: WorkspaceId = { name: 'ws1', productId: '' }
+    return { mem1, mem2, aggr, ws1, testCtx }
+  }
+  it('choose a proper storage', async () => {
+    const { aggr, ws1, testCtx } = prepare1()
+
+    // Test default provider
+    await aggr.put(testCtx, ws1, 'test', 'data', 'text/plain')
+    const stat = await aggr.stat(testCtx, ws1, 'test')
+    expect(stat?.provider).toEqual('mem2')
+
+    // Test content typed provider
+    await aggr.put(testCtx, ws1, 'test2', 'data', 'application/ydoc')
+    const stat2 = await aggr.stat(testCtx, ws1, 'test2')
+    expect(stat2?.provider).toEqual('mem1')
+  })
+  it('reuse existing storage', async () => {
+    const { mem1, aggr, ws1, testCtx } = prepare1()
+
+    // Test default provider
+    await mem1.put(testCtx, ws1, 'test', 'data', 'text/plain')
+    await aggr.syncBlobFromStorage(testCtx, ws1, 'test', 'mem1')
+
+    const stat = await aggr.stat(testCtx, ws1, 'test')
+    expect(stat?.provider).toEqual('mem1')
+
+    // Test content typed provider
+    await aggr.put(testCtx, ws1, 'test', 'data2', 'text/plain')
+    const stat2 = await aggr.stat(testCtx, ws1, 'test')
+    expect(stat2?.provider).toEqual('mem1')
+
+    const dta = Buffer.concat(await aggr.read(testCtx, ws1, 'test')).toString()
+    expect(dta).toEqual('data2')
+  })
+})
server/core/src/__tests__/memAdapters.ts (new file, 255 lines)
@@ -0,0 +1,255 @@
+import core, {
+  Hierarchy,
+  ModelDb,
+  TxProcessor,
+  toFindResult,
+  type Blob,
+  type BlobLookup,
+  type Class,
+  type Doc,
+  type DocumentQuery,
+  type DocumentUpdate,
+  type Domain,
+  type FindOptions,
+  type FindResult,
+  type MeasureContext,
+  type Ref,
+  type WorkspaceId,
+  type WorkspaceIdWithUrl
+} from '@hcengineering/core'
+import { genMinModel } from '@hcengineering/core/src/__tests__/minmodel'
+import type {
+  BlobLookupResult,
+  BlobStorageIterator,
+  BucketInfo,
+  StorageAdapter,
+  UploadedObjectInfo
+} from '@hcengineering/storage'
+import { Readable } from 'stream'
+import type { RawDBAdapter, RawDBAdapterStream } from '../adapter'
+
+export class MemStorageAdapter implements StorageAdapter {
+  contentTypes?: string[] | undefined
+  files = new Map<string, Blob & { content: Buffer, workspace: string }>()
+
+  async initialize (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {}
+
+  async close (): Promise<void> {}
+
+  async exists (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<boolean> {
+    return true
+  }
+
+  async make (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {}
+
+  async delete (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {}
+
+  async listBuckets (ctx: MeasureContext, productId: string): Promise<BucketInfo[]> {
+    const workspaces = new Set(Array.from(this.files.values()).map((it) => it.workspace))
+    return Array.from(workspaces).map((it) => ({
+      name: it,
+      delete: async () => {
+        await this.delete(ctx, { name: it, productId: '' })
+      },
+      list: () => this.listStream(ctx, { name: it, productId: '' })
+    }))
+  }
+
+  async remove (ctx: MeasureContext, workspaceId: WorkspaceId, objectNames: string[]): Promise<void> {
+    for (const k of objectNames) {
+      this.files.delete(workspaceId.name + '/' + k)
+    }
+  }
+
+  async listStream (
+    ctx: MeasureContext,
+    workspaceId: WorkspaceId,
+    prefix?: string | undefined
+  ): Promise<BlobStorageIterator> {
+    const files = Array.from(this.files.values()).filter((it) => it.workspace === workspaceId.name)
+    return {
+      next: async () => {
+        return files.shift()
+      },
+      close: async () => {}
+    }
+  }
+
+  async stat (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Blob | undefined> {
+    return this.files.get(workspaceId.name + '/' + objectName)
+  }
+
+  async get (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Readable> {
+    const readable = new Readable()
+    readable._read = () => {}
+    const content = this.files.get(workspaceId.name + '/' + objectName)?.content
+    readable.push(content)
+    readable.push(null)
+    return readable
+  }
+
+  async put (
+    ctx: MeasureContext,
+    workspaceId: WorkspaceId,
+    objectName: string,
+    stream: string | Readable | Buffer,
+    contentType: string,
+    size?: number | undefined
+  ): Promise<UploadedObjectInfo> {
+    const buffer: Buffer[] = []
+    if (stream instanceof Buffer) {
+      buffer.push(stream)
+    } else if (typeof stream === 'string') {
+      buffer.push(Buffer.from(stream))
+    } else {
+      // Collect all chunks; the promise resolves on 'end'.
+      await new Promise<void>((resolve, reject) => {
+        stream.on('end', () => {
+          resolve()
+        })
+        stream.on('error', (error) => {
+          reject(error)
+        })
+        stream.on('data', (data) => {
+          buffer.push(data)
+        })
+      })
+    }
+    const data = Buffer.concat(buffer)
+    const dta = {
+      _class: core.class.Blob,
+      _id: objectName as any,
+      contentType,
+      size: data.length,
+      content: data,
+      etag: objectName,
+      modifiedBy: core.account.System,
+      modifiedOn: Date.now(),
+      provider: '_test',
+      space: '' as any,
+      storageId: objectName,
+      version: null,
+      workspace: workspaceId.name
+    }
+    this.files.set(workspaceId.name + '/' + objectName, dta)
+    return {
+      etag: objectName,
+      versionId: null
+    }
+  }
+
+  async read (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Buffer[]> {
+    const content = this.files.get(workspaceId.name + '/' + objectName)?.content
+    if (content === undefined) {
+      throw new Error('NoSuchKey')
+    }
+    return [content]
+  }
+
+  partial (
+    ctx: MeasureContext,
+    workspaceId: WorkspaceId,
+    objectName: string,
+    offset: number,
+    length?: number | undefined
+  ): Promise<Readable> {
+    // Partial reads are not supported by the memory adapter.
+    throw new Error('NoSuchKey')
+  }
+
+  async lookup (ctx: MeasureContext, workspaceId: WorkspaceIdWithUrl, docs: Blob[]): Promise<BlobLookupResult> {
+    return { lookups: docs as unknown as BlobLookup[] }
+  }
+}
+
+export class MemRawDBAdapter implements RawDBAdapter {
+  hierarchy: Hierarchy
+  workspaces = new Map<string, ModelDb>()
+  constructor () {
+    this.hierarchy = new Hierarchy()
+    const minModel = genMinModel()
+    minModel.forEach((it) => {
+      this.hierarchy.tx(it)
+    })
+  }
+
+  async find<T extends Doc>(
+    ctx: MeasureContext,
+    workspace: WorkspaceId,
+    domain: Domain,
+    query: { _class: Ref<Class<T>> } & DocumentQuery<T>,
+    options?: Omit<FindOptions<T>, 'projection' | 'lookup'>
+  ): Promise<FindResult<T>> {
+    const db = this.workspaces.get(workspace.name)
+    if (db === undefined) {
+      return toFindResult([])
+    }
+    return await db.findAll(query._class as Ref<Class<T>>, query, options)
+  }
+
+  async findStream<T extends Doc>(
+    ctx: MeasureContext,
+    workspace: WorkspaceId,
+    domain: Domain,
+    query: { _class: Ref<Class<T>> } & DocumentQuery<T>,
+    options?: Omit<FindOptions<T>, 'projection' | 'lookup'>
+  ): Promise<RawDBAdapterStream<T>> {
+    const db = this.workspaces.get(workspace.name)
+    let result: T[] = []
+    if (db !== undefined) {
+      result = await db.findAll(query._class as Ref<Class<T>>, query, options)
+    }
+    return {
+      next: async () => {
+        const doc = result.shift()
+        return doc
+      },
+      close: async () => {}
+    }
+  }
+
+  async upload<T extends Doc>(ctx: MeasureContext, workspace: WorkspaceId, domain: Domain, docs: T[]): Promise<void> {
+    let db = this.workspaces.get(workspace.name)
+    if (db === undefined) {
+      db = new ModelDb(this.hierarchy)
+      this.workspaces.set(workspace.name, db)
+    }
+    for (const d of docs) {
+      db.addDoc(d)
+    }
+  }
+
+  async update<T extends Doc>(
+    ctx: MeasureContext,
+    workspace: WorkspaceId,
+    domain: Domain,
+    docs: Map<Ref<T>, DocumentUpdate<T>>
+  ): Promise<void> {
+    let db = this.workspaces.get(workspace.name)
+    if (db === undefined) {
+      db = new ModelDb(this.hierarchy)
+      this.workspaces.set(workspace.name, db)
+    }
+    for (const [du, upd] of docs.entries()) {
+      const doc = db.getObject(du)
+      TxProcessor.applyUpdate<T>(doc, upd)
+    }
+  }
+
+  async clean<T extends Doc>(
+    ctx: MeasureContext,
+    workspace: WorkspaceId,
+    domain: Domain,
+    docs: Ref<T>[]
+  ): Promise<void> {
+    const db = this.workspaces.get(workspace.name)
+    if (db === undefined) {
+      return
+    }
+    for (const d of docs) {
+      db.delDoc(d)
+    }
+  }
+
+  async close (): Promise<void> {}
+}
@@ -65,14 +65,14 @@ export interface RawDBAdapter {
     ctx: MeasureContext,
     workspace: WorkspaceId,
     domain: Domain,
-    query: DocumentQuery<T>,
+    query: { _class: Ref<Class<T>> } & DocumentQuery<T>,
     options?: Omit<FindOptions<T>, 'projection' | 'lookup'>
   ) => Promise<FindResult<T>>
   findStream: <T extends Doc>(
     ctx: MeasureContext,
     workspace: WorkspaceId,
     domain: Domain,
-    query: DocumentQuery<T>,
+    query: { _class: Ref<Class<T>> } & DocumentQuery<T>,
     options?: Omit<FindOptions<T>, 'projection' | 'lookup'>
   ) => Promise<RawDBAdapterStream<T>>
   upload: <T extends Doc>(ctx: MeasureContext, workspace: WorkspaceId, domain: Domain, docs: T[]) => Promise<void>
@@ -36,29 +36,44 @@ class NoSuchKeyError extends Error {
 export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterEx {
   constructor (
     readonly adapters: Map<string, StorageAdapter>,
-    readonly defaultAdapter: string, // Adapter will be used to put new documents into
+    readonly defaultAdapter: string, // Adapter will be used to put new documents into, if not matched by content type
     readonly dbAdapter: RawDBAdapter
   ) {}

-  async syncBlobFromStorage (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<void> {
-    const current = await this.dbAdapter.find<Blob>(
-      ctx,
-      workspaceId,
-      DOMAIN_BLOB,
-      { _class: core.class.Blob, _id: objectName as Ref<Blob> },
-      { limit: 1 }
-    )
-    const provider = this.adapters.get(current[0]?.provider ?? this.defaultAdapter)
+  async syncBlobFromStorage (
+    ctx: MeasureContext,
+    workspaceId: WorkspaceId,
+    objectName: string,
+    providerId?: string
+  ): Promise<void> {
+    let current: Blob | undefined = (
+      await this.dbAdapter.find<Blob>(
+        ctx,
+        workspaceId,
+        DOMAIN_BLOB,
+        { _class: core.class.Blob, _id: objectName as Ref<Blob> },
+        { limit: 1 }
+      )
+    ).shift()
+    if (current === undefined && providerId !== undefined) {
+      current = await this.adapters.get(providerId)?.stat(ctx, workspaceId, objectName)
+      if (current !== undefined) {
+        current.provider = providerId
+      }
+    }
+
+    const provider = this.adapters.get(current?.provider ?? this.defaultAdapter)
     if (provider === undefined) {
       throw new NoSuchKeyError('No such provider found')
     }
     const stat = await provider.stat(ctx, workspaceId, objectName)
     if (stat !== undefined) {
-      stat.provider = current[0]?.provider ?? this.defaultAdapter
-      if (current[0] !== undefined) {
-        await this.dbAdapter.clean(ctx, workspaceId, DOMAIN_BLOB, [current[0]._id])
+      stat.provider = current?.provider ?? this.defaultAdapter
+      if (current !== undefined) {
+        await this.dbAdapter.clean(ctx, workspaceId, DOMAIN_BLOB, [current._id])
       }
       await this.dbAdapter.upload<Blob>(ctx, workspaceId, DOMAIN_BLOB, [stat])
       // TODO: We need to send a notification that the Blob has changed.
     }
  }

@@ -202,6 +217,27 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
     return await provider.read(ctx, workspaceId, stat.storageId)
   }

+  selectProvider (
+    forceProvider: string | undefined,
+    contentType: string
+  ): { adapter: StorageAdapter | undefined, provider: string } {
+    if (forceProvider !== undefined) {
+      return { adapter: this.adapters.get(forceProvider), provider: forceProvider }
+    }
+    // Try to select a provider based on content type matching.
+    for (const [provider, adapter] of this.adapters.entries()) {
+      if (adapter.contentTypes === undefined) {
+        continue
+      }
+      if (adapter.contentTypes.some((it) => contentType.includes(it))) {
+        // We have a matching content type for this adapter.
+        return { adapter, provider }
+      }
+    }
+
+    return { adapter: this.adapters.get(this.defaultAdapter), provider: this.defaultAdapter }
+  }
+
   async put (
     ctx: MeasureContext,
     workspaceId: WorkspaceId,
@@ -221,15 +257,15 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
       )
     ).shift()

-    const provider = this.adapters.get(stat?.provider ?? this.defaultAdapter)
-    if (provider === undefined) {
+    const { provider, adapter } = this.selectProvider(stat?.provider, contentType)
+    if (adapter === undefined) {
       throw new NoSuchKeyError('No such provider found')
     }

-    const result = await provider.put(ctx, workspaceId, objectName, stream, contentType, size)
+    const result = await adapter.put(ctx, workspaceId, objectName, stream, contentType, size)

     if (size === undefined || size === 0) {
-      const docStats = await provider.stat(ctx, workspaceId, objectName)
+      const docStats = await adapter.stat(ctx, workspaceId, objectName)
       if (docStats !== undefined) {
         if (contentType !== docStats.contentType) {
           contentType = docStats.contentType
@@ -244,7 +280,7 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
       modifiedBy: core.account.System,
       modifiedOn: Date.now(),
       space: core.space.Configuration,
-      provider: this.defaultAdapter,
+      provider,
      storageId: objectName,
      size: size ?? 0,
      contentType,
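Condensed from the new aggregator.spec.ts earlier in this diff, a sketch of the routing behaviour this method produces (ctx and ws1 are assumed to be prepared as in the test):

const adapters = new Map<string, StorageAdapter>()
const mem1 = new MemStorageAdapter()
mem1.contentTypes = ['application/ydoc'] // only matched content types land here
adapters.set('mem1', mem1)
adapters.set('mem2', new MemStorageAdapter())
const aggr = new AggregatorStorageAdapter(adapters, 'mem2', new MemRawDBAdapter())

// 'application/ydoc' substring-matches mem1; everything else falls back to the default.
await aggr.put(ctx, ws1, 'doc', 'data', 'application/ydoc') // stat -> provider 'mem1'
await aggr.put(ctx, ws1, 'img', 'data', 'image/png') // stat -> provider 'mem2'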
@@ -458,6 +458,8 @@ export interface StorageConfig {
   kind: string
   endpoint: string
   port?: number
+
+  contentTypes?: string[]
 }

 export interface StorageConfiguration {
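An illustrative configuration using the new field; every value below is an assumption, not taken from this commit:

const storageConfig: StorageConfiguration = {
  default: 'minio',
  storages: [
    { kind: 'minio', name: 'minio', endpoint: 'minio', port: 9000, accessKey: 'k', secretKey: 's' },
    {
      kind: 'minio',
      name: 'ydocs',
      endpoint: 'minio',
      port: 9000,
      accessKey: 'k',
      secretKey: 's',
      contentTypes: ['application/ydoc'] // scope this provider to collaborative documents
    }
  ]
}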
@@ -22,13 +22,15 @@ Front service is suited to deliver application bundles and resource assets, it a

 ## Preview service configuration

-PREVIEW_CONFIG env variable foremat.
+PREVIEW_CONFIG env variable format.

 A `;` separated list of triples, providerName|previewUrl|supportedFormats.

 - providerName - a provider name, should be the same as in the storage configuration.
+  It could be empty, in which case matching is done by content type.
 - previewUrl - a URL with :workspace, :blobId, :downloadFile, :size and :format placeholders; they will be replaced in the UI with the appropriate blob values.
 - supportedFormats - a `,` separated list of file extensions.
+- contentTypes - a `,` separated list of content type patterns.

 PREVIEW_CONFIG=*|https://front.hc.engineering/files/:workspace/api/preview/?format=:format&width=:size&image=:downloadFile
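With the new optional fields, a multi-line value could, for example, scope the default previewer to images and disable gif previews entirely (the URL and values are illustrative):

PREVIEW_CONFIG="*|https://front.example.com/files/:workspace/api/preview/?format=:format&width=:size&image=:downloadFile|avif,webp|image/
|||image/gif"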
@@ -331,7 +331,9 @@ export function start (
     try {
       const cookies = ((req?.headers?.cookie as string) ?? '').split(';').map((it) => it.trim().split('='))

-      const token = cookies.find((it) => it[0] === 'presentation-metadata-Token')?.[1]
+      const token =
+        cookies.find((it) => it[0] === 'presentation-metadata-Token')?.[1] ??
+        (req.query.token as string | undefined)
       payload = token !== undefined ? decodeToken(token) : payload

      let uuid = req.params.file ?? req.query.file
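Combined with the getFileUrl change above, a direct file link can now carry its own authorization; an illustrative URL (workspace, blob id and token are made up):

https://front.example.com/files/my-workspace?file=blob-id-123&token=<jwt>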
@@ -38,6 +38,6 @@
     "@hcengineering/core": "^0.6.32",
     "@hcengineering/platform": "^0.6.11",
     "@hcengineering/server-core": "^0.6.1",
-    "minio": "^7.0.26"
+    "minio": "^8.0.0"
   }
 }
@@ -1,5 +1,5 @@
 //
-// Copyright © 2022 Hardcore Engineering Inc.
+// Copyright © 2024 Hardcore Engineering Inc.
 //
 // Licensed under the Eclipse Public License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License. You may
@@ -12,13 +12,66 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 //
-import { getWorkspaceId } from '@hcengineering/core'
-import { getBucketId } from '..'

+import { MeasureMetricsContext, generateId } from '@hcengineering/core'
+import { objectsToArray, type StorageConfiguration } from '@hcengineering/server-core'
+import { MinioService, processConfigFromEnv, type MinioConfig } from '..'
+
 describe('minio operations', () => {
-  it('check dot', async () => {
-    const wsid = getWorkspaceId('my-workspace', 'product')
+  const config: StorageConfiguration = { default: 'minio', storages: [] }
+  const minioConfigVar = processConfigFromEnv(config)
+  if (minioConfigVar !== undefined || config.storages[0] === undefined) {
+    console.error('No Minio config env is configured:' + minioConfigVar)
+    it.skip('No Minio config env is configured', async () => {})
+    return
+  }
+  const toolCtx = new MeasureMetricsContext('test', {})
+  it('check root bucket', async () => {
+    const minioService = new MinioService({ ...(config.storages[0] as MinioConfig), rootBucket: 'test-bucket' })

-    expect(getBucketId(wsid)).toEqual('my-workspace.product')
+    let existingTestBuckets = await minioService.listBuckets(toolCtx, '')
+    // Delete old buckets
+    for (const b of existingTestBuckets) {
+      await b.delete()
+    }
+
+    const genWorkspaceId1 = generateId()
+    const genWorkspaceId2 = generateId()
+
+    expect(genWorkspaceId1).not.toEqual(genWorkspaceId2)
+
+    const ws1 = { name: genWorkspaceId1, productId: '' }
+    const ws2 = { name: genWorkspaceId2, productId: '' }
+    await minioService.make(toolCtx, ws1)
+    await minioService.make(toolCtx, ws2)
+
+    const v1 = generateId()
+    await minioService.put(toolCtx, ws1, 'obj1.txt', v1, 'text/plain')
+    await minioService.put(toolCtx, ws2, 'obj2.txt', v1, 'text/plain')
+
+    const w1Objects = await objectsToArray(toolCtx, minioService, ws1)
+    expect(w1Objects.map((it) => it._id)).toEqual(['obj1.txt'])
+
+    const w2Objects = await objectsToArray(toolCtx, minioService, ws2)
+    expect(w2Objects.map((it) => it._id)).toEqual(['obj2.txt'])
+
+    await minioService.put(toolCtx, ws1, 'obj1.txt', 'obj1', 'text/plain')
+    await minioService.put(toolCtx, ws1, 'obj2.txt', 'obj2', 'text/plain')
+
+    const w1Objects2 = await objectsToArray(toolCtx, minioService, ws1)
+    expect(w1Objects2.map((it) => it._id)).toEqual(['obj1.txt', 'obj2.txt'])
+
+    const data = Buffer.concat(await minioService.read(toolCtx, ws1, 'obj1.txt'))
+
+    expect('obj1').toEqual(data.toString())
+
+    existingTestBuckets = await minioService.listBuckets(toolCtx, '')
+    expect(existingTestBuckets.length).toEqual(2)
+    // Delete old buckets
+    for (const b of existingTestBuckets) {
+      await b.delete()
+    }
+    existingTestBuckets = await minioService.listBuckets(toolCtx, '')
+    expect(existingTestBuckets.length).toEqual(0)
+  })
 })
@@ -18,6 +18,7 @@ import { Client, type BucketItem, type BucketStream } from 'minio'
 import core, {
   concatLink,
   toWorkspaceString,
+  withContext,
   type Blob,
   type BlobLookup,
   type MeasureContext,
@@ -35,23 +36,23 @@ import serverCore, {
   type ListBlobResult,
   type StorageAdapter,
   type StorageConfig,
+  type StorageConfiguration,
   type UploadedObjectInfo
 } from '@hcengineering/server-core'
 import { type Readable } from 'stream'

-/**
- * @public
- */
-export function getBucketId (workspaceId: WorkspaceId): string {
-  return toWorkspaceString(workspaceId, '.')
-}
-
 export interface MinioConfig extends StorageConfig {
   kind: 'minio'
   accessKey: string
   secretKey: string
   useSSL?: string
   region?: string
+
+  // If defined, all resources will be placed inside the selected root bucket.
+  rootBucket?: string
+
+  // A prefix string to be added to the bucketId in case rootBucket is not used.
+  bucketPrefix?: string
 }

 /**
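For illustration, the resulting bucket and key layouts (bucket and workspace names below are assumptions):

// Assuming a workspace named 'my-workspace' (empty productId):
// rootBucket: 'huly-blobs'       -> bucket 'huly-blobs',       key 'my-workspace/<objectName>'
// bucketPrefix: 'dev-' (no root) -> bucket 'dev-my-workspace', key '<objectName>'
// neither                        -> bucket 'my-workspace',     key '<objectName>'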
@@ -60,7 +61,8 @@ export interface MinioConfig extends StorageConfig {
 export class MinioService implements StorageAdapter {
   static config = 'minio'
   client: Client
-  constructor (readonly opt: Omit<MinioConfig, 'name' | 'kind'>) {
+  contentTypes?: string[]
+  constructor (readonly opt: MinioConfig) {
     this.client = new Client({
       endPoint: opt.endpoint,
       accessKey: opt.accessKey,
@@ -69,8 +71,11 @@ export class MinioService implements StorageAdapter {
       port: opt.port ?? 9000,
       useSSL: opt.useSSL === 'true'
     })
+    this.contentTypes = opt.contentTypes
   }

+  async initialize (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {}
+
   async lookup (ctx: MeasureContext, workspaceId: WorkspaceIdWithUrl, docs: Blob[]): Promise<BlobLookupResult> {
     const frontUrl = getMetadata(serverCore.metadata.FrontUrl) ?? ''
     for (const d of docs) {
@@ -81,52 +86,120 @@ export class MinioService implements StorageAdapter {
     return { lookups: docs as BlobLookup[] }
   }

-  async initialize (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {}
+  /**
+   * @public
+   */
+  getBucketId (workspaceId: WorkspaceId): string {
+    return this.opt.rootBucket ?? (this.opt.bucketPrefix ?? '') + toWorkspaceString(workspaceId, '.')
+  }
+
-  async listBuckets (ctx: MeasureContext, productId: string): Promise<BucketInfo[]> {
-    const productPostfix = getBucketId({
-      name: '',
-      productId
-    })
-    const buckets = await this.client.listBuckets()
-    return buckets
-      .filter((it) => it.name.endsWith(productPostfix))
-      .map((it) => {
-        let name = it.name
-        name = name.slice(0, name.length - productPostfix.length)
-        return {
-          name,
-          delete: async () => {
-            await this.delete(ctx, { name, productId })
-          },
-          list: async () => await this.listStream(ctx, { name, productId })
-        }
-      })
+  getBucketFolder (workspaceId: WorkspaceId): string {
+    return toWorkspaceString(workspaceId, '.')
   }

   async close (): Promise<void> {}

   async exists (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<boolean> {
-    return await this.client.bucketExists(getBucketId(workspaceId))
+    return await this.client.bucketExists(this.getBucketId(workspaceId))
   }

+  @withContext('make')
   async make (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {
-    await this.client.makeBucket(getBucketId(workspaceId), this.opt.region ?? 'us-east-1')
+    try {
+      await this.client.makeBucket(this.getBucketId(workspaceId), this.opt.region ?? 'us-east-1')
+    } catch (err: any) {
+      if (err.code === 'BucketAlreadyOwnedByYou') {
+        return
+      }
+      throw err
+    }
   }

+  async listBuckets (ctx: MeasureContext, productId: string): Promise<BucketInfo[]> {
+    if (this.opt.rootBucket !== undefined) {
+      const info = new Map<string, BucketInfo>()
+      const stream = this.client.listObjects(this.opt.rootBucket, '', false)
+      await new Promise<void>((resolve, reject) => {
+        stream.on('end', () => {
+          stream.destroy()
+          resolve()
+        })
+        stream.on('error', (err) => {
+          console.error(err)
+          stream?.destroy()
+          reject(err)
+        })
+        stream.on('data', (data) => {
+          const wsName = data.prefix?.split('/')?.[0]
+          if (wsName !== undefined && !info.has(wsName)) {
+            info.set(wsName, {
+              name: wsName,
+              delete: async () => {
+                await this.delete(ctx, { name: wsName, productId })
+              },
+              list: async () => await this.listStream(ctx, { name: wsName, productId })
+            })
+          }
+        })
+      })
+      stream.destroy()
+      return Array.from(info.values())
+    } else {
+      const productPostfix = this.getBucketFolder({
+        name: '',
+        productId
+      })
+      const buckets = await this.client.listBuckets()
+      return buckets
+        .filter((it) => it.name.endsWith(productPostfix))
+        .map((it) => {
+          let name = it.name
+          name = name.slice(0, name.length - productPostfix.length)
+          return {
+            name,
+            delete: async () => {
+              await this.delete(ctx, { name, productId })
+            },
+            list: async () => await this.listStream(ctx, { name, productId })
+          }
+        })
+    }
+  }
+
+  getDocumentKey (workspace: WorkspaceId, name: string): string {
+    return this.opt.rootBucket === undefined ? name : `${this.getBucketFolder(workspace)}/${name}`
+  }
+
+  @withContext('remove')
   async remove (ctx: MeasureContext, workspaceId: WorkspaceId, objectNames: string[]): Promise<void> {
-    await this.client.removeObjects(getBucketId(workspaceId), objectNames)
+    const toRemove = objectNames.map((it) => this.getDocumentKey(workspaceId, it))
+    await this.client.removeObjects(this.getBucketId(workspaceId), toRemove)
   }

+  @withContext('delete')
   async delete (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {
+    try {
+      await removeAllObjects(ctx, this, workspaceId)
+    } catch (err: any) {
+      ctx.error('failed to clean all objects', { error: err })
+    }
-    await this.client.removeBucket(getBucketId(workspaceId))
+    if (this.opt.rootBucket === undefined) {
+      // Also delete the bucket itself
+      await this.client.removeBucket(this.getBucketId(workspaceId))
+    }
   }

+  stripPrefix (prefix: string | undefined, key: string): string {
+    if (prefix !== undefined && key.startsWith(prefix)) {
+      return key.slice(prefix.length)
+    }
+    return key
+  }
+
+  rootPrefix (workspaceId: WorkspaceId): string | undefined {
+    return this.opt.rootBucket !== undefined ? this.getBucketFolder(workspaceId) + '/' : undefined
+  }
+
+  @withContext('listStream')
  async listStream (
    ctx: MeasureContext,
    workspaceId: WorkspaceId,
@@ -134,16 +207,21 @@ export class MinioService implements StorageAdapter {
   ): Promise<BlobStorageIterator> {
     let hasMore = true
     let stream: BucketStream<BucketItem> | undefined
+    let done = false
     let error: Error | undefined
     let onNext: () => void = () => {}
     const buffer: ListBlobResult[] = []

+    const rootPrefix = this.rootPrefix(workspaceId)
     return {
       next: async (): Promise<ListBlobResult | undefined> => {
         try {
-          if (stream === undefined) {
-            stream = this.client.listObjects(getBucketId(workspaceId), prefix, true)
+          if (stream === undefined && !done) {
+            const rprefix = rootPrefix !== undefined ? rootPrefix + (prefix ?? '') : prefix ?? ''
+            stream = this.client.listObjects(this.getBucketId(workspaceId), rprefix, true)
             stream.on('end', () => {
               stream?.destroy()
+              done = true
+              stream = undefined
               hasMore = false
               onNext()
@@ -157,16 +235,17 @@ export class MinioService implements StorageAdapter {
             })
             stream.on('data', (data) => {
               if (data.name !== undefined) {
+                const _id = this.stripPrefix(rootPrefix, data.name)
                 buffer.push({
-                  _id: data.name as Ref<Blob>,
+                  _id: _id as Ref<Blob>,
                   _class: core.class.Blob,
                   etag: data.etag,
                   size: data.size,
-                  provider: 'minio',
+                  provider: this.opt.name,
                   space: core.space.Configuration,
                   modifiedBy: core.account.ConfigUser,
                   modifiedOn: data.lastModified.getTime(),
-                  storageId: data.name
+                  storageId: _id
                 })
               }
               onNext()
@@ -207,14 +286,19 @@ export class MinioService implements StorageAdapter {
     }
   }

+  @withContext('stat')
   async stat (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Blob | undefined> {
     try {
-      const result = await this.client.statObject(getBucketId(workspaceId), objectName)
+      const result = await this.client.statObject(
+        this.getBucketId(workspaceId),
+        this.getDocumentKey(workspaceId, objectName)
+      )
+      const rootPrefix = this.rootPrefix(workspaceId)
       return {
         provider: '',
         _class: core.class.Blob,
-        _id: objectName as Ref<Blob>,
-        storageId: objectName,
+        _id: this.stripPrefix(rootPrefix, objectName) as Ref<Blob>,
+        storageId: this.stripPrefix(rootPrefix, objectName),
         contentType: result.metaData['content-type'],
         size: result.size,
         etag: result.etag,
@@ -228,10 +312,12 @@ export class MinioService implements StorageAdapter {
     }
   }

+  @withContext('get')
   async get (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Readable> {
-    return await this.client.getObject(getBucketId(workspaceId), objectName)
+    return await this.client.getObject(this.getBucketId(workspaceId), this.getDocumentKey(workspaceId, objectName))
   }

+  @withContext('put')
   async put (
     ctx: MeasureContext,
     workspaceId: WorkspaceId,
@@ -240,13 +326,23 @@ export class MinioService implements StorageAdapter {
     contentType: string,
     size?: number
   ): Promise<UploadedObjectInfo> {
-    return await this.client.putObject(getBucketId(workspaceId), objectName, stream, size, {
-      'Content-Type': contentType
-    })
+    return await this.client.putObject(
+      this.getBucketId(workspaceId),
+      this.getDocumentKey(workspaceId, objectName),
+      stream,
+      size,
+      {
+        'Content-Type': contentType
+      }
+    )
   }

-  async read (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise<Buffer[]> {
-    const data = await this.client.getObject(getBucketId(workspaceId), name)
+  @withContext('read')
+  async read (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Buffer[]> {
+    const data = await this.client.getObject(
+      this.getBucketId(workspaceId),
+      this.getDocumentKey(workspaceId, objectName)
+    )
     const chunks: Buffer[] = []

     await new Promise((resolve, reject) => {
@@ -269,6 +365,7 @@ export class MinioService implements StorageAdapter {
     return chunks
   }

+  @withContext('partial')
   async partial (
     ctx: MeasureContext,
     workspaceId: WorkspaceId,
@@ -276,6 +373,55 @@ export class MinioService implements StorageAdapter {
     offset: number,
     length?: number
   ): Promise<Readable> {
-    return await this.client.getPartialObject(getBucketId(workspaceId), objectName, offset, length)
+    return await this.client.getPartialObject(
+      this.getBucketId(workspaceId),
+      this.getDocumentKey(workspaceId, objectName),
+      offset,
+      length
+    )
   }
 }
+
+export function processConfigFromEnv (storageConfig: StorageConfiguration): string | undefined {
+  let minioEndpoint = process.env.MINIO_ENDPOINT
+  if (minioEndpoint === undefined) {
+    return 'MINIO_ENDPOINT'
+  }
+  const minioAccessKey = process.env.MINIO_ACCESS_KEY
+  if (minioAccessKey === undefined) {
+    return 'MINIO_ACCESS_KEY'
+  }
+
+  let minioPort = 9000
+  const sp = minioEndpoint.split(':')
+  if (sp.length > 1) {
+    minioEndpoint = sp[0]
+    minioPort = parseInt(sp[1])
+  }
+
+  const minioSecretKey = process.env.MINIO_SECRET_KEY
+  if (minioSecretKey === undefined) {
+    return 'MINIO_SECRET_KEY'
+  }
+
+  const minioConfig: MinioConfig = {
+    kind: 'minio',
+    name: 'minio',
+    port: minioPort,
+    region: 'us-east-1',
+    useSSL: 'false',
+    endpoint: minioEndpoint,
+    accessKey: minioAccessKey,
+    secretKey: minioSecretKey
+  }
+  storageConfig.storages.push(minioConfig)
+  storageConfig.default = 'minio'
+}
+
+export function addMinioFallback (storageConfig: StorageConfiguration): void {
+  const required = processConfigFromEnv(storageConfig)
+  if (required !== undefined) {
+    console.error(`Required ${required} env to be configured`)
+    process.exit(1)
+  }
+}
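A usage sketch of the env-driven fallback above; the endpoint and credentials are example values, and MINIO_ENDPOINT may carry an optional ':port' suffix, as parsed above:

process.env.MINIO_ENDPOINT = 'localhost:9001'
process.env.MINIO_ACCESS_KEY = 'minioadmin'
process.env.MINIO_SECRET_KEY = 'minioadmin'

const storageConfig: StorageConfiguration = { default: '', storages: [] }
addMinioFallback(storageConfig) // exits the process if a variable is missing
// storageConfig.default === 'minio'; storages[0] has endpoint 'localhost' and port 9001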
server/s3/src/__tests__/s3.test.ts (new file, 77 lines)
@@ -0,0 +1,77 @@
+//
+// Copyright © 2024 Hardcore Engineering Inc.
+//
+// Licensed under the Eclipse Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License. You may
+// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+import { MeasureMetricsContext, generateId } from '@hcengineering/core'
+import { objectsToArray, type StorageConfiguration } from '@hcengineering/server-core'
+import { S3Service, processConfigFromEnv, type S3Config } from '..'
+
+describe('s3 operations', () => {
+  const config: StorageConfiguration = { default: 'minio', storages: [] }
+  const minioConfigVar = processConfigFromEnv(config)
+  if (minioConfigVar !== undefined || config.storages[0] === undefined) {
+    console.error('No S3 config env is configured:' + minioConfigVar)
+    it.skip('No S3 config env is configured', async () => {})
+    return
+  }
+  const toolCtx = new MeasureMetricsContext('test', {})
+  it('check root bucket', async () => {
+    const minioService = new S3Service({ ...(config.storages[0] as S3Config), rootBucket: 'haiodo-test-bucket' })
+
+    let existingTestBuckets = await minioService.listBuckets(toolCtx, '')
+    // Delete old buckets
+    for (const b of existingTestBuckets) {
+      await b.delete()
+    }
+
+    const genWorkspaceId1 = generateId()
+    const genWorkspaceId2 = generateId()
+
+    expect(genWorkspaceId1).not.toEqual(genWorkspaceId2)
+
+    const ws1 = { name: genWorkspaceId1, productId: '' }
+    const ws2 = { name: genWorkspaceId2, productId: '' }
+    await minioService.make(toolCtx, ws1)
+    await minioService.make(toolCtx, ws2)
+
+    const v1 = generateId()
+    await minioService.put(toolCtx, ws1, 'obj1.txt', v1, 'text/plain')
+    await minioService.put(toolCtx, ws2, 'obj2.txt', v1, 'text/plain')
+
+    const w1Objects = await objectsToArray(toolCtx, minioService, ws1)
+    expect(w1Objects.map((it) => it._id)).toEqual(['obj1.txt'])
+
+    const w2Objects = await objectsToArray(toolCtx, minioService, ws2)
+    expect(w2Objects.map((it) => it._id)).toEqual(['obj2.txt'])
+
+    await minioService.put(toolCtx, ws1, 'obj1.txt', 'obj1', 'text/plain')
+    await minioService.put(toolCtx, ws1, 'obj2.txt', 'obj2', 'text/plain')
+
+    const w1Objects2 = await objectsToArray(toolCtx, minioService, ws1)
+    expect(w1Objects2.map((it) => it._id)).toEqual(['obj1.txt', 'obj2.txt'])
+
+    const data = Buffer.concat(await minioService.read(toolCtx, ws1, 'obj1.txt'))
+
+    expect('obj1').toEqual(data.toString())
+
+    existingTestBuckets = await minioService.listBuckets(toolCtx, '')
+    expect(existingTestBuckets.length).toEqual(2)
+    // Delete old buckets
+    for (const b of existingTestBuckets) {
+      await b.delete()
+    }
+    existingTestBuckets = await minioService.listBuckets(toolCtx, '')
+    expect(existingTestBuckets.length).toEqual(0)
+  })
+})
@@ -1,5 +1,5 @@
 //
-// Copyright © 2022 Hardcore Engineering Inc.
+// Copyright © 2024 Hardcore Engineering Inc.
 //
 // Licensed under the Eclipse Public License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License. You may
@@ -33,6 +33,7 @@ import {
   type ListBlobResult,
   type StorageAdapter,
   type StorageConfig,
+  type StorageConfiguration,
   type UploadedObjectInfo
 } from '@hcengineering/server-core'
 import { Readable } from 'stream'
@@ -65,6 +66,7 @@ export class S3Service implements StorageAdapter {
   static config = 's3'
   expireTime: number
   client: S3
+  contentTypes?: string[]
   constructor (readonly opt: S3Config) {
     this.client = new S3({
       endpoint: opt.endpoint,
@@ -76,6 +78,43 @@ export class S3Service implements StorageAdapter {
     })

     this.expireTime = parseInt(this.opt.expireTime ?? '168') * 3600 // use 7 * 24 hours as the default expire value
+    this.contentTypes = opt.contentTypes
   }

+  async initialize (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {}
+
+  async lookup (ctx: MeasureContext, workspaceId: WorkspaceIdWithUrl, docs: Blob[]): Promise<BlobLookupResult> {
+    const result: BlobLookupResult = {
+      lookups: [],
+      updates: new Map()
+    }
+    const now = Date.now()
+    for (const d of docs) {
+      // Let's add a current URI for previews.
+      const bl = d as BlobLookup
+      const command = new GetObjectCommand({
+        Bucket: this.getBucketId(workspaceId),
+        Key: this.getDocumentKey(workspaceId, d.storageId),
+        ResponseCacheControl: 'max-age=9d'
+      })
+      if (
+        (bl.downloadUrl === undefined || (bl.downloadUrlExpire ?? 0) < now) &&
+        (this.opt.allowPresign ?? 'true') === 'true'
+      ) {
+        bl.downloadUrl = await getSignedUrl(this.client, command, {
+          expiresIn: this.expireTime
+        })
+        bl.downloadUrlExpire = now + this.expireTime * 1000
+        result.updates?.set(bl._id, {
+          downloadUrl: bl.downloadUrl,
+          downloadUrlExpire: bl.downloadUrlExpire
+        })
+      }
+
+      result.lookups.push(bl)
+    }
+    // this.client.presignedUrl(httpMethod, bucketName, objectName, callback)
+    return result
+  }
+
 /**
@@ -89,42 +128,8 @@ export class S3Service implements StorageAdapter {
     return toWorkspaceString(workspaceId, '.')
   }

-  async initialize (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {}
-
   async close (): Promise<void> {}

-  async lookup (ctx: MeasureContext, workspaceId: WorkspaceIdWithUrl, docs: Blob[]): Promise<BlobLookupResult> {
-    const result: BlobLookupResult = {
-      lookups: [],
-      updates: new Map()
-    }
-    const now = Date.now()
-    for (const d of docs) {
-      // Let's add a current URI for previews.
-      const bl = d as BlobLookup
-      const command = new GetObjectCommand({
-        Bucket: this.getBucketId(workspaceId),
-        Key: this.getDocumentKey(workspaceId, d.storageId)
-      })
-      if (
-        (bl.downloadUrl === undefined || (bl.downloadUrlExpire ?? 0) < now) &&
-        (this.opt.allowPresign ?? 'true') === 'true'
-      ) {
-        bl.downloadUrl = await getSignedUrl(this.client, command, {
-          expiresIn: this.expireTime
-        })
-        bl.downloadUrlExpire = now + this.expireTime * 1000
-        result.updates?.set(bl._id, {
-          downloadUrl: bl.downloadUrl
-        })
-      }
-
-      result.lookups.push(bl)
-    }
-    // this.client.presignedUrl(httpMethod, bucketName, objectName, callback)
-    return result
-  }
-
   async exists (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<boolean> {
     try {
       const result = await this.client.headBucket({
@@ -148,29 +153,73 @@ export class S3Service implements StorageAdapter {
         Bucket: this.getBucketId(workspaceId)
       })
     } catch (err: any) {
+      if (err.Code === 'BucketAlreadyOwnedByYou') {
+        return
+      }
       ctx.error('error during create bucket', { err })
     }
   }

   async listBuckets (ctx: MeasureContext, productId: string): Promise<BucketInfo[]> {
-    const productPostfix = this.getBucketFolder({
-      name: '',
-      productId
-    })
-    const buckets = await this.client.listBuckets()
-    return (buckets.Buckets ?? [])
-      .filter((it) => it.Name !== undefined && it.Name.endsWith(productPostfix))
-      .map((it) => {
-        let name = it.Name ?? ''
-        name = name.slice(0, name.length - productPostfix.length)
-        return {
-          name,
-          delete: async () => {
-            await this.delete(ctx, { name, productId })
-          },
-          list: async () => await this.listStream(ctx, { name, productId })
-        }
-      })
+    try {
+      if (this.opt.rootBucket !== undefined) {
+        const info = new Map<string, BucketInfo>()
+        let token: string | undefined
+
+        while (true) {
+          const res = await this.client.listObjectsV2({
+            Bucket: this.opt.rootBucket,
+            Prefix: '',
+            Delimiter: '/',
+            ContinuationToken: token
+          })
+          for (const data of res.CommonPrefixes ?? []) {
+            const wsName = data.Prefix?.split('/')?.[0]
+            if (wsName !== undefined && !info.has(wsName)) {
+              info.set(wsName, {
+                name: wsName,
+                delete: async () => {
+                  await this.delete(ctx, { name: wsName, productId })
+                },
+                list: async () => await this.listStream(ctx, { name: wsName, productId })
+              })
+            }
+          }
+          if (res.IsTruncated === true) {
+            token = res.NextContinuationToken
+          } else {
+            break
+          }
+        }
+        return Array.from(info.values())
+      } else {
+        const productPostfix = this.getBucketFolder({
+          name: '',
+          productId
+        })
+        const buckets = await this.client.listBuckets()
+        return (buckets.Buckets ?? [])
+          .filter((it) => it.Name !== undefined && it.Name.endsWith(productPostfix))
+          .map((it) => {
+            let name = it.Name ?? ''
+            name = name.slice(0, name.length - productPostfix.length)
+            return {
+              name,
+              delete: async () => {
+                await this.delete(ctx, { name, productId })
+              },
+              list: async () => await this.listStream(ctx, { name, productId })
+            }
+          })
+      }
+    } catch (err: any) {
+      if (err.Code === 'NoSuchBucket') {
+        return []
+      }
+      ctx.error('failed to list buckets', { rootBucket: this.opt.rootBucket })
+      console.error(err)
+      return []
+    }
   }

   getDocumentKey (workspace: WorkspaceId, name: string): string {
@@ -189,13 +238,16 @@ export class S3Service implements StorageAdapter {

   @withContext('delete')
   async delete (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {
+    try {
+      await removeAllObjects(ctx, this, workspaceId)
+    } catch (err: any) {
+      ctx.error('failed to clean all objects', { error: err })
+    }
     if (this.opt.rootBucket === undefined) {
-      // We should
+      // We should also delete the bucket
       await this.client.deleteBucket({
         Bucket: this.getBucketId(workspaceId)
       })
-    } else {
-      await removeAllObjects(ctx, this, workspaceId, this.getBucketFolder(workspaceId) + '/')
     }
   }

@@ -206,21 +258,25 @@ export class S3Service implements StorageAdapter {
     return key
   }

+  rootPrefix (workspaceId: WorkspaceId): string | undefined {
+    return this.opt.rootBucket !== undefined ? this.getBucketFolder(workspaceId) + '/' : undefined
+  }
+
   @withContext('listStream')
   async listStream (
     ctx: MeasureContext,
     workspaceId: WorkspaceId,
     prefix?: string | undefined
   ): Promise<BlobStorageIterator> {
-    const hasMore = true
+    let hasMore = true
     const buffer: ListBlobResult[] = []
     let token: string | undefined

-    const rootPrefix = this.opt.rootBucket !== undefined ? this.getBucketFolder(workspaceId) + '/' : undefined
+    const rootPrefix = this.rootPrefix(workspaceId)
     return {
       next: async (): Promise<ListBlobResult | undefined> => {
         try {
-          if (hasMore) {
+          if (hasMore && buffer.length === 0) {
             const res = await this.client.listObjectsV2({
               Bucket: this.getBucketId(workspaceId),
               Prefix: rootPrefix !== undefined ? rootPrefix + (prefix ?? '') : prefix ?? '',
@@ -228,6 +284,8 @@ export class S3Service implements StorageAdapter {
             })
             if (res.IsTruncated === true) {
               token = res.NextContinuationToken
+            } else {
+              hasMore = false
             }

             for (const data of res.Contents ?? []) {
@@ -237,7 +295,7 @@ export class S3Service implements StorageAdapter {
               _class: core.class.Blob,
               etag: data.ETag ?? '',
               size: data.Size ?? 0,
-              provider: 's3',
+              provider: this.opt.name,
               space: core.space.Configuration,
               modifiedBy: core.account.ConfigUser,
               modifiedOn: data.LastModified?.getTime() ?? 0,
@@ -248,7 +306,6 @@ export class S3Service implements StorageAdapter {
         } catch (err: any) {
           ctx.error('Failed to get list', { error: err, workspaceId: workspaceId.name, prefix })
         }
-
         if (buffer.length > 0) {
           return buffer.shift()
         }
@ -260,17 +317,19 @@ export class S3Service implements StorageAdapter {
|
||||
}
|
||||
}
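listStream hands back a pull-based BlobStorageIterator rather than a materialized array, so large workspaces are paged through listObjectsV2 lazily. A hedged drain loop; the types are assumed to come from @hcengineering/server-core as elsewhere in this file:

// Sketch: drain a BlobStorageIterator produced by listStream above.
// next() is assumed to resolve to undefined once all S3 pages are consumed.
async function drainBlobs (iterator: BlobStorageIterator): Promise<ListBlobResult[]> {
  const blobs: ListBlobResult[] = []
  let blob = await iterator.next()
  while (blob !== undefined) {
    blobs.push(blob)
    blob = await iterator.next()
  }
  return blobs
}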

  @withContext('stat')
  async stat (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Blob | undefined> {
    try {
      const result = await this.client.headObject({
        Bucket: this.getBucketId(workspaceId),
        Key: this.getDocumentKey(workspaceId, objectName)
      })
      const rootPrefix = this.rootPrefix(workspaceId)
      return {
        provider: '',
        _class: core.class.Blob,
        _id: objectName as Ref<Blob>,
        storageId: objectName,
        _id: this.stripPrefix(rootPrefix, objectName) as Ref<Blob>,
        storageId: this.stripPrefix(rootPrefix, objectName),
        contentType: result.ContentType ?? '',
        size: result.ContentLength ?? 0,
        etag: result.ETag ?? '',
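stripPrefix is called above but its body falls outside this hunk. A minimal sketch of what the call sites imply, namely dropping the root-bucket folder prefix from a key when one is configured; this is an assumption, not the committed implementation:

// Sketch of the behavior implied by the stat() call sites above:
// when a root prefix is configured, object keys carry it and blob ids must not.
function stripPrefix (prefix: string | undefined, key: string): string {
  if (prefix !== undefined && key.startsWith(prefix)) {
    return key.slice(prefix.length)
  }
  return key
}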
@ -380,6 +439,7 @@ export class S3Service implements StorageAdapter {
    return chunks
  }

  @withContext('partial')
  async partial (
    ctx: MeasureContext,
    workspaceId: WorkspaceId,
@ -391,3 +451,30 @@ export class S3Service implements StorageAdapter {
    return await this.doGet(ctx, workspaceId, objectName, range)
  }
}

export function processConfigFromEnv (storageConfig: StorageConfiguration): string | undefined {
  const endpoint = process.env.S3_ENDPOINT
  if (endpoint === undefined) {
    return 'S3_ENDPOINT'
  }
  const accessKey = process.env.S3_ACCESS_KEY
  if (accessKey === undefined) {
    return 'S3_ACCESS_KEY'
  }

  const secretKey = process.env.S3_SECRET_KEY
  if (secretKey === undefined) {
    return 'S3_SECRET_KEY'
  }

  const minioConfig: S3Config = {
    kind: 's3',
    name: 's3',
    region: 'auto',
    endpoint,
    accessKey,
    secretKey
  }
  storageConfig.storages.push(minioConfig)
  storageConfig.default = 's3'
}
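processConfigFromEnv returns the name of the first missing environment variable, or undefined when all three are present, which lets callers fail fast with a precise message. A hedged usage sketch:

// Sketch: build an S3 storage configuration from the environment, or abort.
const storageConfig: StorageConfiguration = { default: '', storages: [] }
const missing = processConfigFromEnv(storageConfig)
if (missing !== undefined) {
  console.error(`required environment variable ${missing} is not set`)
  process.exit(1)
}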

@ -1,41 +0,0 @@
import { MinioConfig } from '@hcengineering/minio'
import { StorageConfiguration } from '@hcengineering/server-core'

export function addMinioFallback (storageConfig: StorageConfiguration): void {
  let minioEndpoint = process.env.MINIO_ENDPOINT
  if (minioEndpoint === undefined) {
    console.error('MINIO_ENDPOINT is required')
    process.exit(1)
  }
  const minioAccessKey = process.env.MINIO_ACCESS_KEY
  if (minioAccessKey === undefined) {
    console.error('MINIO_ACCESS_KEY is required')
    process.exit(1)
  }

  let minioPort = 9000
  const sp = minioEndpoint.split(':')
  if (sp.length > 1) {
    minioEndpoint = sp[0]
    minioPort = parseInt(sp[1])
  }

  const minioSecretKey = process.env.MINIO_SECRET_KEY
  if (minioSecretKey === undefined) {
    console.error('MINIO_SECRET_KEY is required')
    process.exit(1)
  }

  const minioConfig: MinioConfig = {
    kind: 'minio',
    name: 'minio',
    port: minioPort,
    region: 'us-east-1',
    useSSL: 'false',
    endpoint: minioEndpoint,
    accessKey: minioAccessKey,
    secretKey: minioSecretKey
  }
  storageConfig.storages.push(minioConfig)
  storageConfig.default = 'minio'
}
@ -1,4 +1,4 @@
import { MinioConfig, MinioService } from '@hcengineering/minio'
import { MinioConfig, MinioService, addMinioFallback } from '@hcengineering/minio'
import { createRawMongoDBAdapter } from '@hcengineering/mongo'
import { S3Service, type S3Config } from '@hcengineering/s3'
import {
@ -8,15 +8,20 @@ import {
  buildStorage,
  type StorageConfig
} from '@hcengineering/server-core'
import { addMinioFallback } from './minio'

/*

A ';' separated list of URI's to configure the storage adapters.
A ';' separated list of URI's to configure the storage adapters. New lines will be omitted during parse.

Each config is in `kind|name|uri` format.
Last one is used as default one.
Each config is in `kind(,name)?|uri|contentTypes` format.

* kind - a storage kind, minio/s3 for now.
* name - a symbolic name for the provider; the name can be omitted, in which case the kind is used as the name.
* uri - a storage URI with encoded parameters.
* contentTypes - a comma separated list of content type patterns. For example 'image/*,video/gif' will match all image/* and video/gif formats.
  So `*` will be replaced with `.*` for the regexp.

The last one is used as the default, or the one whose content type matches is used.

Example:
STORAGE_CONFIG=kind|minio|minio:9000?accessKey=minio&secretKey=minio&useSSL=false;\
@ -45,9 +50,9 @@ export function parseStorageEnv (storageEnv: string, storageConfig: StorageConfi
    if (st.trim().length === 0 || !st.includes('|')) {
      throw new Error('Invalid storage config:' + st)
    }
    let [kind, name, url] = st.split('|')
    if (url == null) {
      url = name
    let [kindName, url, contentTypes] = st.split('|')
    let [kind, name] = kindName.split(',')
    if (name == null) {
      name = kind
    }
    let hasProtocol = true
@ -61,7 +66,8 @@ export function parseStorageEnv (storageEnv: string, storageConfi
      kind,
      name,
      endpoint: (hasProtocol ? uri.protocol + '//' : '') + uri.hostname, // Port should go away
      port: uri.port !== '' ? parseInt(uri.port) : undefined
      port: uri.port !== '' ? parseInt(uri.port) : undefined,
      contentTypes: contentTypes !== undefined ? contentTypes.split(',') : undefined
    }

    // Add all extra parameters
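To make the `kind(,name)?|uri|contentTypes` format above concrete, a hedged example with placeholder endpoints and credentials; per the comment above, the last entry wins as the default:

// Sketch: one named minio adapter claiming image content types,
// plus an s3 adapter that becomes the default for everything else.
const cfg: StorageConfiguration = { default: '', storages: [] }
parseStorageEnv(
  'minio,images|localhost:9000?accessKey=minio&secretKey=minio2|image/*;' +
    's3|s3.example.com:443?accessKey=key&secretKey=secret',
  cfg
)
// Expected: cfg.default === 's3' and cfg.storages[0].contentTypes deep-equals ['image/*']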

@ -16,7 +16,7 @@ describe('config-parse', () => {
  })
  it('single-minio-named', async () => {
    const cfg: StorageConfiguration = { default: '', storages: [] }
    parseStorageEnv('minio|myminio|localhost:9000?accessKey=minio&secretKey=minio2', cfg)
    parseStorageEnv('minio,myminio|localhost:9000?accessKey=minio&secretKey=minio2', cfg)
    expect(cfg.default).toEqual('myminio')
    const minio = cfg.storages[0] as MinioConfig
    expect(minio.endpoint).toEqual('localhost')
@ -57,4 +57,19 @@ describe('config-parse', () => {
    expect(minio.secretKey).toEqual('minio2')
    expect((minio as any).downloadUrl).toEqual('https://front.hc.engineering')
  })
  it('test-decode unexpected symbols', async () => {
    const cfg: StorageConfiguration = { default: '', storages: [] }
    parseStorageEnv(
      'minio|localhost:9000?accessKey=%F0%9F%91%85%F0%9F%91%BB%20-%20%D0%AD%D0%A2%D0%9E%20%20%20%20%D1%82%D0%B0%D0%BA%D0%BE%D0%B9%20%D0%BF%D0%B0%D1%80%D0%BE%D0%BB%D1%8C%0A%D0%90%20%D1%82%D0%BE&secretKey=minio2&downloadUrl=https%3A%2F%2Ffront.hc.engineering|image/jpeg,image/gif',
      cfg
    )
    expect(cfg.default).toEqual('minio')
    const minio = cfg.storages[0] as MinioConfig
    expect(minio.endpoint).toEqual('localhost')
    expect(minio.port).toEqual(9000)
    expect(minio.accessKey).toEqual('👅👻 - ЭТО    такой пароль\nА то')
    expect(minio.secretKey).toEqual('minio2')
    expect(minio.contentTypes).toEqual(['image/jpeg', 'image/gif'])
    expect((minio as any).downloadUrl).toEqual('https://front.hc.engineering')
  })
})
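The parser above only stores the raw contentTypes patterns; per the doc comment, `*` expands to `.*` when matching. A sketch of that selection rule, as an illustration of the documented rule rather than the platform's actual matcher:

// Sketch: does a stored pattern such as 'image/*' accept a given content type?
// Anchoring with ^...$ is an assumption on top of the documented '*' -> '.*' rule.
function matchesContentType (pattern: string, contentType: string): boolean {
  const rx = new RegExp('^' + pattern.trim().replace(/\*/g, '.*') + '$')
  return rx.test(contentType)
}

// matchesContentType('image/*', 'image/jpeg') -> true
// matchesContentType('video/gif', 'image/gif') -> false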

@ -125,6 +125,8 @@ export async function initModel (

  await storageAdapter.make(ctx, workspaceId)

  logger.log('connecting to transactor', { workspaceId, transactorUrl })

  connection = (await connect(
    transactorUrl,
    workspaceId,