Mirror of https://github.com/hcengineering/platform.git, synced 2024-12-22 19:11:33 +03:00

UBERF-7114: Fix workspace from clone (#5703)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>

parent 9d72283aff
commit 3e913c9db7
@@ -11,6 +11,9 @@ dependencies:
   '@aws-sdk/client-s3':
     specifier: ^3.575.0
     version: 3.577.0
+  '@aws-sdk/lib-storage':
+    specifier: ^3.583.0
+    version: 3.583.0(@aws-sdk/client-s3@3.577.0)
   '@aws-sdk/s3-request-presigner':
     specifier: ^3.582.0
     version: 3.582.0
@@ -917,9 +920,6 @@ dependencies:
   '@types/koa-bodyparser':
     specifier: ^4.3.3
     version: 4.3.12
-  '@types/koa-morgan':
-    specifier: ^1.0.8
-    version: 1.0.8
   '@types/koa-passport':
     specifier: ^6.0.0
     version: 6.0.3
@@ -1151,9 +1151,6 @@ dependencies:
   koa-bodyparser:
     specifier: ^4.3.0
     version: 4.4.1
-  koa-morgan:
-    specifier: ^1.0.1
-    version: 1.0.1
   koa-passport:
     specifier: ~6.0.0
     version: 6.0.0
@@ -1770,6 +1767,22 @@ packages:
     tslib: 2.6.2
     dev: false

+  /@aws-sdk/lib-storage@3.583.0(@aws-sdk/client-s3@3.577.0):
+    resolution: {integrity: sha512-To3mCeSpJiHWxAh00S5+cRfx8BkbdmWvZG2Rvcz20Qqh/GmhMWeDbN4OjDTqcewWpqNhU0n1ShZY/GcIWSn+Pg==}
+    engines: {node: '>=16.0.0'}
+    peerDependencies:
+      '@aws-sdk/client-s3': ^3.583.0
+    dependencies:
+      '@aws-sdk/client-s3': 3.577.0
+      '@smithy/abort-controller': 3.0.0
+      '@smithy/middleware-endpoint': 3.0.0
+      '@smithy/smithy-client': 3.0.1
+      buffer: 5.6.0
+      events: 3.3.0
+      stream-browserify: 3.0.0
+      tslib: 2.6.2
+    dev: false
+
   /@aws-sdk/middleware-bucket-endpoint@3.577.0:
     resolution: {integrity: sha512-twlkNX2VofM6kHXzDEiJOiYCc9tVABe5cbyxMArRWscIsCWG9mamPhC77ezG4XsN9dFEwVdxEYD5Crpm/5EUiw==}
     engines: {node: '>=16.0.0'}
@@ -8954,6 +8967,13 @@ packages:
     resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
     dev: false

+  /buffer@5.6.0:
+    resolution: {integrity: sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw==}
+    dependencies:
+      base64-js: 1.5.1
+      ieee754: 1.2.1
+    dev: false
+
   /buffer@5.7.1:
     resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==}
     dependencies:
@@ -16394,6 +16414,13 @@ packages:
       - utf-8-validate
     dev: false

+  /stream-browserify@3.0.0:
+    resolution: {integrity: sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA==}
+    dependencies:
+      inherits: 2.0.4
+      readable-stream: 3.6.2
+    dev: false
+
   /stream-chopper@3.0.1:
     resolution: {integrity: sha512-f7h+ly8baAE26iIjcp3VbnBkbIRGtrvV0X0xxFM/d7fwLTYnLzDPTXRKNxa2HZzohOrc96NTrR+FaV3mzOelNA==}
     dependencies:
@@ -18263,7 +18290,7 @@ packages:
     dev: false

   file:projects/account-service.tgz:
-    resolution: {integrity: sha512-41LxbvGXCGm44FSbEJRQju/3/rEYOoqGcOTtxFlhFha68GQApiBjYnesZZh5DUrkFsXt8/RUVNz5OUutPoENpg==, tarball: file:projects/account-service.tgz}
+    resolution: {integrity: sha512-nobaJJXk2cwnaGafGa8skzZZ4Y3tn6AfYwO3SMo0hMyh5VSL6qzpqPo0Qyaf8PEgk/0IhyEt9fY9hqPzGnozAg==, tarball: file:projects/account-service.tgz}
     name: '@rush-temp/account-service'
     version: 0.0.0
     dependencies:
@@ -18691,7 +18718,7 @@ packages:
     dev: false

   file:projects/backup-service.tgz(esbuild@0.20.1)(ts-node@10.9.2):
-    resolution: {integrity: sha512-tMU5TaEJAhjWjFJZsV2Vx/PUPFL7UIVy4SrVGzuYJvXD+5rAbfeVw2HjGVwazFmmPfTnvD8qwSxsL0od2FjJ2A==, tarball: file:projects/backup-service.tgz}
+    resolution: {integrity: sha512-YSs79WP+Awv9iu+GkQs8oGv3yLoJGiUIlVcMzXrPnV4tsCI+k5+US6cWjKbFYBreMdehjVx4Ri5IFMBMlpU8JA==, tarball: file:projects/backup-service.tgz}
     id: file:projects/backup-service.tgz
     name: '@rush-temp/backup-service'
     version: 0.0.0
@@ -20893,7 +20920,7 @@ packages:
     dev: false

   file:projects/model-document.tgz:
-    resolution: {integrity: sha512-St2Jwm/oU5EpPdS6Vlfb+C6OQzOM5NCyUW4MP3oM6XKLczldB2AykV1z4UuPPhw27w42e+9kgQ55Tet582412g==, tarball: file:projects/model-document.tgz}
+    resolution: {integrity: sha512-Nx227HyaO4nkHt18fA59V5w1rZM4UkBVeIWWIZxRoSEm3PhvvWN1y754S0CYmDISHusw+MtbxN86XBWGK5ZRrg==, tarball: file:projects/model-document.tgz}
     name: '@rush-temp/model-document'
     version: 0.0.0
     dependencies:
@@ -22871,12 +22898,13 @@ packages:
     dev: false

   file:projects/s3.tgz(esbuild@0.20.1)(ts-node@10.9.2):
-    resolution: {integrity: sha512-545xE/hFO0K5PpSsSnM7hTqQilfo1Psno9mMs6XSYEC8SZQFX/mV1j0/W9l3++yX7lZQwxHKHBv/morA8v/RAA==, tarball: file:projects/s3.tgz}
+    resolution: {integrity: sha512-goEhEM88h+c7TwKuVhjIeU6Tj9sbGPC0yIMZs73pTnsc6tfBrguYcDwMFCn5r4YNzDIrK6lIOva3jlhSEai63Q==, tarball: file:projects/s3.tgz}
     id: file:projects/s3.tgz
     name: '@rush-temp/s3'
     version: 0.0.0
     dependencies:
       '@aws-sdk/client-s3': 3.577.0
+      '@aws-sdk/lib-storage': 3.583.0(@aws-sdk/client-s3@3.577.0)
       '@aws-sdk/s3-request-presigner': 3.582.0
       '@types/jest': 29.5.12
       '@types/node': 20.11.19
@@ -23309,7 +23337,7 @@ packages:
     dev: false

   file:projects/server-core.tgz(esbuild@0.20.1)(ts-node@10.9.2):
-    resolution: {integrity: sha512-nnEMB1zXtGYZYkx8LQ3LcxHqJ8rINXMKaqdzfnRIPRxYrdBocaE8QK56QJuFioera/FYZxJui4mrXrSXmMkDpg==, tarball: file:projects/server-core.tgz}
+    resolution: {integrity: sha512-igJYqfFaPwOzhEA/uED0MqboUq/VGhhiRBUVGWqnfm42Xbl6SF07e7jdJGWkWzt8pEsQKgN732C9cuHm2Otx4A==, tarball: file:projects/server-core.tgz}
     id: file:projects/server-core.tgz
     name: '@rush-temp/server-core'
     version: 0.0.0
@@ -154,6 +154,9 @@ async function OnChatMessageCreated (tx: TxCUD<Doc>, control: TriggerControl): P
   }

   const targetDoc = (await control.findAll(message.attachedToClass, { _id: message.attachedTo }, { limit: 1 }))[0]
+  if (targetDoc === undefined) {
+    return []
+  }
   const isChannel = hierarchy.isDerived(targetDoc._class, chunter.class.Channel)
   const res: Tx[] = []

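The hunk above makes the chunter trigger tolerate a missing parent document, which can happen when a workspace is cloned. A minimal sketch of the guard pattern, with illustrative types rather than the platform's real Tx/TriggerControl API:

```ts
// Treat "parent not found" as a normal outcome: emit no transactions
// instead of dereferencing undefined. All names here are illustrative.
interface Doc { _id: string }
type Tx = { kind: string }

async function deriveTxes<T extends Doc> (
  findAll: (query: { _id: string }, options: { limit: number }) => Promise<T[]>,
  attachedTo: string,
  build: (target: T) => Tx[]
): Promise<Tx[]> {
  const target = (await findAll({ _id: attachedTo }, { limit: 1 }))[0]
  if (target === undefined) {
    return [] // the parent was deleted, e.g. mid-clone; nothing to derive
  }
  return build(target)
}
```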
@@ -38,6 +38,7 @@
     "@hcengineering/core": "^0.6.28",
     "@hcengineering/platform": "^0.6.9",
     "@hcengineering/text": "^0.6.1",
+    "@hcengineering/analytics": "^0.6.0",
     "@hcengineering/query": "^0.6.8",
     "fast-equals": "^5.0.1",
     "@hcengineering/storage": "^0.6.0",
@@ -46,6 +46,7 @@ import { createStateDoc } from './indexer/utils'
 import { getScoringConfig, mapSearchResultDoc } from './mapper'
 import { type StorageAdapter } from './storage'
 import type { FullTextAdapter, IndexedDoc, ServerStorage, WithFind } from './types'
+import { Analytics } from '@hcengineering/analytics'

 /**
  * @public
@@ -177,7 +178,7 @@ export class FullTextIndex implements WithFind {
         }
       }
     } catch (err: any) {
-      console.error(err)
+      Analytics.handleError(err)
     }

     classes = classes.filter((it, idx, arr) => arr.indexOf(it) === idx)
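This and the following hunks route errors caught in the indexing code to `Analytics.handleError`, so failures reach a central sink instead of only a bare `console.error`/`console.log`. A self-contained sketch of the pattern; the `Analytics` class below is a stand-in with an assumed signature, not the real `@hcengineering/analytics` module:

```ts
// Stand-in error sink; in the platform this forwards to the configured
// analytics provider. The console fallback keeps the sketch runnable.
class Analytics {
  static handleError (err: Error): void {
    console.error('[analytics]', err.message)
  }
}

// Report-and-continue: one failing document must not abort the whole
// indexing pass.
async function safeStep (work: () => Promise<void>): Promise<void> {
  try {
    await work()
  } catch (err: any) {
    Analytics.handleError(err)
  }
}
```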
@@ -36,6 +36,7 @@ import {
   type FullTextPipelineStage
 } from './types'
 import { docKey, docUpdKey } from './utils'
+import { Analytics } from '@hcengineering/analytics'

 /**
  * @public
@@ -155,6 +156,7 @@ export class ContentRetrievalStage implements FullTextPipelineStage {
           }
         }
       } catch (err: any) {
+        Analytics.handleError(err)
         const wasError = (doc as any).error !== undefined

         await pipeline.update(doc._id, false, { [docKey('error')]: JSON.stringify({ message: err.message, err }) })
@@ -45,6 +45,7 @@ import {
   isFullTextAttribute,
   loadIndexStageStage
 } from './utils'
+import { Analytics } from '@hcengineering/analytics'

 /**
  * @public
@@ -235,7 +236,7 @@ export class IndexedFieldStage implements FullTextPipelineStage {

           await pipeline.update(docState._id, this.stageValue, docUpdate)
         } catch (err: any) {
-          console.error(err)
+          Analytics.handleError(err)
           continue
         }
       }
@@ -42,6 +42,7 @@ import {
   fullTextPushStageId
 } from './types'
 import { collectPropagate, collectPropagateClasses, docKey, isCustomAttr } from './utils'
+import { Analytics } from '@hcengineering/analytics'

 /**
  * @public
@@ -76,7 +77,7 @@ export class FullTextPushStage implements FullTextPipelineStage {
         this.dimmVectors[k] = Array.from(Array(v).keys()).map((it) => 0)
       }
     } catch (err: any) {
-      console.error(err)
+      Analytics.handleError(err)
     }
   }

@@ -194,6 +195,7 @@ export class FullTextPushStage implements FullTextPipelineStage {
             this.checkIntegrity(elasticDoc)
             bulk.push(elasticDoc)
           } catch (err: any) {
+            Analytics.handleError(err)
             const wasError = (doc as any).error !== undefined

             await pipeline.update(doc._id, false, { [docKey('error')]: JSON.stringify({ message: err.message, err }) })
@@ -212,7 +214,7 @@ export class FullTextPushStage implements FullTextPipelineStage {
           await pipeline.update(doc._id, true, {})
         }
       } catch (err: any) {
-        console.error(err)
+        Analytics.handleError(err)
       }
     }
   }
@@ -287,7 +289,9 @@ function updateDoc2Elastic (
         }
       }
     }
-  } catch (e) {}
+  } catch (err: any) {
+    Analytics.handleError(err)
+  }

   docId = docIdOverride ?? docId
   if (docId === undefined) {
@@ -37,6 +37,7 @@ import { type DbAdapter } from '../adapter'
 import { RateLimiter } from '../limitter'
 import type { IndexedDoc } from '../types'
 import { type FullTextPipeline, type FullTextPipelineStage } from './types'
+import { Analytics } from '@hcengineering/analytics'

 export * from './content'
 export * from './field'
@@ -135,7 +136,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
     try {
       await this.storage.update(this.metrics, DOMAIN_DOC_INDEX_STATE, this.pending)
     } catch (err: any) {
-      console.error(err)
+      Analytics.handleError(err)
       // Go one by one.
       for (const o of this.pending) {
         await this.storage.update(this.metrics, DOMAIN_DOC_INDEX_STATE, new Map([o]))
@@ -470,6 +471,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
         toRemove.map((it) => it._id)
       )
     } catch (err: any) {
+      Analytics.handleError(err)
       // QuotaExceededError, ignore
     }
   }
@@ -532,6 +534,7 @@ export class FullTextIndexPipeline implements FullTextPipeline {
         }
       }
     } catch (err: any) {
+      Analytics.handleError(err)
       this.metrics.error('error during index', { error: err })
     }
   }
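The `@@ -135,7 +136,7 @@` hunk above touches a batch flush with a one-by-one fallback. A hedged sketch of that shape, where the `update` parameter stands in for the storage adapter call:

```ts
// Try a bulk write first; if it fails, report the error and replay the
// entries individually so the healthy ones still land and the poisoned
// entry surfaces on its own.
async function flushPending<K, V> (
  pending: Map<K, V>,
  update: (batch: Map<K, V>) => Promise<void>,
  report: (err: Error) => void
): Promise<void> {
  try {
    await update(pending)
  } catch (err: any) {
    report(err)
    // Go one by one.
    for (const entry of pending) {
      await update(new Map([entry]))
    }
  }
}
```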
@@ -40,6 +40,7 @@ import {
   type FullTextPipelineStage
 } from './types'
 import { collectPropagate, collectPropagateClasses, isCustomAttr, loadIndexStageStage } from './utils'
+import { Analytics } from '@hcengineering/analytics'

 /**
  * @public
@@ -309,7 +310,7 @@ export async function extractIndexedValues (
         currentReplacement[attr] = repl
       }
     } catch (err: any) {
-      console.log(err)
+      Analytics.handleError(err)
     }
   }
   let embeddingText = ''
@@ -10,6 +10,7 @@ import type {
 import core, { DOMAIN_MODEL, IndexKind, IndexOrder } from '@hcengineering/core'
 import { deepEqual } from 'fast-equals'
 import type { DomainHelper, DomainHelperOperations } from '../adapter'
+import { Analytics } from '@hcengineering/analytics'

 export class DomainIndexHelperImpl implements DomainHelper {
   domains = new Map<Domain, Set<string | FieldIndex<Doc>>>()
@@ -137,6 +138,7 @@ export class DomainIndexHelperImpl implements DomainHelper {
           }
         }
       } catch (err: any) {
+        Analytics.handleError(err)
         ctx.error('error: failed to create index', { domain, vv, err })
       }
     }
@@ -151,13 +153,14 @@ export class DomainIndexHelperImpl implements DomainHelper {
           }
           ctx.info('drop index', { domain, name: c.name, has50Documents })
           await operations.dropIndex(domain, c.name)
-        } catch (err) {
+        } catch (err: any) {
+          Analytics.handleError(err)
           console.error('error: failed to drop index', { c, err })
         }
       }
     }
   } catch (err: any) {
-    console.error(err)
+    Analytics.handleError(err)
   }

   if (bb.length > 0) {
@@ -74,6 +74,7 @@ import type {
   TriggerControl
 } from '../types'
 import { SessionContextImpl, createBroadcastEvent } from '../utils'
+import { Analytics } from '@hcengineering/analytics'

 export class TServerStorage implements ServerStorage {
   private readonly fulltext: FullTextIndex
@@ -915,6 +916,7 @@ export class TServerStorage implements ServerStorage {
       })
     } catch (err: any) {
       ctx.ctx.error('error process tx', { error: err })
+      Analytics.handleError(err)
       throw err
     } finally {
       onEnds.forEach((p) => {
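Note the different shape here: transaction processing reports the error and still rethrows, because the caller must observe the failed tx, whereas the indexing stages above report and continue. A hedged sketch with illustrative logger/reporter parameters:

```ts
// Report-and-rethrow: monitoring sees the failure, and so does the caller.
async function processTx (
  run: () => Promise<void>,
  logError: (msg: string, data: Record<string, unknown>) => void,
  report: (err: Error) => void
): Promise<void> {
  try {
    await run()
  } catch (err: any) {
    logError('error process tx', { error: err })
    report(err)
    throw err
  }
}
```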
@@ -16,6 +16,7 @@

 import core, {
   TxFactory,
+  cutObjectArray,
   matchQuery,
   type AttachedDoc,
   type Class,
@@ -32,6 +33,7 @@ import core, {

 import { getResource, type Resource } from '@hcengineering/platform'
 import type { Trigger, TriggerControl, TriggerFunc } from './types'
+import { Analytics } from '@hcengineering/analytics'

 import serverCore from './plugin'

@@ -90,18 +92,23 @@ export class Triggers {
     result: Tx[]
   ): Promise<void> => {
     for (const tx of matches) {
-      result.push(
-        ...(await trigger.op(tx, {
-          ...ctrl,
-          ctx: ctx.ctx,
-          txFactory: new TxFactory(tx.modifiedBy, true),
-          findAll: async (clazz, query, options) => await ctrl.findAllCtx(ctx.ctx, clazz, query, options),
-          apply: async (tx, broadcast, target) => {
-            return await ctrl.applyCtx(ctx, tx, broadcast, target)
-          },
-          result
-        }))
-      )
+      try {
+        result.push(
+          ...(await trigger.op(tx, {
+            ...ctrl,
+            ctx: ctx.ctx,
+            txFactory: new TxFactory(tx.modifiedBy, true),
+            findAll: async (clazz, query, options) => await ctrl.findAllCtx(ctx.ctx, clazz, query, options),
+            apply: async (tx, broadcast, target) => {
+              return await ctrl.applyCtx(ctx, tx, broadcast, target)
+            },
+            result
+          }))
+        )
+      } catch (err: any) {
+        ctx.ctx.error('failed to process trigger', { trigger: trigger.resource, tx, err })
+        Analytics.handleError(err)
+      }
     }
   }

@@ -139,9 +146,18 @@ export class Triggers {
     // If we have async triggers, let's schedule them after the IO phase.
     const result: Tx[] = []
     for (const request of asyncRequest) {
-      await ctx.with(request.trigger.resource, {}, async (ctx) => {
-        await applyTrigger(ctx, request.matches, request.trigger, result)
-      })
+      try {
+        await ctx.with(request.trigger.resource, {}, async (ctx) => {
+          await applyTrigger(ctx, request.matches, request.trigger, result)
+        })
+      } catch (err: any) {
+        ctx.ctx.error('failed to process trigger', {
+          trigger: request.trigger.resource,
+          matches: cutObjectArray(request.matches),
+          err
+        })
+        Analytics.handleError(err)
+      }
     }
     return result
   }
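Both trigger loops are now wrapped so a single faulty trigger logs, reports, and is skipped instead of aborting every other trigger in the batch. A minimal sketch of that isolation, with illustrative types in place of the platform's trigger machinery:

```ts
type Tx = { _id: string }
interface Trigger { resource: string, op: (tx: Tx) => Promise<Tx[]> }

async function applyTriggers (
  triggers: Trigger[],
  txes: Tx[],
  logError: (msg: string, data: Record<string, unknown>) => void,
  report: (err: Error) => void
): Promise<Tx[]> {
  const result: Tx[] = []
  for (const trigger of triggers) {
    for (const tx of txes) {
      try {
        result.push(...(await trigger.op(tx)))
      } catch (err: any) {
        // Isolate the failure: the remaining triggers still run.
        logError('failed to process trigger', { trigger: trigger.resource, tx, err })
        report(err)
      }
    }
  }
  return result
}
```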
@@ -38,6 +38,7 @@
     "@hcengineering/server-core": "^0.6.1",
     "@hcengineering/storage": "^0.6.0",
     "@aws-sdk/client-s3": "^3.575.0",
-    "@aws-sdk/s3-request-presigner": "^3.582.0"
+    "@aws-sdk/s3-request-presigner": "^3.582.0",
+    "@aws-sdk/lib-storage": "^3.583.0"
   }
 }
@@ -14,6 +14,7 @@
 //

 import { GetObjectCommand, S3 } from '@aws-sdk/client-s3'
+import { Upload } from '@aws-sdk/lib-storage'
 import { getSignedUrl } from '@aws-sdk/s3-request-presigner'

 import core, {
@@ -315,11 +316,36 @@ export class S3Service implements StorageAdapter {
     contentType: string,
     size?: number
   ): Promise<UploadedObjectInfo> {
+    if (size === undefined) {
+      const uploadTask = new Upload({
+        client: this.client,
+        params: {
+          Bucket: this.getBucketId(workspaceId),
+          Key: this.getDocumentKey(workspaceId, objectName),
+          ContentType: contentType,
+          Body: stream
+        },
+
+        // (optional) concurrency configuration
+        queueSize: 1,
+
+        // (optional) size of each part, in bytes, at least 5MB
+        partSize: 1024 * 1024 * 5,
+        leavePartsOnError: false
+      })
+
+      const output = await uploadTask.done()
+      return {
+        etag: output.ETag ?? '',
+        versionId: output.VersionId ?? null
+      }
+    }
+
     const result = await this.client.putObject({
       Bucket: this.getBucketId(workspaceId),
       Key: this.getDocumentKey(workspaceId, objectName),
-      ContentLength: size,
       ContentType: contentType,
+      ContentLength: size,
       Body: stream
     })
     return {
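This hunk is why `@aws-sdk/lib-storage` enters the lockfile: `putObject` needs the content length up front, while `Upload` performs a multipart upload and can consume a stream of unknown size, which is the case when piping data between workspaces during a clone. A hedged usage sketch (bucket and key names are placeholders):

```ts
import { S3 } from '@aws-sdk/client-s3'
import { Upload } from '@aws-sdk/lib-storage'
import { Readable } from 'stream'

// Upload a stream whose length is not known in advance.
async function uploadUnknownSize (client: S3, body: Readable): Promise<string> {
  const upload = new Upload({
    client,
    params: {
      Bucket: 'example-bucket',
      Key: 'example/object',
      ContentType: 'application/octet-stream',
      Body: body
    },
    queueSize: 1,              // parts uploaded concurrently
    partSize: 5 * 1024 * 1024, // minimum part size S3 allows
    leavePartsOnError: false   // abort the multipart upload on failure
  })
  const output = await upload.done()
  return output.ETag ?? ''
}
```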