Use datalake storage (#6310)

Signed-off-by: Alexander Onnikov <Alexander.Onnikov@xored.com>
Alexander Onnikov 2024-08-16 14:00:55 +07:00 committed by GitHub
parent 2caa8590f9
commit 9df6ba218c
55 changed files with 878 additions and 287 deletions

.vscode/launch.json vendored
View File

@ -103,14 +103,15 @@
"ACCOUNTS_URL": "http://localhost:3000",
"UPLOAD_URL": "/files",
"SERVER_PORT": "8087",
"VERSION": null,
"COLLABORATOR_URL": "ws://localhost:3078",
"COLLABORATOR_API_URL": "http://localhost:3078",
"CALENDAR_URL": "http://localhost:8095",
"GMAIL_URL": "http://localhost:8088",
"TELEGRAM_URL": "http://localhost:8086",
"MODEL_VERSION": ""
"MODEL_VERSION": "",
"VERSION": ""
},
"runtimeVersion": "20",
"runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
"showAsyncStacks": true,
"sourceMaps": true,
@ -127,7 +128,6 @@
"SECRET": "secret",
"METRICS_CONSOLE": "true",
"ACCOUNTS_URL": "http://localhost:3000",
"UPLOAD_URL": "/files",
"MONGO_URL": "mongodb://localhost:27017",
"MINIO_ACCESS_KEY": "minioadmin",
"MINIO_SECRET_KEY": "minioadmin",

View File

@ -176,6 +176,9 @@ dependencies:
'@rush-temp/core':
specifier: file:./projects/core.tgz
version: file:projects/core.tgz(@types/node@20.11.19)(esbuild@0.20.1)(ts-node@10.9.2)
'@rush-temp/datalake':
specifier: file:./projects/datalake.tgz
version: file:projects/datalake.tgz(esbuild@0.20.1)(ts-node@10.9.2)
'@rush-temp/desktop':
specifier: file:./projects/desktop.tgz
version: file:projects/desktop.tgz(bufferutil@4.0.8)(sass@1.71.1)(utf-8-validate@6.0.4)
@ -1313,6 +1316,9 @@ dependencies:
'@types/web-push':
specifier: ~3.6.3
version: 3.6.3
'@types/ws':
specifier: ^8.5.11
version: 8.5.11
'@typescript-eslint/eslint-plugin':
specifier: ^6.11.0
version: 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.56.0)(typescript@5.3.3)
@ -1511,6 +1517,9 @@ dependencies:
eslint-plugin-svelte:
specifier: ^2.35.1
version: 2.35.1(eslint@8.56.0)(svelte@4.2.12)(ts-node@10.9.2)
express:
specifier: ^4.19.2
version: 4.19.2
express-fileupload:
specifier: ^1.5.1
version: 1.5.1
@ -1538,6 +1547,9 @@ dependencies:
fork-ts-checker-webpack-plugin:
specifier: ~7.3.0
version: 7.3.0(typescript@5.3.3)(webpack@5.90.3)
form-data:
specifier: ^4.0.0
version: 4.0.0
gaxios:
specifier: ^5.0.1
version: 5.1.3
@ -1835,6 +1847,9 @@ dependencies:
winston-daily-rotate-file:
specifier: ^5.0.0
version: 5.0.0(winston@3.13.1)
ws:
specifier: ^8.18.0
version: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)
y-prosemirror:
specifier: ^1.2.1
version: 1.2.2(prosemirror-model@1.19.4)(y-protocols@1.0.6)(yjs@13.6.12)
@ -2604,7 +2619,7 @@ packages:
'@babel/traverse': 7.23.9
'@babel/types': 7.23.9
convert-source-map: 2.0.0
debug: 4.3.4
debug: 4.3.5
gensync: 1.0.0-beta.2
json5: 2.2.3
semver: 6.3.1
@ -3827,7 +3842,7 @@ packages:
'@babel/helper-split-export-declaration': 7.22.6
'@babel/parser': 7.23.9
'@babel/types': 7.23.9
debug: 4.3.4
debug: 4.3.5
globals: 11.12.0
transitivePeerDependencies:
- supports-color
@ -3949,7 +3964,7 @@ packages:
resolution: {integrity: sha512-aL+bFMIkpR0cmmj5Zgy0LMKEpgy43/hw5zadEArgmAMWWlKc5buwFvFT9G/o/YJkvXAJm5q3iuTuLaiaXW39sg==}
engines: {node: '>= 10.0.0'}
dependencies:
debug: 4.3.4
debug: 4.3.5
fs-extra: 9.1.0
promise-retry: 2.0.1
transitivePeerDependencies:
@ -3973,7 +3988,7 @@ packages:
hasBin: true
dependencies:
compare-version: 0.1.2
debug: 4.3.4
debug: 4.3.5
fs-extra: 10.1.0
isbinaryfile: 4.0.10
minimist: 1.2.8
@ -3988,7 +4003,7 @@ packages:
dependencies:
'@electron/asar': 3.2.10
'@malept/cross-spawn-promise': 1.1.1
debug: 4.3.4
debug: 4.3.5
dir-compare: 3.3.0
fs-extra: 9.1.0
minimatch: 3.1.2
@ -4637,7 +4652,7 @@ packages:
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
dependencies:
ajv: 6.12.6
debug: 4.3.4
debug: 4.3.5
espree: 9.6.1
globals: 13.24.0
ignore: 5.3.1
@ -4654,7 +4669,7 @@ packages:
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
dependencies:
ajv: 6.12.6
debug: 4.3.4
debug: 4.3.5
espree: 9.6.1
globals: 13.24.0
ignore: 5.3.1
@ -4809,7 +4824,7 @@ packages:
engines: {node: '>=10.10.0'}
dependencies:
'@humanwhocodes/object-schema': 2.0.2
debug: 4.3.4
debug: 4.3.5
minimatch: 3.1.2
transitivePeerDependencies:
- supports-color
@ -4820,7 +4835,7 @@ packages:
engines: {node: '>=10.10.0'}
dependencies:
'@humanwhocodes/object-schema': 1.2.1
debug: 4.3.4
debug: 4.3.5
minimatch: 3.1.2
transitivePeerDependencies:
- supports-color
@ -5579,7 +5594,7 @@ packages:
resolution: {integrity: sha512-9QOtNffcOF/c1seMCDnjckb3R9WHcG34tky+FHpNKKCW0wc/scYLwMtO+ptyGUfMW0/b/n4qRiALlaFHc9Oj7Q==}
engines: {node: '>= 10.0.0'}
dependencies:
debug: 4.3.4
debug: 4.3.5
fs-extra: 9.1.0
lodash: 4.17.21
tmp-promise: 3.0.3
@ -9540,7 +9555,7 @@ packages:
'@typescript-eslint/scope-manager': 5.62.0
'@typescript-eslint/type-utils': 5.62.0(eslint@8.56.0)(typescript@5.3.3)
'@typescript-eslint/utils': 5.62.0(eslint@8.56.0)(typescript@5.3.3)
debug: 4.3.4
debug: 4.3.5
eslint: 8.56.0
graphemer: 1.4.0
ignore: 5.3.1
@ -9594,7 +9609,7 @@ packages:
'@typescript-eslint/scope-manager': 5.62.0
'@typescript-eslint/types': 5.62.0
'@typescript-eslint/typescript-estree': 5.62.0(typescript@5.3.3)
debug: 4.3.4
debug: 4.3.5
eslint: 8.56.0
typescript: 5.3.3
transitivePeerDependencies:
@ -9650,7 +9665,7 @@ packages:
dependencies:
'@typescript-eslint/typescript-estree': 5.62.0(typescript@5.3.3)
'@typescript-eslint/utils': 5.62.0(eslint@8.56.0)(typescript@5.3.3)
debug: 4.3.4
debug: 4.3.5
eslint: 8.56.0
tsutils: 3.21.0(typescript@5.3.3)
typescript: 5.3.3
@ -9670,7 +9685,7 @@ packages:
dependencies:
'@typescript-eslint/typescript-estree': 6.21.0(typescript@5.3.3)
'@typescript-eslint/utils': 6.21.0(eslint@8.56.0)(typescript@5.3.3)
debug: 4.3.4
debug: 4.3.5
eslint: 8.56.0
ts-api-utils: 1.2.1(typescript@5.3.3)
typescript: 5.3.3
@ -9699,7 +9714,7 @@ packages:
dependencies:
'@typescript-eslint/types': 5.62.0
'@typescript-eslint/visitor-keys': 5.62.0
debug: 4.3.4
debug: 4.3.5
globby: 11.1.0
is-glob: 4.0.3
semver: 7.6.3
@ -9720,7 +9735,7 @@ packages:
dependencies:
'@typescript-eslint/types': 6.21.0
'@typescript-eslint/visitor-keys': 6.21.0
debug: 4.3.4
debug: 4.3.5
globby: 11.1.0
is-glob: 4.0.3
minimatch: 9.0.3
@ -10257,7 +10272,7 @@ packages:
resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==}
engines: {node: '>= 6.0.0'}
dependencies:
debug: 4.3.4
debug: 4.3.5
transitivePeerDependencies:
- supports-color
dev: false
@ -10266,7 +10281,7 @@ packages:
resolution: {integrity: sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==}
engines: {node: '>= 14'}
dependencies:
debug: 4.3.4
debug: 4.3.5
transitivePeerDependencies:
- supports-color
dev: false
@ -10475,7 +10490,7 @@ packages:
builder-util: 24.13.1
builder-util-runtime: 9.2.4
chromium-pickle-js: 0.2.0
debug: 4.3.4
debug: 4.3.5
dmg-builder: 24.13.3
ejs: 3.1.9
electron-publish: 24.13.1
@ -11323,7 +11338,7 @@ packages:
resolution: {integrity: sha512-upp+biKpN/XZMLim7aguUyW8s0FUpDvOtK6sbanMFDAMBzpHDqdhgVYm6zc9HJ6nWo7u2Lxk60i2M6Jd3aiNrA==}
engines: {node: '>=12.0.0'}
dependencies:
debug: 4.3.4
debug: 4.3.5
sax: 1.3.0
transitivePeerDependencies:
- supports-color
@ -11339,7 +11354,7 @@ packages:
builder-util-runtime: 9.2.4
chalk: 4.1.2
cross-spawn: 7.0.3
debug: 4.3.4
debug: 4.3.5
fs-extra: 10.1.0
http-proxy-agent: 5.0.0
https-proxy-agent: 5.0.1
@ -12449,7 +12464,7 @@ packages:
object-keys: 1.1.1
object.assign: 4.1.5
regexp.prototype.flags: 1.5.2
side-channel: 1.0.5
side-channel: 1.0.6
which-boxed-primitive: 1.0.2
which-collection: 1.0.1
which-typed-array: 1.1.14
@ -12617,7 +12632,7 @@ packages:
hasBin: true
dependencies:
address: 1.2.2
debug: 4.3.4
debug: 4.3.5
transitivePeerDependencies:
- supports-color
dev: false
@ -13264,7 +13279,7 @@ packages:
has-property-descriptors: 1.0.2
has-proto: 1.0.3
has-symbols: 1.0.3
hasown: 2.0.1
hasown: 2.0.2
internal-slot: 1.0.7
is-array-buffer: 3.0.4
is-callable: 1.2.7
@ -13410,13 +13425,13 @@ packages:
dependencies:
get-intrinsic: 1.2.4
has-tostringtag: 1.0.2
hasown: 2.0.1
hasown: 2.0.2
dev: false
/es-shim-unscopables@1.0.2:
resolution: {integrity: sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==}
dependencies:
hasown: 2.0.1
hasown: 2.0.2
dev: false
/es-to-primitive@1.2.1:
@ -13498,7 +13513,7 @@ packages:
peerDependencies:
esbuild: '>=0.12 <1'
dependencies:
debug: 4.3.4
debug: 4.3.5
esbuild: 0.18.20
transitivePeerDependencies:
- supports-color
@ -13990,7 +14005,7 @@ packages:
ajv: 6.12.6
chalk: 4.1.2
cross-spawn: 7.0.3
debug: 4.3.4
debug: 4.3.5
doctrine: 3.0.0
enquirer: 2.4.1
escape-string-regexp: 4.0.0
@ -14976,7 +14991,7 @@ packages:
function-bind: 1.1.2
has-proto: 1.0.3
has-symbols: 1.0.3
hasown: 2.0.1
hasown: 2.0.2
dev: false
/get-nonce@1.0.1:
@ -15667,7 +15682,7 @@ packages:
dependencies:
'@tootallnate/once': 2.0.0
agent-base: 6.0.2
debug: 4.3.4
debug: 4.3.5
transitivePeerDependencies:
- supports-color
dev: false
@ -15730,7 +15745,7 @@ packages:
engines: {node: '>= 6.0.0'}
dependencies:
agent-base: 5.1.1
debug: 4.3.4
debug: 4.3.5
transitivePeerDependencies:
- supports-color
dev: false
@ -15740,7 +15755,7 @@ packages:
engines: {node: '>= 6'}
dependencies:
agent-base: 6.0.2
debug: 4.3.4
debug: 4.3.5
transitivePeerDependencies:
- supports-color
dev: false
@ -15750,7 +15765,7 @@ packages:
engines: {node: '>= 14'}
dependencies:
agent-base: 7.1.1
debug: 4.3.4
debug: 4.3.5
transitivePeerDependencies:
- supports-color
dev: false
@ -15897,8 +15912,8 @@ packages:
engines: {node: '>= 0.4'}
dependencies:
es-errors: 1.3.0
hasown: 2.0.1
side-channel: 1.0.5
hasown: 2.0.2
side-channel: 1.0.6
dev: false
/interpret@3.1.1:
@ -16422,7 +16437,7 @@ packages:
resolution: {integrity: sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==}
engines: {node: '>=10'}
dependencies:
debug: 4.3.4
debug: 4.3.5
istanbul-lib-coverage: 3.2.2
source-map: 0.6.1
transitivePeerDependencies:
@ -19877,7 +19892,7 @@ packages:
engines: {node: '>=8.16.0'}
dependencies:
'@types/mime-types': 2.1.4
debug: 4.3.4
debug: 4.3.5
extract-zip: 1.7.0
https-proxy-agent: 4.0.0
mime: 2.6.0
@ -19930,7 +19945,7 @@ packages:
resolution: {integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==}
engines: {node: '>=0.6'}
dependencies:
side-channel: 1.0.5
side-channel: 1.0.6
dev: false
/qs@6.11.2:
@ -20344,7 +20359,7 @@ packages:
resolution: {integrity: sha512-efCx3b+0Z69/LGJmm9Yvi4cqEdxnoGnxYxGxBghkkTTFeXRtTCmmhO0AnAfHz59k957uTSuy8WaHqOs8wbYUWg==}
engines: {node: '>=6'}
dependencies:
debug: 4.3.4
debug: 4.3.5
module-details-from-path: 1.0.3
resolve: 1.22.8
transitivePeerDependencies:
@ -21149,7 +21164,7 @@ packages:
/spdy-transport@3.0.0:
resolution: {integrity: sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==}
dependencies:
debug: 4.3.4
debug: 4.3.5
detect-node: 2.1.0
hpack.js: 2.1.6
obuf: 1.1.2
@ -21163,7 +21178,7 @@ packages:
resolution: {integrity: sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==}
engines: {node: '>=6.0.0'}
dependencies:
debug: 4.3.4
debug: 4.3.5
handle-thing: 2.0.1
http-deceiver: 1.2.7
select-hose: 2.0.0
@ -21586,7 +21601,7 @@ packages:
dependencies:
component-emitter: 1.3.1
cookiejar: 2.1.4
debug: 4.3.4
debug: 4.3.5
fast-safe-stringify: 2.1.1
form-data: 4.0.0
formidable: 2.1.2
@ -25338,6 +25353,40 @@ packages:
- ts-node
dev: false
file:projects/datalake.tgz(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-pqgfJAfjDTa3AWRK263xljvkd1GLinDFrjTGW7res8krRskMMJ3K6gj3kfnLjyKmWeAesJQ5CSnFybPnPSJq/Q==, tarball: file:projects/datalake.tgz}
id: file:projects/datalake.tgz
name: '@rush-temp/datalake'
version: 0.0.0
dependencies:
'@types/jest': 29.5.12
'@types/node': 20.11.19
'@types/node-fetch': 2.6.11
'@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.56.0)(typescript@5.3.3)
'@typescript-eslint/parser': 6.21.0(eslint@8.56.0)(typescript@5.3.3)
eslint: 8.56.0
eslint-config-standard-with-typescript: 40.0.0(@typescript-eslint/eslint-plugin@6.21.0)(eslint-plugin-import@2.29.1)(eslint-plugin-n@15.7.0)(eslint-plugin-promise@6.1.1)(eslint@8.56.0)(typescript@5.3.3)
eslint-plugin-import: 2.29.1(eslint@8.56.0)
eslint-plugin-n: 15.7.0(eslint@8.56.0)
eslint-plugin-promise: 6.1.1(eslint@8.56.0)
form-data: 4.0.0
jest: 29.7.0(@types/node@20.11.19)(ts-node@10.9.2)
node-fetch: 2.7.0
prettier: 3.2.5
ts-jest: 29.1.2(esbuild@0.20.1)(jest@29.7.0)(typescript@5.3.3)
typescript: 5.3.3
transitivePeerDependencies:
- '@babel/core'
- '@jest/types'
- babel-jest
- babel-plugin-macros
- encoding
- esbuild
- node-notifier
- supports-color
- ts-node
dev: false
file:projects/desktop-1.tgz(webpack@5.90.3):
resolution: {integrity: sha512-Fkk5uNa4NwlCVU5yJUf4X3FcGljXj0qUH7iyoCEAk8EKQ/Mi3OfG/KqK03kEeuM1KP8T1CtArtUhWDsM66/AFQ==, tarball: file:projects/desktop-1.tgz}
id: file:projects/desktop-1.tgz
@ -27110,7 +27159,7 @@ packages:
dev: false
file:projects/model-all.tgz:
resolution: {integrity: sha512-iZAri5sHzRIutIt2bdrG1zI/QAu/lt1KC14lDRvHKLCaEtThF4RLlBqYHQ/rZM8aZyHIiGLwVztxvzR8D5EnZA==, tarball: file:projects/model-all.tgz}
resolution: {integrity: sha512-GYOekXK7++TNstTuTVtmUp1crs2chkWBmgdMBGzWaKVnuYbvHCKSfaNhaZfnAKV3ZnXc4nrbFbq6MUZgu97hnQ==, tarball: file:projects/model-all.tgz}
name: '@rush-temp/model-all'
version: 0.0.0
dependencies:
@ -29515,7 +29564,7 @@ packages:
dev: false
file:projects/pod-server.tgz:
resolution: {integrity: sha512-91Ac7EN5mpCKBG8sIWUljU+qSb6mtEJNVt9TynP8U0++Bhlmyvcryd4k2rMC5S+M/GvmlikUpAZ+MdIBsqUM+g==, tarball: file:projects/pod-server.tgz}
resolution: {integrity: sha512-cbeFqSKcEIP6H/55Ux47qcZ1os7nwgBryGQGIF3DILmB/1J7wc6dlbJizE6S7/f3PON5c3H5s7VY2NmpVzhD5g==, tarball: file:projects/pod-server.tgz}
name: '@rush-temp/pod-server'
version: 0.0.0
dependencies:
@ -29650,7 +29699,7 @@ packages:
dev: false
file:projects/pod-telegram-bot.tgz(bufferutil@4.0.8)(utf-8-validate@6.0.4):
resolution: {integrity: sha512-T7EdAT4nMEnEDROxVBBAwOD7ssjiSl6eOr96YFwfZvHlbaR6wJm/XYNzwOM61oc0+rjfnmkmM+FzOwTSifePEA==, tarball: file:projects/pod-telegram-bot.tgz}
resolution: {integrity: sha512-nHK8VvEEKMela0QxFYFIFHgIjnQj8A01bct7tu+T54i9PP0bgk1zuS+6neAPX45/su+hRG+oiXjdXnuAPMYv3w==, tarball: file:projects/pod-telegram-bot.tgz}
id: file:projects/pod-telegram-bot.tgz
name: '@rush-temp/pod-telegram-bot'
version: 0.0.0
@ -29835,13 +29884,14 @@ packages:
dev: false
file:projects/presentation.tgz(@types/node@20.11.19)(esbuild@0.20.1)(postcss-load-config@4.0.2)(postcss@8.4.35)(ts-node@10.9.2):
resolution: {integrity: sha512-qkCAH3xI1PgIlcZMYba7hsH3mpizW9nfMF13DaxgTu7glkC4ycPl+CHxhiqUadpHPfPQXNk6g7mNj2fS9ZyuGw==, tarball: file:projects/presentation.tgz}
resolution: {integrity: sha512-r+NP0EMgEeKbfaa4v8P1Iho0cfYqe9PhOBfV6SPd/9xnNPt42nK9Gu4r5so1LTolhEUzbFiKh7zSX1ADL5e/3g==, tarball: file:projects/presentation.tgz}
id: file:projects/presentation.tgz
name: '@rush-temp/presentation'
version: 0.0.0
dependencies:
'@types/jest': 29.5.12
'@types/png-chunks-extract': 1.0.2
'@types/uuid': 8.3.4
'@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.56.0)(typescript@5.3.3)
'@typescript-eslint/parser': 6.21.0(eslint@8.56.0)(typescript@5.3.3)
eslint: 8.56.0
@ -29863,6 +29913,7 @@ packages:
svelte-preprocess: 5.1.3(postcss-load-config@4.0.2)(postcss@8.4.35)(sass@1.71.1)(svelte@4.2.12)(typescript@5.3.3)
ts-jest: 29.1.2(esbuild@0.20.1)(jest@29.7.0)(typescript@5.3.3)
typescript: 5.3.3
uuid: 8.3.2
transitivePeerDependencies:
- '@babel/core'
- '@jest/types'
@ -30161,7 +30212,7 @@ packages:
dev: false
file:projects/qms-doc-import-tool.tgz:
resolution: {integrity: sha512-mzTjks1peZbU8165Sd1xaoYs9H9f37XszY2zoxtggnaJrP1GeEktuX0TtNTz1XJRsMOg0+0K3s3CRYUEM9+cYw==, tarball: file:projects/qms-doc-import-tool.tgz}
resolution: {integrity: sha512-5CfQNuO9R7VNOvU84mR2y/EPQW9tQFUOJ1hX+BWtAGu0TIyO7fHfj57SqGMV8yFqGxjDfBpDpPigZnCM6OUN8A==, tarball: file:projects/qms-doc-import-tool.tgz}
name: '@rush-temp/qms-doc-import-tool'
version: 0.0.0
dependencies:
@ -30746,7 +30797,7 @@ packages:
dev: false
file:projects/s3.tgz(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-m3PN2etEQkB2hQVrl2Bd5QgHzOQ+1C4e/zk30MuQoIqQiCDR3Uf6/ok0hqza3H69KWkkKjIC5rPihVEfkNt8aA==, tarball: file:projects/s3.tgz}
resolution: {integrity: sha512-K8tCIa7XhsCfCud4PK5ap0pAoF4fGanurF5AJ7Otx97p6m170W2DJIkiplk2x41Ksy9+Zw9dEF2VWRgdDrWX5A==, tarball: file:projects/s3.tgz}
id: file:projects/s3.tgz
name: '@rush-temp/s3'
version: 0.0.0
@ -32056,7 +32107,7 @@ packages:
dev: false
file:projects/server-notification.tgz(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-jGABHblqLJRd4EjN6dDbXqUJQxr88DjqFkyTva4La4HBrK1ge4g0v6xF450nn7BSOPxLtTZs+60UzaY9lLi0qg==, tarball: file:projects/server-notification.tgz}
resolution: {integrity: sha512-aTPQrWC0ymA+iBGLDS9+d8HJvvXCXRdj+XdAe6irRywRPzKjc+niHzm+wVMLRTELYR3RWt/HTGZBVeRSMRgS4g==, tarball: file:projects/server-notification.tgz}
id: file:projects/server-notification.tgz
name: '@rush-temp/server-notification'
version: 0.0.0
@ -32087,7 +32138,7 @@ packages:
dev: false
file:projects/server-pipeline.tgz:
resolution: {integrity: sha512-VMd/X1M3HotOPN51cVDAHjKrHhNm8/5GMMiZ4WklnQVtDfmDHocXKsC+FgOLXpb9RHYhtsz0Arwi7pW7xIar5A==, tarball: file:projects/server-pipeline.tgz}
resolution: {integrity: sha512-qGocP2RKEaAspOP4lJZzIYkjIk/hbsYMGGyC4oO62GgWyfBw3O6o2u2R9sU/Jtkkeu2/GBIxkkAc9XEzQ4LbDQ==, tarball: file:projects/server-pipeline.tgz}
name: '@rush-temp/server-pipeline'
version: 0.0.0
dependencies:
@ -32338,7 +32389,7 @@ packages:
dev: false
file:projects/server-storage.tgz(esbuild@0.20.1):
resolution: {integrity: sha512-nuggpJP7L/8s3sSJ393e8cISWODMQoCKtqyewY8lN7+n8gnL7RHlVDdinb4N9qvUACoSoExtK41dqAHoTnZxlQ==, tarball: file:projects/server-storage.tgz}
resolution: {integrity: sha512-8Vd7+fEnqGcQHOUD4gWdMOlB6X3Ka7kOji7RfrZdlD6es58ykSk5MXH5pPXE0hWJptxlJ0DgLXAAYdptrKCFQQ==, tarball: file:projects/server-storage.tgz}
id: file:projects/server-storage.tgz
name: '@rush-temp/server-storage'
version: 0.0.0
@ -34157,7 +34208,7 @@ packages:
dev: false
file:projects/tool.tgz(bufferutil@4.0.8)(utf-8-validate@6.0.4):
resolution: {integrity: sha512-lU7I2J+om0YHTY8FtcR2DtsEo4wSjZFP7FxAmpPgzFRSz7SwPtvdNDiXlM5zdMLiDszCXxUE/fNluCcxOeO8rg==, tarball: file:projects/tool.tgz}
resolution: {integrity: sha512-eZQ8XE+deR+AVtEVrG2p1xfsPLpLfDF4HkcWFNK9SXarUz05pP5xp2KPLXLiVYBbG81Qx56t5jjWi6bMaboTwQ==, tarball: file:projects/tool.tgz}
id: file:projects/tool.tgz
name: '@rush-temp/tool'
version: 0.0.0

View File

@ -201,6 +201,7 @@ export async function configurePlatform (): Promise<void> {
setMetadata(login.metadata.AccountsUrl, config.ACCOUNTS_URL)
setMetadata(presentation.metadata.UploadURL, config.UPLOAD_URL)
setMetadata(presentation.metadata.FilesURL, config.FILES_URL)
setMetadata(presentation.metadata.CollaboratorUrl, config.COLLABORATOR_URL)
setMetadata(presentation.metadata.CollaboratorApiUrl, config.COLLABORATOR_API_URL)
setMetadata(presentation.metadata.PreviewConfig, parsePreviewConfig(config.PREVIEW_CONFIG))

View File

@ -70,7 +70,9 @@ const expose: IPCMainExposed = {
...serverConfig,
...mainConfig,
INITIAL_URL: openArg ?? '',
UPLOAD_URL: concatLink(mainConfig.FRONT_URL, serverConfig.UPLOAD_URL),
UPLOAD_URL: (serverConfig.UPLOAD_URL as string).includes('://')
? serverConfig.UPLOAD_URL
: concatLink(mainConfig.FRONT_URL, serverConfig.UPLOAD_URL),
MODEL_VERSION: mainConfig.MODEL_VERSION,
VERSION: mainConfig.VERSION
}

View File

@ -8,6 +8,7 @@ export interface Config {
COLLABORATOR_URL: string
COLLABORATOR_API_URL: string
FRONT_URL: string
FILES_URL: string
UPLOAD_URL: string
MODEL_VERSION?: string
VERSION?: string

View File

@ -77,7 +77,6 @@ services:
- COLLABORATOR_PORT=3078
- SECRET=secret
- ACCOUNTS_URL=http://account:3000
- UPLOAD_URL=/files
- MONGO_URL=mongodb://mongodb:27017?compressors=snappy
- 'MONGO_OPTIONS={"appName":"collaborator","maxPoolSize":2}'
- STORAGE_CONFIG=${STORAGE_CONFIG}
@ -100,7 +99,7 @@ services:
- MONGO_URL=mongodb://mongodb:27017?compressors=snappy
- 'MONGO_OPTIONS={"appName":"front","maxPoolSize":1}'
- ACCOUNTS_URL=http://localhost:3000
- UPLOAD_URL=/files
- UPLOAD_URL=/files
- ELASTIC_URL=http://elastic:9200
- GMAIL_URL=http://localhost:8088
- CALENDAR_URL=http://localhost:8095
@ -116,7 +115,7 @@ services:
- DESKTOP_UPDATES_URL=https://dist.huly.io
- DESKTOP_UPDATES_CHANNEL=dev
- BRANDING_URL=http://localhost:8087/branding.json
restart: unless-stopped
restart: unless-stopped
transactor:
image: hardcoreeng/transactor
links:
@ -145,7 +144,6 @@ services:
- STORAGE_CONFIG=${STORAGE_CONFIG}
- REKONI_URL=http://rekoni:4004
- FRONT_URL=http://localhost:8087
- UPLOAD_URL=http://localhost:8087/files
# - APM_SERVER_URL=http://apm-server:8200
- SES_URL=''
- ACCOUNTS_URL=http://account:3000

View File

@ -120,6 +120,7 @@ import { Analytics } from '@hcengineering/analytics'
export interface Config {
ACCOUNTS_URL: string
UPLOAD_URL: string
FILES_URL: string
MODEL_VERSION: string
VERSION: string
COLLABORATOR_URL: string
@ -284,6 +285,7 @@ export async function configurePlatform() {
// tryOpenInDesktopApp(config.APP_PROTOCOL ?? 'huly://')
setMetadata(login.metadata.AccountsUrl, config.ACCOUNTS_URL)
setMetadata(presentation.metadata.FilesURL, config.FILES_URL)
setMetadata(presentation.metadata.UploadURL, config.UPLOAD_URL)
setMetadata(presentation.metadata.CollaboratorUrl, config.COLLABORATOR_URL)
setMetadata(presentation.metadata.CollaboratorApiUrl, config.COLLABORATOR_API_URL)

View File

@ -36,7 +36,8 @@
"ts-jest": "^29.1.1",
"@types/jest": "^29.5.5",
"svelte-eslint-parser": "^0.33.1",
"@types/png-chunks-extract": "^1.0.2"
"@types/png-chunks-extract": "^1.0.2",
"@types/uuid": "^8.3.1"
},
"dependencies": {
"@hcengineering/platform": "^0.6.11",
@ -52,7 +53,8 @@
"@hcengineering/client": "^0.6.18",
"@hcengineering/collaborator-client": "^0.6.4",
"fast-equals": "^5.0.1",
"png-chunks-extract": "^1.0.0"
"png-chunks-extract": "^1.0.0",
"uuid": "^8.3.2"
},
"repository": "https://github.com/hcengineering/platform",
"publishConfig": {

View File

@ -18,9 +18,9 @@
import presentation from '../plugin'
import { getPreviewType, previewTypes } from '../file'
import { getFileUrl } from '../file'
import { getPreviewType, previewTypes } from '../filetypes'
import { BlobMetadata, FilePreviewExtension } from '../types'
import { getFileUrl } from '../utils'
export let file: Ref<Blob>
export let name: string

View File

@ -20,8 +20,8 @@
import presentation from '../plugin'
import { getFileUrl } from '../file'
import { BlobMetadata } from '../types'
import { getFileUrl } from '../utils'
import ActionContext from './ActionContext.svelte'
import FilePreview from './FilePreview.svelte'

View File

@ -14,26 +14,78 @@
//
import { concatLink, type Blob, type Ref } from '@hcengineering/core'
import { PlatformError, Severity, Status, getMetadata, getResource } from '@hcengineering/platform'
import { type PopupAlignment } from '@hcengineering/ui'
import { writable } from 'svelte/store'
import { PlatformError, Severity, Status, getMetadata } from '@hcengineering/platform'
import { v4 as uuid } from 'uuid'
import plugin from './plugin'
import type { BlobMetadata, FileOrBlob, FilePreviewExtension } from './types'
import { createQuery } from './utils'
import { decodeTokenPayload } from './utils'
interface FileUploadError {
key: string
error: string
}
interface FileUploadSuccess {
key: string
id: string
}
type FileUploadResult = FileUploadSuccess | FileUploadError
const defaultUploadUrl = '/files'
const defaultFilesUrl = '/files/:workspace/:filename?file=:blobId&workspace=:workspace'
function getFilesUrl (): string {
const filesUrl = getMetadata(plugin.metadata.FilesURL) ?? defaultFilesUrl
const frontUrl = getMetadata(plugin.metadata.FrontUrl) ?? window.location.origin
return filesUrl.includes('://') ? filesUrl : concatLink(frontUrl, filesUrl)
}
export function getCurrentWorkspace (): string {
return decodeTokenPayload(getMetadata(plugin.metadata.Token) ?? '').workspace
}
/**
* @public
*/
export function generateFileId (): string {
return uuid()
}
/**
* @public
*/
export function getUploadUrl (): string {
const template = getMetadata(plugin.metadata.UploadURL) ?? defaultUploadUrl
return template.replaceAll(':workspace', encodeURIComponent(getCurrentWorkspace()))
}
/**
* @public
*/
export function getFileUrl (file: string, filename?: string): string {
if (file.includes('://')) {
return file
}
const template = getFilesUrl()
return template
.replaceAll(':filename', encodeURIComponent(filename ?? ''))
.replaceAll(':workspace', encodeURIComponent(getCurrentWorkspace()))
.replaceAll(':blobId', encodeURIComponent(file))
}
/**
* @public
*/
export async function uploadFile (file: File): Promise<Ref<Blob>> {
const uploadUrl = getMetadata(plugin.metadata.UploadURL)
if (uploadUrl === undefined) {
throw Error('UploadURL is not defined')
}
const uploadUrl = getUploadUrl()
const id = generateFileId()
const data = new FormData()
data.append('file', file)
data.append('file', file, id)
const resp = await fetch(uploadUrl, {
method: 'POST',
@ -51,17 +103,25 @@ export async function uploadFile (file: File): Promise<Ref<Blob>> {
}
}
return (await resp.text()) as Ref<Blob>
const result = (await resp.json()) as FileUploadResult[]
if (result.length !== 1) {
throw Error('Bad upload response')
}
if ('error' in result[0]) {
throw Error(`Failed to upload file: ${result[0].error}`)
}
return id as Ref<Blob>
}
/**
* @public
*/
export async function deleteFile (id: string): Promise<void> {
const uploadUrl = getMetadata(plugin.metadata.UploadURL) ?? ''
const fileUrl = getFileUrl(id)
const url = concatLink(uploadUrl, `?file=${id}`)
const resp = await fetch(url, {
const resp = await fetch(fileUrl, {
method: 'DELETE',
headers: {
Authorization: 'Bearer ' + (getMetadata(plugin.metadata.Token) as string)
@ -72,108 +132,3 @@ export async function deleteFile (id: string): Promise<void> {
throw new Error('Failed to delete file')
}
}
/**
* @public
*/
export async function getFileMetadata (file: FileOrBlob, uuid: Ref<Blob>): Promise<BlobMetadata | undefined> {
const previewType = await getPreviewType(file.type, $previewTypes)
if (previewType?.metadataProvider === undefined) {
return undefined
}
const metadataProvider = await getResource(previewType.metadataProvider)
if (metadataProvider === undefined) {
return undefined
}
return await metadataProvider(file, uuid)
}
/**
* @public
*/
export const previewTypes = writable<FilePreviewExtension[]>([])
const previewTypesQuery = createQuery(true)
previewTypesQuery.query(plugin.class.FilePreviewExtension, {}, (result) => {
previewTypes.set(result)
})
let $previewTypes: FilePreviewExtension[] = []
previewTypes.subscribe((it) => {
$previewTypes = it
})
/**
* @public
*/
export async function canPreviewFile (contentType: string, _previewTypes: FilePreviewExtension[]): Promise<boolean> {
for (const previewType of _previewTypes) {
if (await isApplicableType(previewType, contentType)) {
return true
}
}
return false
}
/**
* @public
*/
export async function getPreviewType (
contentType: string,
_previewTypes: FilePreviewExtension[]
): Promise<FilePreviewExtension | undefined> {
const applicableTypes: FilePreviewExtension[] = []
for (const previewType of _previewTypes) {
if (await isApplicableType(previewType, contentType)) {
applicableTypes.push(previewType)
}
}
return applicableTypes.sort(comparePreviewTypes)[0]
}
/**
* @public
*/
export function getPreviewAlignment (contentType: string): PopupAlignment {
if (contentType.startsWith('image/')) {
return 'centered'
} else if (contentType.startsWith('video/')) {
return 'centered'
} else {
return 'float'
}
}
function getPreviewTypeRegExp (type: string): RegExp {
return new RegExp(`^${type.replaceAll('/', '\\/').replaceAll('*', '.*')}$`)
}
async function isApplicableType (
{ contentType, availabilityChecker }: FilePreviewExtension,
_contentType: string
): Promise<boolean> {
const checkAvailability = availabilityChecker !== undefined ? await getResource(availabilityChecker) : undefined
const isAvailable: boolean = checkAvailability === undefined || (await checkAvailability())
return (
isAvailable &&
(Array.isArray(contentType) ? contentType : [contentType]).some((type) =>
getPreviewTypeRegExp(type).test(_contentType)
)
)
}
function comparePreviewTypes (a: FilePreviewExtension, b: FilePreviewExtension): number {
if (a.order === undefined && b.order === undefined) {
return 0
} else if (a.order === undefined) {
return -1
} else if (b.order === undefined) {
return 1
} else {
return a.order - b.order
}
}

View File

@ -0,0 +1,128 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import { type Blob, type Ref } from '@hcengineering/core'
import { getResource } from '@hcengineering/platform'
import { type PopupAlignment } from '@hcengineering/ui'
import { writable } from 'svelte/store'
import plugin from './plugin'
import type { BlobMetadata, FileOrBlob, FilePreviewExtension } from './types'
import { createQuery } from './utils'
/**
* @public
*/
export async function getFileMetadata (file: FileOrBlob, uuid: Ref<Blob>): Promise<BlobMetadata | undefined> {
const previewType = await getPreviewType(file.type, $previewTypes)
if (previewType?.metadataProvider === undefined) {
return undefined
}
const metadataProvider = await getResource(previewType.metadataProvider)
if (metadataProvider === undefined) {
return undefined
}
return await metadataProvider(file, uuid)
}
/**
* @public
*/
export const previewTypes = writable<FilePreviewExtension[]>([])
const previewTypesQuery = createQuery(true)
previewTypesQuery.query(plugin.class.FilePreviewExtension, {}, (result) => {
previewTypes.set(result)
})
let $previewTypes: FilePreviewExtension[] = []
previewTypes.subscribe((it) => {
$previewTypes = it
})
/**
* @public
*/
export async function canPreviewFile (contentType: string, _previewTypes: FilePreviewExtension[]): Promise<boolean> {
for (const previewType of _previewTypes) {
if (await isApplicableType(previewType, contentType)) {
return true
}
}
return false
}
/**
* @public
*/
export async function getPreviewType (
contentType: string,
_previewTypes: FilePreviewExtension[]
): Promise<FilePreviewExtension | undefined> {
const applicableTypes: FilePreviewExtension[] = []
for (const previewType of _previewTypes) {
if (await isApplicableType(previewType, contentType)) {
applicableTypes.push(previewType)
}
}
return applicableTypes.sort(comparePreviewTypes)[0]
}
/**
* @public
*/
export function getPreviewAlignment (contentType: string): PopupAlignment {
if (contentType.startsWith('image/')) {
return 'centered'
} else if (contentType.startsWith('video/')) {
return 'centered'
} else {
return 'float'
}
}
function getPreviewTypeRegExp (type: string): RegExp {
return new RegExp(`^${type.replaceAll('/', '\\/').replaceAll('*', '.*')}$`)
}
async function isApplicableType (
{ contentType, availabilityChecker }: FilePreviewExtension,
_contentType: string
): Promise<boolean> {
const checkAvailability = availabilityChecker !== undefined ? await getResource(availabilityChecker) : undefined
const isAvailable: boolean = checkAvailability === undefined || (await checkAvailability())
return (
isAvailable &&
(Array.isArray(contentType) ? contentType : [contentType]).some((type) =>
getPreviewTypeRegExp(type).test(_contentType)
)
)
}
function comparePreviewTypes (a: FilePreviewExtension, b: FilePreviewExtension): number {
if (a.order === undefined && b.order === undefined) {
return 0
} else if (a.order === undefined) {
return -1
} else if (b.order === undefined) {
return 1
} else {
return a.order - b.order
}
}

View File

@ -51,6 +51,7 @@ export { default } from './plugin'
export * from './types'
export * from './utils'
export * from './file'
export * from './filetypes'
export * from './drafts'
export { presentationId }
export * from './collaborator'

View File

@ -130,10 +130,12 @@ export default plugin(presentationId, {
FrontVersion: '' as Metadata<string>,
Draft: '' as Metadata<Record<string, any>>,
UploadURL: '' as Metadata<string>,
FilesURL: '' as Metadata<string>,
CollaboratorUrl: '' as Metadata<string>,
CollaboratorApiUrl: '' as Metadata<string>,
Token: '' as Metadata<string>,
Endpoint: '' as Metadata<string>,
Workspace: '' as Metadata<string>,
FrontUrl: '' as Asset,
PreviewConfig: '' as Metadata<PreviewConfig | undefined>,
ClientHook: '' as Metadata<ClientHook>,

View File

@ -1,14 +1,15 @@
import type { Blob, Ref } from '@hcengineering/core'
import { concatLink } from '@hcengineering/core'
import { getMetadata } from '@hcengineering/platform'
import { getCurrentWorkspaceUrl, getFileUrl } from '.'
import { getFileUrl, getCurrentWorkspace } from './file'
import presentation from './plugin'
export interface PreviewConfig {
previewUrl: string
}
const defaultPreview = (): string => `/files/${getCurrentWorkspaceUrl()}?file=:blobId&size=:size`
const defaultPreview = (): string => `/files/${getCurrentWorkspace()}?file=:blobId&size=:size`
/**
*
@ -53,7 +54,7 @@ export function getSrcSet (_blob: Ref<Blob>, width?: number): string {
}
function blobToSrcSet (cfg: PreviewConfig, blob: Ref<Blob>, width: number | undefined): string {
let url = cfg.previewUrl.replaceAll(':workspace', encodeURIComponent(getCurrentWorkspaceUrl()))
let url = cfg.previewUrl.replaceAll(':workspace', encodeURIComponent(getCurrentWorkspace()))
const downloadUrl = getFileUrl(blob)
const frontUrl = getMetadata(presentation.metadata.FrontUrl) ?? window.location.origin
@ -74,7 +75,7 @@ function blobToSrcSet (cfg: PreviewConfig, blob: Ref<Blob>, width: number | unde
fu.replaceAll(':size', `${width * 3}`) +
' 3x'
} else {
result += fu.replaceAll(':size', `${-1}`)
result += downloadUrl
}
return result

View File

@ -18,7 +18,6 @@ import { Analytics } from '@hcengineering/analytics'
import core, {
TxOperations,
TxProcessor,
concatLink,
getCurrentAccount,
reduceCalls,
type AnyAttribute,
@ -34,7 +33,6 @@ import core, {
type Hierarchy,
type Mixin,
type Obj,
type Blob as PlatformBlob,
type Ref,
type RefTo,
type SearchOptions,
@ -457,22 +455,6 @@ export function getCurrentWorkspaceUrl (): string {
return wsId
}
/**
* @public
*/
export function getFileUrl (file: Ref<PlatformBlob>, filename?: string, useToken?: boolean): string {
if (file.includes('://')) {
return file
}
const frontUrl = getMetadata(plugin.metadata.FrontUrl) ?? window.location.origin
let uploadUrl = getMetadata(plugin.metadata.UploadURL) ?? ''
if (!uploadUrl.includes('://')) {
uploadUrl = concatLink(frontUrl ?? '', uploadUrl)
}
const token = getMetadata(plugin.metadata.Token) ?? ''
return `${uploadUrl}/${getCurrentWorkspaceUrl()}${filename !== undefined ? '/' + encodeURIComponent(filename) : ''}?file=${file}${useToken === true ? `&token=${token}` : ''}`
}
export function sizeToWidth (size: string): number | undefined {
let width: number | undefined
switch (size) {

View File

@ -64,6 +64,8 @@ export interface StorageAdapter {
offset: number,
length?: number
) => Promise<Readable>
getUrl: (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string) => Promise<string>
}
export interface StorageAdapterEx extends StorageAdapter {
@ -161,6 +163,10 @@ export class DummyStorageAdapter implements StorageAdapter, StorageAdapterEx {
): Promise<UploadedObjectInfo> {
throw new Error('not implemented')
}
async getUrl (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<string> {
throw new Error('not implemented')
}
}
export function createDummyStorageAdapter (): StorageAdapter {

View File

@ -55,7 +55,7 @@
async function doSync (): Promise<void> {
loading = true
const uploadUrl = window.location.origin + getMetadata(presentation.metadata.UploadURL)
const frontUrl = getMetadata(presentation.metadata.FrontUrl) ?? window.location.origin
const token = (getMetadata(presentation.metadata.Token) as string) ?? ''
const mappedFilter: Record<string, any> = {}
@ -75,7 +75,7 @@
email: '',
endpoint: ''
},
frontUrl: uploadUrl,
frontUrl,
monitor: (total: number) => {
docsProcessed++
state = `processed: ${docsProcessed}/${total ?? 1}`

View File

@ -12,7 +12,6 @@ import core, {
import login, { loginId } from '@hcengineering/login'
import { getMetadata, getResource, setMetadata } from '@hcengineering/platform'
import presentation, { closeClient, refreshClient, setClient, setPresentationCookie } from '@hcengineering/presentation'
import { getCurrentWorkspaceUrl } from '@hcengineering/presentation/src/utils'
import { fetchMetadataLocalStorage, getCurrentLocation, navigate, setMetadataLocalStorage } from '@hcengineering/ui'
import { writable } from 'svelte/store'
@ -35,8 +34,6 @@ export async function connect (title: string): Promise<Client | undefined> {
}
setMetadata(presentation.metadata.Token, token)
setPresentationCookie(token, getCurrentWorkspaceUrl())
const selectWorkspace = await getResource(login.function.SelectWorkspace)
const workspaceLoginInfo = (await selectWorkspace(ws, token))[1]
if (workspaceLoginInfo == null) {
@ -46,7 +43,10 @@ export async function connect (title: string): Promise<Client | undefined> {
return
}
setPresentationCookie(token, workspaceLoginInfo.workspaceId)
setMetadata(presentation.metadata.Token, token)
setMetadata(presentation.metadata.Workspace, workspaceLoginInfo.workspace)
setMetadata(presentation.metadata.Endpoint, workspaceLoginInfo.endpoint)
if (_token !== token && _client !== undefined) {
@ -178,9 +178,13 @@ function clearMetadata (ws: string): void {
delete tokens[loc.path[1]]
setMetadataLocalStorage(login.metadata.LoginTokens, tokens)
}
const currentWorkspace = getMetadata(presentation.metadata.Workspace)
if (currentWorkspace !== undefined) {
setPresentationCookie('', currentWorkspace)
}
setMetadata(presentation.metadata.Token, null)
setMetadataLocalStorage(login.metadata.LastToken, null)
setPresentationCookie('', getCurrentWorkspaceUrl())
setMetadataLocalStorage(login.metadata.LoginEmail, null)
void closeClient()
}

View File

@ -440,6 +440,7 @@ export function navigateToWorkspace (
return
}
setMetadata(presentation.metadata.Token, loginInfo.token)
setMetadata(presentation.metadata.Workspace, loginInfo.workspace)
setLoginInfo(loginInfo)
if (navigateUrl !== undefined) {
@ -894,6 +895,7 @@ export async function afterConfirm (clearQuery = false): Promise<void> {
const result = (await selectWorkspace(joinedWS[0].workspace, null))[1]
if (result !== undefined) {
setMetadata(presentation.metadata.Token, result.token)
setMetadata(presentation.metadata.Workspace, result.workspace)
setMetadataLocalStorage(login.metadata.LastToken, result.token)
setLoginInfo(result)

View File

@ -42,6 +42,7 @@ export interface Workspace {
*/
export interface WorkspaceLoginInfo extends LoginInfo {
workspace: string
workspaceId: string
creating?: boolean
createProgress?: number
}

View File

@ -61,7 +61,7 @@ export const ImageUploadExtension = Extension.create<ImageUploadOptions>({
for (const uri of uris) {
if (uri !== '') {
const url = new URL(uri)
// TODO datalake support
const _file = (url.searchParams.get('file') ?? '').split('/').join('')
if (_file.trim().length === 0) {

View File

@ -15,7 +15,7 @@
import { type Blob, type Ref, generateId } from '@hcengineering/core'
import { getMetadata } from '@hcengineering/platform'
import presentation, { getFileMetadata } from '@hcengineering/presentation'
import presentation, { generateFileId, getFileMetadata, getUploadUrl } from '@hcengineering/presentation'
import { getCurrentLanguage } from '@hcengineering/theme'
import type { FileUploadCallback, FileUploadOptions } from '@hcengineering/uploader'
@ -72,14 +72,26 @@ export function getUppy (options: FileUploadOptions, onFileUploaded?: FileUpload
}
const uppy = new Uppy<UppyMeta, UppyBody>(uppyOptions).use(XHR, {
endpoint: getMetadata(presentation.metadata.UploadURL) ?? '',
endpoint: getUploadUrl(),
method: 'POST',
headers: {
Authorization: 'Bearer ' + (getMetadata(presentation.metadata.Token) as string)
},
getResponseData: (body: string): UppyBody => {
return {
uuid: body
}
// getResponseData: (body: string): UppyBody => {
// const data = JSON.parse(body)
// return {
// uuid: data[0].id
// }
// }
})
uppy.addPreProcessor(async (fileIds: string[]) => {
for (const fileId of fileIds) {
const file = uppy.getFile(fileId)
if (file != null) {
const uuid = generateFileId()
file.meta.uuid = uuid
file.meta.name = uuid
}
}
})
@ -88,7 +100,7 @@ export function getUppy (options: FileUploadOptions, onFileUploaded?: FileUpload
uppy.addPostProcessor(async (fileIds: string[]) => {
for (const fileId of fileIds) {
const file = uppy.getFile(fileId)
const uuid = file?.response?.body?.uuid as Ref<Blob>
const uuid = file.meta.uuid as Ref<Blob>
if (uuid !== undefined) {
const metadata = await getFileMetadata(file.data, uuid)
await onFileUploaded(uuid, file.name, file.data, file.meta.relativePath, metadata)

View File

@ -37,13 +37,14 @@
</div>
{/if}
<img
on:load={(evt) => {
on:load={() => {
loading = false
}}
class="object-contain mx-auto"
style:max-width={width}
style:max-height={height}
src={blobRef.src}
srcset={blobRef.srcset}
alt={name}
style:height={loading ? '0' : ''}
/>

View File

@ -17,7 +17,6 @@ import login, { loginId } from '@hcengineering/login'
import { broadcastEvent, getMetadata, getResource, setMetadata } from '@hcengineering/platform'
import presentation, {
closeClient,
getCurrentWorkspaceUrl,
purgeClient,
refreshClient,
setClient,
@ -77,6 +76,7 @@ export async function connect (title: string): Promise<Client | undefined> {
tokens[ws] = workspaceLoginInfo.token
token = workspaceLoginInfo.token
setMetadataLocalStorage(login.metadata.LoginTokens, tokens)
setMetadata(presentation.metadata.Workspace, workspaceLoginInfo.workspace)
}
setMetadata(presentation.metadata.Token, token)
@ -106,7 +106,9 @@ export async function connect (title: string): Promise<Client | undefined> {
}
}
setPresentationCookie(token, getCurrentWorkspaceUrl())
if (workspaceLoginInfo !== undefined) {
setPresentationCookie(token, workspaceLoginInfo.workspaceId)
}
setMetadataLocalStorage(login.metadata.LoginEndpoint, workspaceLoginInfo?.endpoint)
@ -344,9 +346,14 @@ function clearMetadata (ws: string): void {
delete tokens[loc.path[1]]
setMetadataLocalStorage(login.metadata.LoginTokens, tokens)
}
const currentWorkspace = getMetadata(presentation.metadata.Workspace)
if (currentWorkspace !== undefined) {
setPresentationCookie('', currentWorkspace)
}
setMetadata(presentation.metadata.Token, null)
setMetadata(presentation.metadata.Workspace, null)
setMetadataLocalStorage(login.metadata.LastToken, null)
setPresentationCookie('', getCurrentWorkspaceUrl())
setMetadataLocalStorage(login.metadata.LoginEndpoint, null)
setMetadataLocalStorage(login.metadata.LoginEmail, null)
void closeClient()

View File

@ -47,7 +47,7 @@ const storageConfig: StorageConfiguration = storageConfigFromEnv()
const lastNameFirst = process.env.LAST_NAME_FIRST === 'true'
setMetadata(contactPlugin.metadata.LastNameFirst, lastNameFirst)
setMetadata(serverCore.metadata.FrontUrl, config.frontUrl)
setMetadata(serverCore.metadata.UploadURL, config.uploadUrl)
setMetadata(serverCore.metadata.FilesUrl, config.filesUrl)
setMetadata(serverToken.metadata.Secret, config.serverSecret)
setMetadata(serverNotification.metadata.SesUrl, config.sesUrl ?? '')
setMetadata(notification.metadata.PushPublicKey, config.pushPublicKey)

View File

@ -59,7 +59,6 @@ export async function configurePlatform() {
const config = await (await fetch('/config.json')).json()
console.log('loading configuration', config)
setMetadata(login.metadata.AccountsUrl, config.ACCOUNTS_URL)
setMetadata(login.metadata.UploadUrl, config.UPLOAD_URL)
if (config.MODEL_VERSION != null) {
console.log('Minimal Model version requirement', config.MODEL_VERSION)

View File

@ -1487,6 +1487,11 @@
"projectFolder": "server/s3",
"shouldPublish": false
},
{
"packageName": "@hcengineering/datalake",
"projectFolder": "server/datalake",
"shouldPublish": false
},
{
"packageName": "@hcengineering/bitrix",
"projectFolder": "plugins/bitrix",

View File

@ -69,7 +69,7 @@ import notification, {
PushSubscription
} from '@hcengineering/notification'
import { getMetadata, getResource, translate } from '@hcengineering/platform'
import type { TriggerControl } from '@hcengineering/server-core'
import { type TriggerControl } from '@hcengineering/server-core'
import serverCore from '@hcengineering/server-core'
import serverNotification, {
getPersonAccount,
@ -539,7 +539,6 @@ export async function createPushNotification (
data.tag = _id
}
const front = control.branding?.front ?? getMetadata(serverCore.metadata.FrontUrl) ?? ''
const uploadUrl = getMetadata(serverCore.metadata.UploadURL) ?? ''
const domainPath = `${workbenchId}/${control.workspace.workspaceUrl}`
data.domain = concatLink(front, domainPath)
if (path !== undefined) {
@ -548,7 +547,10 @@ export async function createPushNotification (
if (senderAvatar != null) {
const provider = getAvatarProviderId(senderAvatar.avatarType)
if (provider === contact.avatarProvider.Image) {
data.icon = concatLink(uploadUrl, `?file=${senderAvatar.avatar}`)
if (senderAvatar.avatar != null) {
const url = await control.storageAdapter.getUrl(control.ctx, control.workspace, senderAvatar.avatar)
data.icon = url.includes('://') ? url : concatLink(front, url)
}
} else if (provider === contact.avatarProvider.Gravatar && senderAvatar.avatarProps?.url !== undefined) {
data.icon = getGravatarUrl(senderAvatar.avatarProps?.url, 512)
}

View File

@ -232,6 +232,8 @@ export interface WorkspaceLoginInfo extends LoginInfo {
workspace: string
productId: string
workspaceId: string
creating?: boolean
createProgress?: number
}
@ -635,6 +637,7 @@ export async function selectWorkspace (
email,
token,
workspace: workspaceUrl,
workspaceId: workspaceInfo.workspace,
productId,
creating: workspaceInfo.creating,
createProgress: workspaceInfo.createProgress
@ -667,6 +670,7 @@ export async function selectWorkspace (
email,
token: generateToken(email, getWorkspaceId(workspaceInfo.workspace, productId), getExtra(accountInfo)),
workspace: workspaceUrl,
workspaceId: workspaceInfo.workspace,
productId,
creating: workspaceInfo.creating,
createProgress: workspaceInfo.createProgress
@ -689,6 +693,7 @@ export async function selectWorkspace (
email,
token: generateToken(email, getWorkspaceId(workspaceInfo.workspace, productId), getExtra(accountInfo)),
workspace: workspaceUrl,
workspaceId: workspaceInfo.workspace,
productId,
creating: workspaceInfo.creating,
createProgress: workspaceInfo.createProgress

View File

@ -1,5 +1,5 @@
//
// Copyright © 2022 Hardcore Engineering Inc.
// Copyright © 2022, 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
@ -26,7 +26,6 @@ export interface Config {
AccountsUrl: string
MongoUrl: string
UploadUrl: string
}
const envMap: { [key in keyof Config]: string } = {
@ -35,8 +34,7 @@ const envMap: { [key in keyof Config]: string } = {
Interval: 'INTERVAL',
Port: 'COLLABORATOR_PORT',
AccountsUrl: 'ACCOUNTS_URL',
MongoUrl: 'MONGO_URL',
UploadUrl: 'UPLOAD_URL'
MongoUrl: 'MONGO_URL'
}
const required: Array<keyof Config> = ['Secret', 'ServiceID', 'Port', 'AccountsUrl', 'MongoUrl']
@ -48,8 +46,7 @@ const config: Config = (() => {
Interval: parseInt(process.env[envMap.Interval] ?? '30000'),
Port: parseInt(process.env[envMap.Port] ?? '3078'),
AccountsUrl: process.env[envMap.AccountsUrl],
MongoUrl: process.env[envMap.MongoUrl],
UploadUrl: process.env[envMap.UploadUrl] ?? '/files'
MongoUrl: process.env[envMap.MongoUrl]
}
const missingEnv = required.filter((key) => params[key] === undefined).map((key) => envMap[key])

View File

@ -34,6 +34,7 @@ import { simpleClientFactory } from './platform'
import { RpcErrorResponse, RpcRequest, RpcResponse, methods } from './rpc'
import { PlatformStorageAdapter } from './storage/platform'
import { MarkupTransformer } from './transformers/markup'
import { TransformerFactory } from './types'
/**
* @public
@ -57,23 +58,22 @@ export async function start (
const app = express()
app.use(cors())
app.use(bp.json())
const extensions = [
ServerKit.configure({
image: {
getBlobRef: async (fileId, name, size) => {
const sz = size !== undefined ? `&size=${size}` : ''
return {
src: `${config.UploadUrl}?file=${fileId}`,
srcset: `${config.UploadUrl}?file=${fileId}${sz}`
}
}
}
})
]
const extensionsCtx = ctx.newChild('extensions', {})
const transformer = new MarkupTransformer(extensions)
const transformerFactory: TransformerFactory = (workspaceId) => {
const extensions = [
ServerKit.configure({
image: {
getBlobRef: async (fileId, name, size) => {
const src = await storageAdapter.getUrl(ctx, workspaceId, fileId)
return { src, srcset: '' }
}
}
})
]
return new MarkupTransformer(extensions)
}
const hocuspocus = new Hocuspocus({
address: '0.0.0.0',
@ -116,7 +116,7 @@ export async function start (
}),
new StorageExtension({
ctx: extensionsCtx.newChild('storage', {}),
adapter: new PlatformStorageAdapter(storageAdapter, mongo, transformer)
adapter: new PlatformStorageAdapter(storageAdapter, mongo, transformerFactory)
})
]
})
@ -178,6 +178,7 @@ export async function start (
rpcCtx.info('rpc', { method: request.method, connectionId: context.connectionId, mode: token.extra?.mode ?? '' })
await rpcCtx.with('/rpc', { method: request.method }, async (ctx) => {
try {
const transformer = transformerFactory(token.workspace)
const response: RpcResponse = await rpcCtx.with(request.method, {}, async (ctx) => {
return await method(ctx, context, request.payload, { hocuspocus, storageAdapter, transformer })
})

View File

@ -35,10 +35,10 @@ import core, {
} from '@hcengineering/core'
import { StorageAdapter } from '@hcengineering/server-core'
import { areEqualMarkups } from '@hcengineering/text'
import { Transformer } from '@hocuspocus/transformer'
import { MongoClient } from 'mongodb'
import { Doc as YDoc } from 'yjs'
import { Context } from '../context'
import { TransformerFactory } from '../types'
import { CollabStorageAdapter } from './adapter'
@ -46,7 +46,7 @@ export class PlatformStorageAdapter implements CollabStorageAdapter {
constructor (
private readonly storage: StorageAdapter,
private readonly mongodb: MongoClient,
private readonly transformer: Transformer
private readonly transformerFactory: TransformerFactory
) {}
async loadDocument (ctx: MeasureContext, documentId: DocumentId, context: Context): Promise<YDoc | undefined> {
@ -208,18 +208,18 @@ export class PlatformStorageAdapter implements CollabStorageAdapter {
platformDocumentId: PlatformDocumentId,
context: Context
): Promise<YDoc | undefined> {
const { mongodb, transformer } = this
const { workspaceId } = context
const { objectDomain, objectId, objectAttr } = parsePlatformDocumentId(platformDocumentId)
const doc = await ctx.with('query', {}, async () => {
const db = mongodb.db(toWorkspaceString(workspaceId))
const db = this.mongodb.db(toWorkspaceString(workspaceId))
return await db.collection<Doc>(objectDomain).findOne({ _id: objectId }, { projection: { [objectAttr]: 1 } })
})
const content = doc !== null && objectAttr in doc ? ((doc as any)[objectAttr] as string) : ''
if (content.startsWith('{') && content.endsWith('}')) {
return await ctx.with('transform', {}, () => {
const transformer = this.transformerFactory(workspaceId)
return transformer.toYdoc(content, objectAttr)
})
}
@ -237,6 +237,7 @@ export class PlatformStorageAdapter implements CollabStorageAdapter {
snapshot: YDocVersion | undefined,
context: Context
): Promise<void> {
const { workspaceId } = context
const { objectClass, objectId, objectAttr } = parsePlatformDocumentId(platformDocumentId)
const attribute = client.getHierarchy().findAttribute(objectClass, objectAttr)
@ -267,7 +268,8 @@ export class PlatformStorageAdapter implements CollabStorageAdapter {
} else if (hierarchy.isDerived(attribute.type._class, core.class.TypeCollaborativeMarkup)) {
// TODO a temporary solution while we are keeping Markup in Mongo
const content = await ctx.with('transform', {}, () => {
return this.transformer.fromYdoc(document, objectAttr)
const transformer = this.transformerFactory(workspaceId)
return transformer.fromYdoc(document, objectAttr)
})
if (!areEqualMarkups(content, (current as any)[objectAttr])) {
await ctx.with('update', {}, async () => {

View File

@ -13,7 +13,8 @@
// limitations under the License.
//
import type { Class, Doc, Domain, Ref } from '@hcengineering/core'
import type { Class, Doc, Domain, Ref, WorkspaceId } from '@hcengineering/core'
import { Transformer } from '@hocuspocus/transformer'
/** @public */
export interface DocumentId {
@ -29,3 +30,6 @@ export interface PlatformDocumentId {
objectId: Ref<Doc>
objectAttr: string
}
/** @public */
export type TransformerFactory = (workspaceId: WorkspaceId) => Transformer

View File

@ -147,6 +147,10 @@ export class MemStorageAdapter implements StorageAdapter {
// Partial are not supported by
throw new Error('NoSuchKey')
}
async getUrl (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<string> {
return '/files/' + objectName
}
}
export class MemRawDBAdapter implements RawDBAdapter {

View File

@ -41,7 +41,7 @@ const serverCore = plugin(serverCoreId, {
},
metadata: {
FrontUrl: '' as Metadata<string>,
UploadURL: '' as Metadata<string>,
FilesUrl: '' as Metadata<string>,
ElasticIndexName: '' as Metadata<string>,
ElasticIndexVersion: '' as Metadata<string>
}

View File

@ -253,8 +253,12 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
@withContext('aggregator-get', {})
async get (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise<Readable> {
const { provider, stat } = await this.findProvider(ctx, workspaceId, name)
return await provider.get(ctx, workspaceId, stat.storageId)
// const { provider, stat } = await this.findProvider(ctx, workspaceId, name)
const provider = this.adapters.get(this.defaultAdapter)
if (provider === undefined) {
throw new NoSuchKeyError('No such provider found')
}
return await provider.get(ctx, workspaceId, name)
}
@withContext('find-provider', {})
@ -353,12 +357,19 @@ export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterE
// If the file is already stored in different provider, we need to remove it.
if (stat !== undefined && stat.provider !== provider) {
const adapter = this.adapters.get(stat.provider)
await adapter?.remove(ctx, workspaceId, [stat._id])
// TODO temporary not needed
// const adapter = this.adapters.get(stat.provider)
// await adapter?.remove(ctx, workspaceId, [stat._id])
}
return result
}
@withContext('aggregator-getUrl', {})
async getUrl (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise<string> {
const { provider, stat } = await this.findProvider(ctx, workspaceId, name)
return await provider.getUrl(ctx, workspaceId, stat.storageId)
}
}
/**

View File

@ -0,0 +1,7 @@
module.exports = {
extends: ['./node_modules/@hcengineering/platform-rig/profiles/node/eslint.config.json'],
parserOptions: {
tsconfigRootDir: __dirname,
project: './tsconfig.json'
}
}

View File

@ -0,0 +1,4 @@
*
!/lib/**
!CHANGELOG.md
/lib/**/__tests__/

View File

@ -0,0 +1,5 @@
{
"$schema": "https://developer.microsoft.com/json-schemas/rig-package/rig.schema.json",
"rigPackageName": "@hcengineering/platform-rig",
"rigProfile": "node"
}

View File

@ -0,0 +1,7 @@
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
testMatch: ['**/?(*.)+(spec|test).[jt]s?(x)'],
roots: ["./src"],
coverageReporters: ["text-summary", "html"]
}

View File

@ -0,0 +1,44 @@
{
"name": "@hcengineering/datalake",
"version": "0.6.0",
"main": "lib/index.js",
"svelte": "src/index.ts",
"types": "types/index.d.ts",
"author": "Anticrm Platform Contributors",
"template": "@hcengineering/node-package",
"license": "EPL-2.0",
"scripts": {
"build": "compile",
"build:watch": "compile",
"test": "jest --passWithNoTests --silent --forceExit",
"format": "format src",
"_phase:build": "compile transpile src",
"_phase:test": "jest --passWithNoTests --silent --forceExit",
"_phase:format": "format src",
"_phase:validate": "compile validate"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",
"@typescript-eslint/eslint-plugin": "^6.11.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-promise": "^6.1.1",
"eslint-plugin-n": "^15.4.0",
"eslint": "^8.54.0",
"@typescript-eslint/parser": "^6.11.0",
"eslint-config-standard-with-typescript": "^40.0.0",
"prettier": "^3.1.0",
"typescript": "^5.3.3",
"@types/node": "~20.11.16",
"jest": "^29.7.0",
"ts-jest": "^29.1.1",
"@types/jest": "^29.5.5",
"@types/node-fetch": "~2.6.2"
},
"dependencies": {
"@hcengineering/core": "^0.6.32",
"@hcengineering/platform": "^0.6.11",
"@hcengineering/server-core": "^0.6.1",
"node-fetch": "^2.6.6",
"form-data": "^4.0.0"
}
}

View File

@ -0,0 +1,127 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import { type MeasureContext, type WorkspaceId, concatLink } from '@hcengineering/core'
import FormData from 'form-data'
import fetch from 'node-fetch'
import { Readable } from 'stream'
/** @public */
export interface ObjectMetadata {
lastModified: number
name: string
type: string
size?: number
}
/** @public */
export interface PutObjectOutput {
id: string
}
interface BlobUploadError {
key: string
error: string
}
interface BlobUploadSuccess {
key: string
id: string
metadata: ObjectMetadata
}
type BlobUploadResult = BlobUploadSuccess | BlobUploadError
/** @public */
export class Client {
constructor (private readonly endpoint: string) {}
getObjectUrl (ctx: MeasureContext, workspace: WorkspaceId, objectName: string): string {
const path = `/blob/${workspace.name}/${objectName}`
return concatLink(this.endpoint, path)
}
async getObject (ctx: MeasureContext, workspace: WorkspaceId, objectName: string): Promise<Readable> {
const url = this.getObjectUrl(ctx, workspace, objectName)
const response = await fetch(url)
if (!response.ok) {
throw new Error('HTTP error ' + response.status)
}
if (response.body == null) {
ctx.error('bad datalake response', { objectName })
throw new Error('Missing response body')
}
return Readable.from(response.body)
}
async deleteObject (ctx: MeasureContext, workspace: WorkspaceId, objectName: string): Promise<void> {
const url = this.getObjectUrl(ctx, workspace, objectName)
const response = await fetch(url, { method: 'DELETE' })
if (!response.ok) {
throw new Error('HTTP error ' + response.status)
}
}
async putObject (
ctx: MeasureContext,
workspace: WorkspaceId,
objectName: string,
stream: Readable | Buffer | string,
metadata: ObjectMetadata
): Promise<PutObjectOutput> {
const path = `/upload/form-data/${workspace.name}`
const url = concatLink(this.endpoint, path)
const form = new FormData()
const options: FormData.AppendOptions = {
filename: objectName,
contentType: metadata.type,
knownLength: metadata.size,
header: {
'Last-Modified': metadata.lastModified
}
}
form.append('file', stream, options)
const response = await fetch(url, {
method: 'POST',
body: form
})
if (!response.ok) {
throw new Error('HTTP error ' + response.status)
}
const result = (await response.json()) as BlobUploadResult[]
if (result.length !== 1) {
ctx.error('bad datalake response', { objectName, result })
throw new Error('Bad datalake response')
}
const uploadResult = result[0]
if ('error' in uploadResult) {
ctx.error('error during blob upload', { objectName, error: uploadResult.error })
throw new Error('Upload failed: ' + uploadResult.error)
} else {
return { id: uploadResult.id }
}
}
}
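
A usage sketch for the Client defined above; the endpoint, workspace name and object name are placeholders, the import path assumes this file is client.ts, and MeasureMetricsContext is assumed to be the stock measurement context from @hcengineering/core:

import { MeasureMetricsContext, type WorkspaceId } from '@hcengineering/core'
import { Client } from './client'

async function example (): Promise<void> {
  const ctx = new MeasureMetricsContext('datalake-example', {})
  const workspace = { name: 'my-workspace' } as WorkspaceId // simplified shape for illustration
  const client = new Client('http://localhost:4030') // hypothetical datalake endpoint

  // Upload a small text blob, stream it back, then delete it.
  const { id } = await client.putObject(ctx, workspace, 'hello.txt', 'hello world', {
    name: 'hello.txt',
    type: 'text/plain',
    size: 11,
    lastModified: Date.now()
  })
  console.log('uploaded blob id:', id)

  const stream = await client.getObject(ctx, workspace, 'hello.txt')
  for await (const chunk of stream) {
    console.log('received', (chunk as Buffer).length, 'bytes')
  }

  await client.deleteObject(ctx, workspace, 'hello.txt')
}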

View File

@ -0,0 +1,170 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import { withContext, type Blob, type MeasureContext, type WorkspaceId } from '@hcengineering/core'
import {
type BlobStorageIterator,
type BucketInfo,
type StorageAdapter,
type StorageConfig,
type StorageConfiguration,
type UploadedObjectInfo
} from '@hcengineering/server-core'
import { type Readable } from 'stream'
import { type ObjectMetadata, Client } from './client'
export interface DatalakeConfig extends StorageConfig {
kind: 'datalake'
}
/**
* @public
*/
export class DatalakeService implements StorageAdapter {
static config = 'datalake'
client: Client
constructor (readonly opt: DatalakeConfig) {
this.client = new Client(opt.endpoint)
}
async initialize (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {}
async close (): Promise<void> {}
async exists (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<boolean> {
// workspace/buckets not supported, assume they always exist
return true
}
@withContext('make')
async make (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {
// workspace/buckets not supported, assume they always exist
}
async listBuckets (ctx: MeasureContext, productId: string): Promise<BucketInfo[]> {
return []
}
@withContext('remove')
async remove (ctx: MeasureContext, workspaceId: WorkspaceId, objectNames: string[]): Promise<void> {
await Promise.all(
objectNames.map(async (objectName) => {
await this.client.deleteObject(ctx, workspaceId, objectName)
})
)
}
@withContext('delete')
async delete (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<void> {
// not supported, just do nothing and pretend we deleted the workspace
}
@withContext('listStream')
async listStream (
ctx: MeasureContext,
workspaceId: WorkspaceId,
prefix?: string | undefined
): Promise<BlobStorageIterator> {
throw new Error('not supported')
}
@withContext('stat')
async stat (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Blob | undefined> {
// not supported
return undefined
}
@withContext('get')
async get (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Readable> {
return await this.client.getObject(ctx, workspaceId, objectName)
}
@withContext('put')
async put (
ctx: MeasureContext,
workspaceId: WorkspaceId,
objectName: string,
stream: Readable | Buffer | string,
contentType: string,
size?: number
): Promise<UploadedObjectInfo> {
const metadata: ObjectMetadata = {
lastModified: Date.now(),
name: objectName,
type: contentType,
size
}
await ctx.with('put', {}, async () => {
return await this.client.putObject(ctx, workspaceId, objectName, stream, metadata)
})
return {
etag: '',
versionId: ''
}
}
@withContext('read')
async read (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Buffer[]> {
const data = await this.client.getObject(ctx, workspaceId, objectName)
const chunks: Buffer[] = []
for await (const chunk of data) {
chunks.push(chunk)
}
return chunks
}
@withContext('partial')
async partial (
ctx: MeasureContext,
workspaceId: WorkspaceId,
objectName: string,
offset: number,
length?: number
): Promise<Readable> {
throw new Error('not implemented')
}
async getUrl (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<string> {
return this.client.getObjectUrl(ctx, workspaceId, objectName)
}
}
export function processConfigFromEnv (storageConfig: StorageConfiguration): string | undefined {
let endpoint = process.env.DATALAKE_ENDPOINT
if (endpoint === undefined) {
return 'DATALAKE_ENDPOINT'
}
let port = 80
const sp = endpoint.split(':')
if (sp.length > 1) {
endpoint = sp[0]
port = parseInt(sp[1])
}
const config: DatalakeConfig = {
kind: 'datalake',
name: 'datalake',
endpoint,
port
}
storageConfig.storages.push(config)
storageConfig.default = 'datalake'
}
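
A worked example of the env-driven configuration above, using a placeholder endpoint and assuming StorageConfiguration only carries a default name plus the list of storages:

import { type StorageConfiguration } from '@hcengineering/server-core'

// With DATALAKE_ENDPOINT="datalake:4030" the parser above splits on ':' and produces
// { kind: 'datalake', name: 'datalake', endpoint: 'datalake', port: 4030 }, then makes it the default.
process.env.DATALAKE_ENDPOINT = 'datalake:4030'
const storageConfig: StorageConfiguration = { default: '', storages: [] }
const missingEnv = processConfigFromEnv(storageConfig)
console.log(missingEnv) // undefined when the variable is present, otherwise 'DATALAKE_ENDPOINT'
console.log(storageConfig.default, storageConfig.storages[0])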

View File

@ -0,0 +1,10 @@
{
"extends": "./node_modules/@hcengineering/platform-rig/profiles/node/tsconfig.json",
"compilerOptions": {
"rootDir": "./src",
"outDir": "./lib",
"declarationDir": "./types",
"tsBuildInfoFile": ".build/build.tsbuildinfo"
}
}

View File

@ -44,18 +44,17 @@ async function storageUpload (
workspace: WorkspaceId,
file: UploadedFile
): Promise<string> {
const id = uuid()
const uuid = file.name
const data = file.tempFilePath !== undefined ? fs.createReadStream(file.tempFilePath) : file.data
const resp = await ctx.with(
'storage upload',
{ workspace: workspace.name },
async (ctx) => await storageAdapter.put(ctx, workspace, id, data, file.mimetype, file.size),
async (ctx) => await storageAdapter.put(ctx, workspace, uuid, data, file.mimetype, file.size),
{ file: file.name, contentType: file.mimetype }
)
ctx.info('minio upload', resp)
return id
ctx.info('storage upload', resp)
return uuid
}
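
A hypothetical browser-side sketch of the new upload round-trip; the form field name, the Authorization header and the target URL are assumptions, and only the response shape comes from the handler changed later in this file:

async function uploadBlob (filesUrl: string, token: string, blobId: string, data: Blob): Promise<string> {
  const form = new FormData()
  // The file name doubles as the storage key now, so the caller passes the blob id here.
  form.append('file', data, blobId)
  const res = await fetch(filesUrl, {
    method: 'POST',
    headers: { Authorization: 'Bearer ' + token }, // assumed auth scheme
    body: form
  })
  // The endpoint now answers with an array like [{ key: 'file', id: '<blobId>' }].
  const [result] = (await res.json()) as Array<{ key: string, id: string }>
  return result.id
}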
function getRange (range: string, size: number): [number, number] {
@ -84,6 +83,7 @@ async function getFileRange (
workspace: WorkspaceId,
res: Response
): Promise<void> {
const uuid = stat._id
const size: number = stat.size
const [start, end] = getRange(range, size)
@ -240,6 +240,7 @@ export function start (
storageAdapter: StorageAdapter
accountsUrl: string
uploadUrl: string
filesUrl: string
modelVersion: string
version: string
rekoniUrl: string
@ -290,6 +291,7 @@ export function start (
const data = {
ACCOUNTS_URL: config.accountsUrl,
UPLOAD_URL: config.uploadUrl,
FILES_URL: config.filesUrl,
MODEL_VERSION: config.modelVersion,
VERSION: config.version,
REKONI_URL: config.rekoniUrl,
@ -511,7 +513,12 @@ export function start (
const payload = decodeToken(token)
const uuid = await storageUpload(ctx, config.storageAdapter, payload.workspace, file)
res.status(200).send(uuid)
res.status(200).send([
{
key: 'file',
id: uuid
}
])
} catch (error: any) {
ctx.error('error-post-files', error)
res.status(500).send()
@ -606,7 +613,7 @@ export function start (
const buffer = Buffer.concat(data)
config.storageAdapter
.put(ctx, payload.workspace, id, buffer, contentType, buffer.length)
.then(async (objInfo) => {
.then(async () => {
res.status(200).send({
id,
contentType,

View File

@ -1,6 +1,6 @@
//
// Copyright © 2020, 2021 Anticrm Platform Contributors.
// Copyright © 2021 Hardcore Engineering Inc.
// Copyright © 2021, 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
@ -106,12 +106,16 @@ export function startFront (ctx: MeasureContext, extraConfig?: Record<string, st
}
let previewConfig = process.env.PREVIEW_CONFIG
if (previewConfig === undefined) {
// Use universal preview config
previewConfig = `${uploadUrl}/:workspace?file=:blobId&size=:size`
}
let filesUrl = process.env.FILES_URL
if (filesUrl === undefined) {
filesUrl = `${uploadUrl}/:workspace/:filename?file=:blobId&workspace=:workspace`
}
const pushPublicKey = process.env.PUSH_PUBLIC_KEY
const brandingUrl = process.env.BRANDING_URL
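
For illustration, with UPLOAD_URL set to "/files" and neither PREVIEW_CONFIG nor FILES_URL provided, the defaults computed above resolve to:

const uploadUrl = '/files' // illustrative value only
const previewConfig = `${uploadUrl}/:workspace?file=:blobId&size=:size`
// -> '/files/:workspace?file=:blobId&size=:size'
const filesUrl = `${uploadUrl}/:workspace/:filename?file=:blobId&workspace=:workspace`
// -> '/files/:workspace/:filename?file=:blobId&workspace=:workspace'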
@ -123,6 +127,7 @@ export function startFront (ctx: MeasureContext, extraConfig?: Record<string, st
storageAdapter,
accountsUrl,
uploadUrl,
filesUrl,
modelVersion,
version,
gmailUrl,

View File

@ -23,8 +23,8 @@ import core, {
type Ref,
type WorkspaceId
} from '@hcengineering/core'
import {
import { getMetadata } from '@hcengineering/platform'
import serverCore, {
removeAllObjects,
type BlobStorageIterator,
type BucketInfo,
@ -363,6 +363,12 @@ export class MinioService implements StorageAdapter {
length
)
}
@withContext('getUrl')
async getUrl (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<string> {
const filesUrl = getMetadata(serverCore.metadata.FilesUrl) ?? ''
return filesUrl.replaceAll(':workspace', workspaceId.name).replaceAll(':blobId', objectName)
}
}
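
A worked example of how the FilesUrl template is consumed by getUrl, with illustrative values (a :filename placeholder, if present in the template, is left untouched by this method):

const template = 'https://app.example.com/files/:workspace/:filename?file=:blobId&workspace=:workspace'
const url = template.replaceAll(':workspace', 'ws1').replaceAll(':blobId', 'blob-123')
// -> 'https://app.example.com/files/ws1/:filename?file=blob-123&workspace=ws1'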
export function processConfigFromEnv (storageConfig: StorageConfiguration): string | undefined {

View File

@ -35,6 +35,7 @@
},
"dependencies": {
"@hcengineering/core": "^0.6.32",
"@hcengineering/platform": "^0.6.11",
"@hcengineering/server-core": "^0.6.1",
"@hcengineering/storage": "^0.6.0",
"@aws-sdk/client-s3": "^3.575.0",

View File

@ -24,8 +24,8 @@ import core, {
type Ref,
type WorkspaceId
} from '@hcengineering/core'
import {
import { getMetadata } from '@hcengineering/platform'
import serverCore, {
type BlobStorageIterator,
type ListBlobResult,
type StorageAdapter,
@ -433,6 +433,12 @@ export class S3Service implements StorageAdapter {
const range = length !== undefined ? `bytes=${offset}-${offset + length}` : `bytes=${offset}-`
return await this.doGet(ctx, workspaceId, objectName, range)
}
@withContext('getUrl')
async getUrl (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<string> {
const filesUrl = getMetadata(serverCore.metadata.FilesUrl) ?? ''
return filesUrl.replaceAll(':workspace', workspaceId.name).replaceAll(':blobId', objectName)
}
}
export function processConfigFromEnv (storageConfig: StorageConfiguration): string | undefined {
@ -450,7 +456,7 @@ export function processConfigFromEnv (storageConfig: StorageConfiguration): stri
return 'S3_SECRET_KEY'
}
const minioConfig: S3Config = {
const config: S3Config = {
kind: 's3',
name: 's3',
region: 'auto',
@ -458,6 +464,6 @@ export function processConfigFromEnv (storageConfig: StorageConfiguration): stri
accessKey,
secretKey
}
storageConfig.storages.push(minioConfig)
storageConfig.storages.push(config)
storageConfig.default = 's3'
}

View File

@ -48,6 +48,7 @@
"@hcengineering/mongo": "^0.6.1",
"@hcengineering/minio": "^0.6.0",
"@hcengineering/s3": "^0.6.0",
"@hcengineering/datalake": "^0.6.0",
"elastic-apm-node": "~3.26.0",
"@hcengineering/server-token": "^0.6.11"
}

View File

@ -1,3 +1,4 @@
import { type DatalakeConfig, DatalakeService } from '@hcengineering/datalake'
import { MinioConfig, MinioService, addMinioFallback } from '@hcengineering/minio'
import { createRawMongoDBAdapter } from '@hcengineering/mongo'
import { S3Service, type S3Config } from '@hcengineering/s3'
@ -96,6 +97,12 @@ export function createStorageFromConfig (config: StorageConfig): StorageAdapter
throw new Error('One of endpoint/accessKey/secretKey values are not specified')
}
return new S3Service(c)
} else if (kind === DatalakeService.config) {
const c = config as DatalakeConfig
if (c.endpoint == null) {
throw new Error('Endpoint value is not specified')
}
return new DatalakeService(c)
} else {
throw new Error('Unsupported storage kind: ' + kind)
}
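
An illustrative call into the new branch; the endpoint is a placeholder and the object is cast loosely since DatalakeConfig is not imported in this sketch:

const datalakeAdapter = createStorageFromConfig({
  kind: 'datalake',
  name: 'datalake',
  endpoint: 'http://datalake:4030' // placeholder, not a value from this change
} as any)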

View File

@ -4,7 +4,7 @@ export interface ServerEnv {
serverSecret: string
rekoniUrl: string
frontUrl: string
uploadUrl: string
filesUrl: string | undefined
sesUrl: string | undefined
accountsUrl: string
serverPort: number
@ -55,12 +55,7 @@ export function serverConfigFromEnv (): ServerEnv {
process.exit(1)
}
const uploadUrl = process.env.UPLOAD_URL
if (uploadUrl === undefined) {
console.log('Please provide UPLOAD_URL url')
process.exit(1)
}
const filesUrl = process.env.FILES_URL
const sesUrl = process.env.SES_URL
const accountsUrl = process.env.ACCOUNTS_URL
@ -81,7 +76,7 @@ export function serverConfigFromEnv (): ServerEnv {
serverSecret,
rekoniUrl,
frontUrl,
uploadUrl,
filesUrl,
sesUrl,
accountsUrl,
serverPort,

View File

@ -163,6 +163,7 @@ export class GithubWorker implements IntegrationManager {
}
const frontUrl = this.getBranding()?.front ?? config.FrontURL
const refUrl = concatLink(frontUrl, `/browse/?workspace=${this.workspace.name}`)
// TODO storage URL
const imageUrl = concatLink(frontUrl ?? config.FrontURL, `/files?workspace=${this.workspace.name}&file=`)
const guestUrl = getPublicLinkUrl(this.workspace, frontUrl)
const json = parseMessageMarkdown(text ?? '', refUrl, imageUrl, guestUrl)
@ -177,6 +178,7 @@ export class GithubWorker implements IntegrationManager {
return await markupToMarkdown(
text ?? '',
concatLink(this.getBranding()?.front ?? config.FrontURL, `/browse/?workspace=${this.workspace.name}`),
// TODO storage URL
concatLink(this.getBranding()?.front ?? config.FrontURL, `/files?workspace=${this.workspace.name}&file=`),
preprocessor
)

View File

@ -101,7 +101,6 @@ services:
- STORAGE_CONFIG=${STORAGE_CONFIG}
- REKONI_URL=http://rekoni:7
- FRONT_URL=http://localhost:8083
- UPLOAD_URL=http://localhost:8083/files
- ACCOUNTS_URL=http://account:3003
- LAST_NAME_FIRST=true
- ELASTIC_INDEX_NAME=local_storage_index
@ -118,7 +117,6 @@ services:
- COLLABORATOR_PORT=3078
- SECRET=secret
- ACCOUNTS_URL=http://account:3003
- UPLOAD_URL=/files
- MONGO_URL=mongodb://mongodb:27018
- STORAGE_CONFIG=${STORAGE_CONFIG}
restart: unless-stopped