UBERF-6126: Storage adapter (#5035)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in:
Andrey Sobolev 2024-03-22 14:12:41 +07:00 committed by GitHub
parent 466082e4cd
commit 5fd60d1096
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
69 changed files with 1672 additions and 2606 deletions

View File

@ -152,9 +152,6 @@ dependencies:
'@rush-temp/front':
specifier: file:./projects/front.tgz
version: file:projects/front.tgz(esbuild@0.20.1)
'@rush-temp/generator':
specifier: file:./projects/generator.tgz
version: file:projects/generator.tgz(bufferutil@4.0.8)
'@rush-temp/gmail':
specifier: file:./projects/gmail.tgz
version: file:projects/gmail.tgz(@types/node@20.11.19)(esbuild@0.20.1)(ts-node@10.9.2)
@ -875,9 +872,6 @@ dependencies:
'@types/express-fileupload':
specifier: ^1.1.7
version: 1.4.4
'@types/faker':
specifier: ~5.5.9
version: 5.5.9
'@types/html-to-text':
specifier: ^8.1.1
version: 8.1.1
@ -923,9 +917,6 @@ dependencies:
'@types/passport-google-oauth20':
specifier: ^2.0.0
version: 2.0.14
'@types/pdfkit':
specifier: ~0.12.3
version: 0.12.12
'@types/png-chunks-extract':
specifier: ^1.0.2
version: 1.0.2
@ -1082,9 +1073,6 @@ dependencies:
express-static-gzip:
specifier: ^2.1.7
version: 2.1.7
faker:
specifier: ~5.5.3
version: 5.5.3
fast-copy:
specifier: ~3.0.1
version: 3.0.1
@ -1115,9 +1103,6 @@ dependencies:
jest:
specifier: ^29.7.0
version: 29.7.0(@types/node@20.11.19)(ts-node@10.9.2)
jpeg-js:
specifier: ~0.4.3
version: 0.4.4
just-clone:
specifier: ~6.2.0
version: 6.2.0
@ -1178,9 +1163,6 @@ dependencies:
passport-google-oauth20:
specifier: ~2.0.0
version: 2.0.0
pdfkit:
specifier: ~0.13.0
version: 0.13.0
png-chunks-extract:
specifier: ^1.0.0
version: 1.0.0
@ -5558,12 +5540,6 @@ packages:
resolution: {integrity: sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==}
dev: false
/@swc/helpers@0.3.17:
resolution: {integrity: sha512-tb7Iu+oZ+zWJZ3HJqwx8oNwSDIU440hmVMDPhpACWQWnrZHK99Bxs70gT1L2dnr5Hg50ZRWEFkQCAnOVVV0z1Q==}
dependencies:
tslib: 2.6.2
dev: false
/@swc/types@0.1.5:
resolution: {integrity: sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==}
dev: false
@ -6191,10 +6167,6 @@ packages:
'@types/serve-static': 1.15.5
dev: false
/@types/faker@5.5.9:
resolution: {integrity: sha512-uCx6mP3UY5SIO14XlspxsGjgaemrxpssJI0Ol+GfhxtcKpv9pgRZYsS4eeKeHVLje6Qtc8lGszuBI461+gVZBA==}
dev: false
/@types/find-cache-dir@3.2.1:
resolution: {integrity: sha512-frsJrz2t/CeGifcu/6uRo4b+SzAwT4NYCVPu1GN8IB9XTzrpPkGuV0tmh9mN+/L0PklAlsC3u5Fxt0ju00LXIw==}
dev: false
@ -6436,12 +6408,6 @@ packages:
'@types/express': 4.17.21
dev: false
/@types/pdfkit@0.12.12:
resolution: {integrity: sha512-plkfKdaPSVIndcsg2qFmQ/qazRivTyHALEiKufsTI6qn4hbXbFVm0n9RuY0RQpoBxM2NNK5SudCfpisDzIdXoA==}
dependencies:
'@types/node': 20.11.19
dev: false
/@types/png-chunks-extract@1.0.2:
resolution: {integrity: sha512-z6djfFIbrrddtunoMJBOPlyZrnmeuG1kkvHUNi2QfpOb+JMMLuLliHHTmMyRi7k7LiTAut0HbdGCF6ibDtQAHQ==}
dev: false
@ -7541,11 +7507,6 @@ packages:
dev: false
optional: true
/base64-js@0.0.8:
resolution: {integrity: sha512-3XSA2cR/h/73EzlXXdU6YNycmYI7+kicTxks4eJg2g39biHR84slg2+des+p7iHYhbRg/udIS4TD53WabcOUkw==}
engines: {node: '>= 0.4'}
dev: false
/base64-js@1.5.1:
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
dev: false
@ -7667,12 +7628,6 @@ packages:
object.entries: 1.1.7
dev: false
/brotli@1.3.3:
resolution: {integrity: sha512-oTKjJdShmDuGW94SyyaoQvAjf30dZaHnjJ8uAF+u2/vGJkJbJPJAT1gDiOJP5v1Zb6f9KEyW/1HpuaWIXtGHPg==}
dependencies:
base64-js: 1.5.1
dev: false
/browser-assert@1.2.1:
resolution: {integrity: sha512-nfulgvOR6S4gt9UKCeGJOuSGBPGiFT6oQ/2UBnvTY/5aQ1PnksW72fhZkM30DzoRRv2WpwZf1vHHEr3mtuXIWQ==}
dev: false
@ -7974,11 +7929,6 @@ packages:
engines: {node: '>=0.8'}
dev: false
/clone@2.1.2:
resolution: {integrity: sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==}
engines: {node: '>=0.8'}
dev: false
/co-body@6.1.0:
resolution: {integrity: sha512-m7pOT6CdLN7FuXUcpuz/8lfQ/L77x8SchHCF4G0RBTJO20Wzmhn5Sp4/5WsKy8OSpifBSUrmg83qEqaDHdyFuQ==}
dependencies:
@ -8686,10 +8636,6 @@ packages:
- supports-color
dev: false
/dfa@1.2.0:
resolution: {integrity: sha512-ED3jP8saaweFTjeGX8HQPjeC1YYyZs98jGNZx6IiBvxW7JG5v492kamAQB3m2wop07CvU/RQmzcKr6bgcC5D/Q==}
dev: false
/diff-sequences@29.6.3:
resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
@ -9821,10 +9767,6 @@ packages:
yauzl: 2.10.0
dev: false
/faker@5.5.3:
resolution: {integrity: sha512-wLTv2a28wjUyWkbnX7u/ABZBkUkIF2fCd73V6P2oFqEGEktDfzWx4UxrSqtPRw0xPRAcjeAOIiJWqZm3pP4u3g==}
dev: false
/fast-copy@3.0.1:
resolution: {integrity: sha512-Knr7NOtK3HWRYGtHoJrjkaWepqT8thIVGAwt0p0aUs1zqkAzXZV4vo9fFNwyb5fcqK1GKYFYxldQdIDVKhUAfA==}
dev: false
@ -10071,20 +10013,6 @@ packages:
optional: true
dev: false
/fontkit@1.9.0:
resolution: {integrity: sha512-HkW/8Lrk8jl18kzQHvAw9aTHe1cqsyx5sDnxncx652+CIfhawokEPkeM3BoIC+z/Xv7a0yMr0f3pRRwhGH455g==}
dependencies:
'@swc/helpers': 0.3.17
brotli: 1.3.3
clone: 2.1.2
deep-equal: 2.2.3
dfa: 1.2.0
restructure: 2.0.1
tiny-inflate: 1.0.3
unicode-properties: 1.4.1
unicode-trie: 2.0.0
dev: false
/for-each@0.3.3:
resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==}
dependencies:
@ -11760,10 +11688,6 @@ packages:
hasBin: true
dev: false
/jpeg-js@0.4.4:
resolution: {integrity: sha512-WZzeDOEtTOBK4Mdsar0IqEU5sMr3vSV2RqkAIzUEV2BHnUfKGyswWFPFwK5EeDo93K3FohSHbLAjj0s1Wzd+dg==}
dev: false
/js-tokens@4.0.0:
resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
dev: false
@ -12168,13 +12092,6 @@ packages:
engines: {node: '>=14'}
dev: false
/linebreak@1.1.0:
resolution: {integrity: sha512-MHp03UImeVhB7XZtjd0E4n6+3xr5Dq/9xI/5FptGk5FrbDR3zagPa2DS6U8ks/3HjbKWG9Q1M2ufOzxV2qLYSQ==}
dependencies:
base64-js: 0.0.8
unicode-trie: 2.0.0
dev: false
/lines-and-columns@1.2.4:
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
dev: false
@ -13357,15 +13274,6 @@ packages:
resolution: {integrity: sha512-KG8UEiEVkR3wGEb4m5yZkVCzigAD+cVEJck2CzYZO37ZGJfctvVptVO192MwrtPhzONn6go8ylnOdMhKqi4nfg==}
dev: false
/pdfkit@0.13.0:
resolution: {integrity: sha512-AW79eHU5eLd2vgRDS9z3bSoi0FA+gYm+100LLosrQQMLUzOBGVOhG7ABcMFpJu7Bpg+MT74XYHi4k9EuU/9EZw==}
dependencies:
crypto-js: 4.2.0
fontkit: 1.9.0
linebreak: 1.1.0
png-js: 1.0.0
dev: false
/peberminta@0.9.0:
resolution: {integrity: sha512-XIxfHpEuSJbITd1H3EeQwpcZbTLHc+VVr8ANI9t5sit565tsI4/xK3KWTUFE2e6QiangUkh3B0jihzmGnNrRsQ==}
dev: false
@ -13476,10 +13384,6 @@ packages:
crc-32: 0.3.0
dev: false
/png-js@1.0.0:
resolution: {integrity: sha512-k+YsbhpA9e+EFfKjTCH3VW6aoKlyNYI6NYdTfDL4CIvFnvsuO84ttonmZE7rc+v23SLTH8XX+5w/Ak9v0xGY4g==}
dev: false
/polished@4.3.1:
resolution: {integrity: sha512-OBatVyC/N7SCW/FaDHrSd+vn0o5cS855TOmYi4OkdWUMSJCET/xip//ch8xGUvtr3i44X9LVyWwQlRMTN3pwSA==}
engines: {node: '>=10'}
@ -14399,10 +14303,6 @@ packages:
signal-exit: 3.0.7
dev: false
/restructure@2.0.1:
resolution: {integrity: sha512-e0dOpjm5DseomnXx2M5lpdZ5zoHqF1+bqdMJUohoYVVQa7cBdnk7fdmeI6byNWP/kiME72EeTiSypTCVnpLiDg==}
dev: false
/retry@0.13.1:
resolution: {integrity: sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==}
engines: {node: '>= 4'}
@ -15588,10 +15488,6 @@ packages:
resolution: {integrity: sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==}
dev: false
/tiny-inflate@1.0.3:
resolution: {integrity: sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw==}
dev: false
/tiny-invariant@1.3.1:
resolution: {integrity: sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw==}
dev: false
@ -15949,13 +15845,6 @@ packages:
engines: {node: '>=4'}
dev: false
/unicode-properties@1.4.1:
resolution: {integrity: sha512-CLjCCLQ6UuMxWnbIylkisbRj31qxHPAurvena/0iwSVbQ2G1VY5/HjV0IRabOEbDHlzZlRdCrD4NhB0JtU40Pg==}
dependencies:
base64-js: 1.5.1
unicode-trie: 2.0.0
dev: false
/unicode-property-aliases-ecmascript@2.1.0:
resolution: {integrity: sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==}
engines: {node: '>=4'}
@ -15965,13 +15854,6 @@ packages:
resolution: {integrity: sha512-36Xaw9wXi7MB/3/EQZZHkZyyiRNa9i3k9YtPAz2KfqMVH2xutdXyMHn4Igarmnvr+wOrfWa/6njhY+jPpXN2EQ==}
dev: false
/unicode-trie@2.0.0:
resolution: {integrity: sha512-x7bc76x0bm4prf1VLg79uhAzKw8DVboClSN5VxJuQ+LKDOVEW9CdH+VY7SP+vX7xCYQqzzgQpFqz15zeLvAtZQ==}
dependencies:
pako: 0.2.9
tiny-inflate: 1.0.3
dev: false
/unique-string@2.0.0:
resolution: {integrity: sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==}
engines: {node: '>=8'}
@ -17675,7 +17557,7 @@ packages:
dev: false
file:projects/collaboration.tgz(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-qnC0fEHLXpav10agLIE1xVh7XqyjlAJEV/gUkM/inVZQm9IC6mPykcvcVAdivSnCVuxQX4KQ2SX8Rwj7NzpGiQ==, tarball: file:projects/collaboration.tgz}
resolution: {integrity: sha512-LsJj6G8ubOzWhCU4REQt55SNJhtXzIE+7lAfX1G6IP2pfsERQ4n9HhEEJ3tKqeKD5qsG1/iyvDqHfWhZtOb8eQ==, tarball: file:projects/collaboration.tgz}
id: file:projects/collaboration.tgz
name: '@rush-temp/collaboration'
version: 0.0.0
@ -18347,49 +18229,6 @@ packages:
- supports-color
dev: false
file:projects/generator.tgz(bufferutil@4.0.8):
resolution: {integrity: sha512-htQgJlWm7m68YEmR5OivK2QfN1kOVGxqz/iHU4FYYSyox6t519nuh0S/+k98zfAo/BkAfKTdpfRxtH80dBlrQw==, tarball: file:projects/generator.tgz}
id: file:projects/generator.tgz
name: '@rush-temp/generator'
version: 0.0.0
dependencies:
'@types/faker': 5.5.9
'@types/jest': 29.5.12
'@types/node': 20.11.19
'@types/pdfkit': 0.12.12
'@types/ws': 8.5.10
'@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.56.0)(typescript@5.3.3)
'@typescript-eslint/parser': 6.21.0(eslint@8.56.0)(typescript@5.3.3)
commander: 8.3.0
esbuild: 0.20.1
eslint: 8.56.0
eslint-config-standard-with-typescript: 40.0.0(@typescript-eslint/eslint-plugin@6.21.0)(eslint-plugin-import@2.29.1)(eslint-plugin-n@15.7.0)(eslint-plugin-promise@6.1.1)(eslint@8.56.0)(typescript@5.3.3)
eslint-plugin-import: 2.29.1(eslint@8.56.0)
eslint-plugin-n: 15.7.0(eslint@8.56.0)
eslint-plugin-promise: 6.1.1(eslint@8.56.0)
faker: 5.5.3
jest: 29.7.0(@types/node@20.11.19)(ts-node@10.9.2)
jpeg-js: 0.4.4
pdfkit: 0.13.0
prettier: 3.2.5
prettier-plugin-svelte: 3.2.1(prettier@3.2.5)(svelte@4.2.11)
ts-jest: 29.1.2(esbuild@0.20.1)(jest@29.7.0)(typescript@5.3.3)
ts-node: 10.9.2(@types/node@20.11.19)(typescript@5.3.3)
typescript: 5.3.3
ws: 8.16.0(bufferutil@4.0.8)
transitivePeerDependencies:
- '@babel/core'
- '@jest/types'
- '@swc/core'
- '@swc/wasm'
- babel-jest
- babel-plugin-macros
- bufferutil
- node-notifier
- supports-color
- utf-8-validate
dev: false
file:projects/gmail-assets.tgz(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-lBPx5NE4J+fgYR9I5OVbgTfue3nKAjetJUlbK5LLh4OmGBCUBQazZyXH4hYNjOGzTZy6kp29X+p2YRzgAFWn3w==, tarball: file:projects/gmail-assets.tgz}
id: file:projects/gmail-assets.tgz
@ -19193,7 +19032,7 @@ packages:
dev: false
file:projects/minio.tgz(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-M7BBK96QskEOaVt8NqXAGDP0tuRqvPSWx8pPdz9dtN7MxoiaDqMMEKQbmYglXsvfLH3ROXvljh/iY9x2eHCmOg==, tarball: file:projects/minio.tgz}
resolution: {integrity: sha512-jolWjv7jH26LzuHm7YY2Qh805+IyHyjQMIiWqd7y74DFV5p7e3bylGhMUayhorye/LD/+k3k9tymBsEnMwy1WQ==, tarball: file:projects/minio.tgz}
id: file:projects/minio.tgz
name: '@rush-temp/minio'
version: 0.0.0
@ -20683,7 +20522,7 @@ packages:
dev: false
file:projects/pod-backup.tgz:
resolution: {integrity: sha512-yc02KSwBh3bR04WibDit9qk2laQjUBe6zoYEvGgj8otNXuxRdyU0HRYU+8U16eJ/1s963g4rfYnQwI07TglwNA==, tarball: file:projects/pod-backup.tgz}
resolution: {integrity: sha512-IpPeFal9+DNAhUVaRiV2T6Cck2EcF9siQuYTn37hXAc19+jxe4PdorAT/9a/YXw+ex8gnQYEHceOFCTO3cQjZw==, tarball: file:projects/pod-backup.tgz}
name: '@rush-temp/pod-backup'
version: 0.0.0
dependencies:
@ -20903,7 +20742,7 @@ packages:
dev: false
file:projects/presentation.tgz(@types/node@20.11.19)(esbuild@0.20.1)(postcss-load-config@4.0.2)(postcss@8.4.35)(ts-node@10.9.2):
resolution: {integrity: sha512-kt58pEFNpM1kpEL5v9iTrLO9yhjUseCGS9p5qdfeEieXRke3KQ6fkebHF7UJAGzjoJHTXHfxV/Unpp16c5G7Sg==, tarball: file:projects/presentation.tgz}
resolution: {integrity: sha512-UZsPzLjuuU4Y7WagdnqjcoAapZCVACgKL4m87xnMUPbrRuXZzLBGk/tw4a/8R3zxZu3LD3IvGb8gLqJIC+/7/g==, tarball: file:projects/presentation.tgz}
id: file:projects/presentation.tgz
name: '@rush-temp/presentation'
version: 0.0.0
@ -21042,7 +20881,7 @@ packages:
dev: false
file:projects/rank.tgz(@types/node@20.11.19)(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-CFhOCNYorpwPLIlkO8CSNQmkgYkgP2x2V5WpoJIffBiohZ9rbYaKJjTVZuDL0JeAkwyBNL/2JrZxu0bNN2adGA==, tarball: file:projects/rank.tgz}
resolution: {integrity: sha512-U9E48aTQN4WJiwY+0n7tVZf3t9GjLKoUAC2An8H7pJf9lJEWFsjGfLZSKIKyIBvdxMdFEgdsB2HospBEDpHLHQ==, tarball: file:projects/rank.tgz}
id: file:projects/rank.tgz
name: '@rush-temp/rank'
version: 0.0.0
@ -21476,7 +21315,7 @@ packages:
dev: false
file:projects/server-backup.tgz(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-/DF/SldrDwvsP+n5xM4K8z7OKb/7IGM/8sIX3YGLptz726V7IQYtYl9CXiMXsocr5FMKOjFkC6GsJ+LkG+Z31A==, tarball: file:projects/server-backup.tgz}
resolution: {integrity: sha512-DTB6NEyrBuFA9nJa37o2sM3WQcj4woxNQWHdTqUsrVk4AL5ApAB5zjk7A0V6aLQapI6IWs2GPNkfyGPPBw9d5Q==, tarball: file:projects/server-backup.tgz}
id: file:projects/server-backup.tgz
name: '@rush-temp/server-backup'
version: 0.0.0
@ -21695,7 +21534,7 @@ packages:
dev: false
file:projects/server-contact-resources.tgz(@types/node@20.11.19)(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-PgTX+UBSpgn+V8foqn9mebOduhVf9hYEf5v40MoQnXLX3W+2nrjr0kSvc6960nqqFh0j/i4R+ZKArQeI4DtF0w==, tarball: file:projects/server-contact-resources.tgz}
resolution: {integrity: sha512-+j7MczD2APhAJbJjlWP1f5UDt1uniO8dGJqYHGvq1YiuxiSYuVZRakgqnB4E3EdhzFtbngsOJsCy2ptOygO6BQ==, tarball: file:projects/server-contact-resources.tgz}
id: file:projects/server-contact-resources.tgz
name: '@rush-temp/server-contact-resources'
version: 0.0.0
@ -21757,7 +21596,7 @@ packages:
dev: false
file:projects/server-core.tgz(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-AVji1RU9Jo+286+sseqnWc74SfOyGASE93qPyJO4c2ZlydAoSFJ6IbJS3eOj32MVj6aW0gNpv4WC392FePwE5g==, tarball: file:projects/server-core.tgz}
resolution: {integrity: sha512-fOY9ubZ+b2WFnOdM0uaH7GIkNPmilqKPWofRKD4X0ONzHER4Opr2WzyC7eZsn35hxn9hSh0+7dyVuDzB3WZk4w==, tarball: file:projects/server-core.tgz}
id: file:projects/server-core.tgz
name: '@rush-temp/server-core'
version: 0.0.0
@ -22755,7 +22594,7 @@ packages:
dev: false
file:projects/server-tool.tgz(@types/node@20.11.19)(bufferutil@4.0.8)(esbuild@0.20.1)(ts-node@10.9.2):
resolution: {integrity: sha512-H2X9ao6O1+7WbsWS4pHWmrgE3VqrE5JwJAHefW+H7IC3w90IwTILOaT9Go+xkiE+QbYyLQa+iMCBAaLGDBS6dg==, tarball: file:projects/server-tool.tgz}
resolution: {integrity: sha512-L9yNeJTmNheVBwCQVJEuoI/wL0dYbdnGirht9qyiAX1pzjSRx7sOh/lF3ViJ6bYnpvaxjK+a9XHTFdEj6LA4vg==, tarball: file:projects/server-tool.tgz}
id: file:projects/server-tool.tgz
name: '@rush-temp/server-tool'
version: 0.0.0
@ -22963,7 +22802,7 @@ packages:
dev: false
file:projects/server.tgz(esbuild@0.20.1):
resolution: {integrity: sha512-KEmCP/u/cMeibwaVIC8p1Z/MEfs2ju3oaaeXQzg7wlN6EgN3iq8mmJVRgBGkdbA1GdEYt003ROejb75q5ZssQg==, tarball: file:projects/server.tgz}
resolution: {integrity: sha512-zeGwDAlmb0rWftJk5lRXvGNhPB41b+vkBjFBbD619bHqpu27JQYLvmJFKetK/sCJyXZH3SpAl8ct8agLVtkkSA==, tarball: file:projects/server.tgz}
id: file:projects/server.tgz
name: '@rush-temp/server'
version: 0.0.0

View File

@ -1,7 +0,0 @@
module.exports = {
extends: ['./node_modules/@hcengineering/platform-rig/profiles/default/eslint.config.json'],
parserOptions: {
tsconfigRootDir: __dirname,
project: './tsconfig.json'
}
}

View File

@ -1,4 +0,0 @@
*
!/lib/**
!CHANGELOG.md
/lib/**/__tests__/

View File

@ -1,7 +0,0 @@
FROM node
WORKDIR /usr/src/app
COPY bundle/bundle.js ./
CMD [ "bash" ]

View File

@ -1,20 +0,0 @@
#!/bin/bash
#
# Copyright © 2020, 2021 Anticrm Platform Contributors.
# Copyright © 2021 Hardcore Engineering Inc.
#
# Licensed under the Eclipse Public License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
rushx bundle
rushx docker:build
rushx docker:push

View File

@ -1,4 +0,0 @@
{
"$schema": "https://developer.microsoft.com/json-schemas/rig-package/rig.schema.json",
"rigPackageName": "@hcengineering/platform-rig"
}

View File

@ -1,7 +0,0 @@
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
testMatch: ['**/?(*.)+(spec|test).[jt]s?(x)'],
roots: ["./src"],
coverageReporters: ["text-summary", "html"]
}

View File

@ -1,69 +0,0 @@
{
"name": "@hcengineering/generator",
"version": "0.6.0",
"main": "lib/index.js",
"svelte": "src/index.ts",
"types": "types/index.d.ts",
"author": "Anticrm Platform Contributors",
"license": "EPL-2.0",
"scripts": {
"build": "compile",
"build:watch": "compile",
"start": "ts-node src/index.ts",
"bundle": "mkdir -p bundle && esbuild src/index.ts --bundle --minify --platform=node > bundle/bundle.js",
"_phase:bundle": "rushx bundle",
"run-local": "TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost ts-node ./src/index.ts",
"format": "format src",
"test": "jest --passWithNoTests --silent",
"_phase:build": "compile transpile src",
"_phase:test": "jest --passWithNoTests --silent",
"_phase:format": "format src",
"_phase:validate": "compile validate"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",
"@typescript-eslint/eslint-plugin": "^6.11.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-promise": "^6.1.1",
"eslint-plugin-n": "^15.4.0",
"eslint": "^8.54.0",
"ts-node": "^10.8.0",
"esbuild": "^0.20.0",
"@types/node": "~20.11.16",
"@typescript-eslint/parser": "^6.11.0",
"eslint-config-standard-with-typescript": "^40.0.0",
"prettier": "^3.1.0",
"typescript": "^5.3.3",
"@types/ws": "^8.5.3",
"@types/faker": "~5.5.9",
"jest": "^29.7.0",
"ts-jest": "^29.1.1",
"@types/jest": "^29.5.5"
},
"dependencies": {
"@hcengineering/account": "^0.6.0",
"@hcengineering/attachment": "^0.6.9",
"@hcengineering/chunter": "^0.6.12",
"@hcengineering/client": "^0.6.14",
"@hcengineering/client-resources": "^0.6.23",
"@hcengineering/contact": "^0.6.20",
"@hcengineering/core": "^0.6.28",
"@hcengineering/minio": "^0.6.0",
"@hcengineering/model": "^0.6.7",
"@hcengineering/model-all": "^0.6.0",
"@hcengineering/model-recruit": "^0.6.0",
"@hcengineering/model-telegram": "^0.6.0",
"@hcengineering/platform": "^0.6.9",
"@hcengineering/recruit": "^0.6.21",
"@hcengineering/server-token": "^0.6.7",
"@hcengineering/task": "^0.6.13",
"@hcengineering/telegram": "^0.6.14",
"@hcengineering/tracker": "^0.6.13",
"@types/pdfkit": "~0.12.3",
"commander": "^8.1.0",
"faker": "~5.5.3",
"jpeg-js": "~0.4.3",
"pdfkit": "~0.13.0",
"ws": "^8.10.0"
}
}

View File

@ -1,13 +0,0 @@
# Overview
Random data generator
## Usage
```bash
cd ./dev/generator
rushx run-local gen recruit workspace 20
rushx run-local gen issue workspace 20
```
Will generate 20 candidate cards.

View File

@ -1,25 +0,0 @@
#!/bin/bash
# Copyright © 2020, 2021 Anticrm Platform Contributors.
# Copyright © 2021 Hardcore Engineering Inc.
#
# Licensed under the Eclipse Public License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
export MINIO_ENDPOINT=$(kubectl get secret minio -o jsonpath="{.data.endpoint}" | base64 --decode)
export MINIO_ACCESS_KEY=$(kubectl get secret minio -o jsonpath="{.data.accessKey}" | base64 --decode)
export MINIO_SECRET_KEY=$(kubectl get secret minio -o jsonpath="{.data.secretKey}" | base64 --decode)
kubectl run anticrm-tool --rm --tty -i --restart='Never' \
--env="TRANSACTOR_URL=ws://transactor/" \
--env="MINIO_ENDPOINT=$MINIO_ENDPOINT" \
--env="MINIO_ACCESS_KEY=$MINIO_ACCESS_KEY" \
--env="MINIO_SECRET_KEY=$MINIO_SECRET_KEY" --image anticrm/tool --command -- bash

View File

@ -1,61 +0,0 @@
import attachment, { Attachment } from '@hcengineering/attachment'
import { Class, Doc, generateId, Ref, Space, TxOperations, WorkspaceId } from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import faker from 'faker'
import PDFDocument from 'pdfkit'
export interface AttachmentOptions {
min: number
max: number
deleteFactor: number // 0-100 value, will delete just added attachment, below min with rate
}
export async function addAttachments<T extends Doc> (
options: AttachmentOptions,
client: TxOperations,
minio: MinioService,
workspaceId: WorkspaceId,
space: Ref<Space>,
objectId: Ref<T>,
_class: Ref<Class<T>>,
collection: string
): Promise<void> {
const attachmentCount = options.min + faker.datatype.number(options.max)
for (let i = 0; i < attachmentCount; i++) {
const attachmentId = `candidate-attachment-${generateId()}-${i}` as Ref<Attachment>
const needDelete = i >= options.min && faker.datatype.number(100) > options.deleteFactor
let bufLen = 0
if (!needDelete) {
const doc = new PDFDocument()
doc.fontSize(16).text(faker.lorem.paragraph(faker.datatype.number(50)))
doc.end()
const buf = doc.read()
bufLen = buf.length
await minio.put(workspaceId, attachmentId, buf, bufLen, { 'Content-Type': 'application/pdf' })
}
await client.addCollection(
attachment.class.Attachment,
space,
objectId,
_class,
'attachments',
{
name: faker.system.commonFileName('pdf'),
file: attachmentId,
type: 'application/pdf',
size: bufLen,
lastModified: faker.date.past().getTime()
},
attachmentId
)
if (needDelete) {
await client.removeCollection(attachment.class.Attachment, space, attachmentId, objectId, _class, 'attachments')
}
}
}

View File

@ -1,38 +0,0 @@
import chunter, { ChatMessage } from '@hcengineering/chunter'
import { AttachedData, Class, Doc, generateId, Ref, Space, TxOperations } from '@hcengineering/core'
import faker from 'faker'
export interface CommentOptions {
min: number
max: number
paragraphMin: number
paragraphMax: number
updateFactor: number // 0-100 value, will generate random value and if value is less updateFactor it will be updated.
}
export async function addComments<T extends Doc> (
options: CommentOptions,
client: TxOperations,
space: Ref<Space>,
objectId: Ref<T>,
_class: Ref<Class<T>>,
collection: string
): Promise<void> {
const commentsCount = options.min + faker.datatype.number(options.max)
for (let i = 0; i < commentsCount; i++) {
const commentId = `candidate-comment-${generateId()}-${i}` as Ref<ChatMessage>
const commentData: AttachedData<ChatMessage> = {
message: faker.lorem.paragraphs(options.paragraphMin + faker.datatype.number(options.paragraphMax))
}
await client.addCollection(chunter.class.ChatMessage, space, objectId, _class, collection, commentData, commentId)
if (faker.datatype.number(100) > options.updateFactor) {
const updateMsg = faker.lorem.paragraphs(options.paragraphMin + faker.datatype.number(options.paragraphMax))
await client.updateCollection(chunter.class.ChatMessage, space, commentId, objectId, _class, collection, {
message: updateMsg
})
}
}
}

View File

@ -1,17 +0,0 @@
import client from '@hcengineering/client'
import clientResources from '@hcengineering/client-resources'
import { Client, systemAccountEmail, WorkspaceId } from '@hcengineering/core'
import { setMetadata } from '@hcengineering/platform'
import { generateToken } from '@hcengineering/server-token'
// eslint-disable-next-line
const WebSocket = require('ws')
export async function connect (transactorUrl: string, workspace: WorkspaceId): Promise<Client> {
console.log('connecting to transactor...')
const token = generateToken(systemAccountEmail, workspace)
// We need to override default factory with 'ws' one.
setMetadata(client.metadata.ClientSocketFactory, (url) => new WebSocket(url))
return await (await clientResources()).function.GetClient(token, transactorUrl)
}

View File

@ -1,97 +0,0 @@
//
// Copyright © 2020, 2021 Anticrm Platform Contributors.
// Copyright © 2021 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import { program } from 'commander'
import { MinioService } from '@hcengineering/minio'
import { generateIssues } from './issues'
import { generateContacts } from './recruit'
import { getWorkspaceId } from '@hcengineering/core'
const transactorUrl = process.env.TRANSACTOR_URL
if (transactorUrl === undefined) {
console.error('please provide transactor url.')
process.exit(1)
}
const minioEndpoint = process.env.MINIO_ENDPOINT
if (minioEndpoint === undefined) {
console.error('please provide minio endpoint')
process.exit(1)
}
const minioAccessKey = process.env.MINIO_ACCESS_KEY
if (minioAccessKey === undefined) {
console.error('please provide minio access key')
process.exit(1)
}
const minioSecretKey = process.env.MINIO_SECRET_KEY
if (minioSecretKey === undefined) {
console.error('please provide minio secret key')
process.exit(1)
}
const minio = new MinioService({
endPoint: minioEndpoint,
port: 9000,
useSSL: false,
accessKey: minioAccessKey,
secretKey: minioSecretKey
})
program.version('0.0.1')
// available types: recruit, issue
program
.command('gen <genType> <workspace> <productId> <count>')
.description('generate a bunch of random candidates with attachemnts and comments or issues')
.option('-r, --random', 'generate random ids. So every call will add count <count> more candidates.', false)
.option('-l, --lite', 'use same pdf and same account for applicant and candidates', false)
.action(async (genType: string, workspace: string, productId: string, count: number, cmd) => {
switch (genType) {
case 'recruit': {
await generateContacts(
transactorUrl,
getWorkspaceId(workspace, productId),
{
contacts: count,
random: cmd.random as boolean,
comments: { min: 1, max: 10, paragraphMin: 1, paragraphMax: 20, updateFactor: 30 },
attachments: {
min: 1,
max: 3,
deleteFactor: 20
},
vacancy: 3,
applicants: { min: 50, max: 200, applicantUpdateFactor: 70 },
lite: cmd.lite as boolean
},
minio
)
return
}
case 'issue': {
await generateIssues(transactorUrl, getWorkspaceId(workspace, productId), {
count
})
return
}
default:
console.error(`Expected types: recruit, issue. Got type: ${genType}`)
}
})
program.parse(process.argv)

View File

@ -1,121 +0,0 @@
import faker from 'faker'
import contact from '@hcengineering/contact'
import core, {
AttachedData,
generateId,
MeasureMetricsContext,
metricsToString,
Ref,
SortingOrder,
TxOperations,
WorkspaceId
} from '@hcengineering/core'
import tracker, { Issue, IssuePriority, IssueStatus, Project } from '@hcengineering/tracker'
import { connect } from './connect'
import { makeRank } from '@hcengineering/task'
let objectId: Ref<Issue> = generateId()
const space = tracker.project.DefaultProject
const object: AttachedData<Issue> = {
title: '',
description: '',
assignee: null,
component: null,
milestone: null,
number: 0,
rank: '',
status: '' as Ref<IssueStatus>,
priority: IssuePriority.NoPriority,
dueDate: null,
comments: 0,
subIssues: 0,
parents: [],
reportedTime: 0,
remainingTime: 0,
estimation: 0,
reports: 0,
childInfo: [],
identifier: '',
kind: tracker.taskTypes.Issue
}
export interface IssueOptions {
count: number // how many issues to add
}
export async function generateIssues (
transactorUrl: string,
workspaceId: WorkspaceId,
options: IssueOptions
): Promise<void> {
const connection = await connect(transactorUrl, workspaceId)
const accounts = await connection.findAll(contact.class.PersonAccount, {})
const account = faker.random.arrayElement(accounts)
const client = new TxOperations(connection, account._id)
const ctx = new MeasureMetricsContext('recruit', {})
const statuses = (await client.findAll(tracker.class.IssueStatus, { space }, { projection: { _id: 1 } })).map(
(p) => p._id
)
for (let index = 0; index < options.count; index++) {
console.log(`Generating issue ${index + 1}...`)
await genIssue(client, statuses)
}
await connection.close()
ctx.end()
console.info(metricsToString(ctx.metrics, 'Client', 70))
}
async function genIssue (client: TxOperations, statuses: Ref<IssueStatus>[]): Promise<void> {
const lastOne = await client.findOne<Issue>(tracker.class.Issue, {}, { sort: { rank: SortingOrder.Descending } })
const incResult = await client.updateDoc(
tracker.class.Project,
core.space.Space,
space,
{
$inc: { sequence: 1 }
},
true
)
const project = (incResult as any).object as Project
const number = project.sequence
const value: AttachedData<Issue> = {
title: faker.commerce.productName(),
description: faker.lorem.paragraphs(),
assignee: object.assignee,
component: object.component,
milestone: object.milestone,
number,
status: faker.random.arrayElement(statuses),
priority: faker.random.arrayElement(Object.values(IssuePriority)) as IssuePriority,
rank: makeRank(lastOne?.rank, undefined),
comments: 0,
subIssues: 0,
dueDate: object.dueDate,
parents: [],
reportedTime: 0,
remainingTime: 0,
estimation: object.estimation,
reports: 0,
relations: [],
childInfo: [],
identifier: `${project.identifier}-${number}`,
kind: tracker.taskTypes.Issue
}
await client.addCollection(
tracker.class.Issue,
space,
tracker.ids.NoParent,
tracker.class.Issue,
'subIssues',
value,
objectId
)
objectId = generateId()
}

View File

@ -1,53 +0,0 @@
import core, { MeasureContext, Ref, Status, TxOperations } from '@hcengineering/core'
import task, { Project } from '@hcengineering/task'
import { findOrUpdate } from './utils'
/**
 * Ensure a fixed set of task statuses (kanban columns) exists for the space.
 *
 * Upserts are performed concurrently, but the returned array order is fixed:
 * active states in declaration order, followed by the done (Won/Lost) states.
 * FIX: the previous version pushed ids from inside Promise.all callbacks, so
 * the returned order depended on async completion order.
 *
 * @param ctx - measurement context used to time each upsert
 * @param spaceId - project the statuses are generated for (part of each id)
 * @param client - transaction client used for the upserts
 * @returns status ids in declaration order
 */
export async function createUpdateSpaceKanban (
  ctx: MeasureContext,
  spaceId: Ref<Project>,
  client: TxOperations
): Promise<Ref<Status>[]> {
  const rawStates = [
    { color: 9, name: 'Initial' },
    { color: 10, name: 'Intermidiate' },
    { color: 1, name: 'OverIntermidiate' },
    { color: 0, name: 'Done' },
    { color: 11, name: 'Invalid' }
  ]
  const doneStates = [
    { category: task.statusCategory.Won, name: 'Won' },
    { category: task.statusCategory.Lost, name: 'Lost' }
  ]
  // Stable, human-readable id derived from the space and the state name.
  const stateId = (name: string): Ref<Status> =>
    `generated-${spaceId}.state.${name.toLowerCase().replace(' ', '_')}` as Ref<Status>
  // Pre-compute ids so the result ordering is independent of upsert completion order.
  const activeIds = rawStates.map((st) => stateId(st.name))
  const doneIds = doneStates.map((st) => stateId(st.name))
  await Promise.all(
    rawStates.map(async (st, i) => {
      await ctx.with('find-or-update', {}, (ctx) =>
        findOrUpdate(ctx, client, task.space.Statuses, core.class.Status, activeIds[i], {
          ofAttribute: task.attribute.State,
          name: st.name,
          color: st.color,
          category: task.statusCategory.Active
        })
      )
    })
  )
  await Promise.all(
    doneStates.map(async (st, i) => {
      await ctx.with('gen-done-state', {}, (ctx) =>
        findOrUpdate(ctx, client, task.space.Statuses, core.class.Status, doneIds[i], {
          ofAttribute: task.attribute.State,
          name: st.name,
          category: st.category
        })
      )
    })
  )
  return [...activeIds, ...doneIds]
}

View File

@ -1,348 +0,0 @@
import contact, { Channel, Employee, Person, PersonAccount } from '@hcengineering/contact'
import core, {
AttachedData,
Data,
MeasureContext,
MeasureMetricsContext,
MixinUpdate,
Ref,
Status,
TxOperations,
WorkspaceId,
generateId,
metricsToString
} from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import recruit from '@hcengineering/model-recruit'
import { Applicant, Candidate, Vacancy } from '@hcengineering/recruit'
import task, { ProjectType, TaskType, genRanks, type Rank } from '@hcengineering/task'
import faker from 'faker'
import jpeg, { BufferRet } from 'jpeg-js'
import { AttachmentOptions, addAttachments } from './attachments'
import { CommentOptions, addComments } from './comments'
import { connect } from './connect'
import { createUpdateSpaceKanban } from './kanban'
import { findOrUpdate, findOrUpdateAttached } from './utils'
/** Tuning knobs for the recruit demo-data generator. */
export interface RecruitOptions {
  random: boolean // random id prefix.
  contacts: number // how many contacts to add
  vacancy: number // Will add number of vacancies with applications.
  // Comment generation control
  comments: CommentOptions
  // Attachment generation control
  attachments: AttachmentOptions
  // Applicants per vacancy: min + random(0..max).
  // applicantUpdateFactor is a 0-100 threshold: higher rolls trigger a follow-up
  // status update on the applicant.
  applicants: {
    min: number
    max: number
    applicantUpdateFactor: number
  }
  // Lite mode reuses one shared avatar and skips attachment/avatar uploads.
  lite: boolean
}
/**
 * Connect to the workspace and generate candidates, then vacancies with
 * applicants drawn from those candidates. Prints collected metrics when done.
 */
export async function generateContacts (
  transactorUrl: string,
  workspaceId: WorkspaceId,
  options: RecruitOptions,
  minio: MinioService
): Promise<void> {
  const connection = await connect(transactorUrl, workspaceId)
  const accounts = await connection.findAll(contact.class.PersonAccount, {})
  const accountIds = accounts.map((a) => a._id)
  const emoloyeeIds = accounts.map((a) => a.person as Ref<Employee>)
  // All generated transactions are issued on behalf of a random existing account.
  const client = new TxOperations(connection, faker.random.arrayElement(accounts)._id)
  const candidates: Ref<Candidate>[] = []
  const ctx = new MeasureMetricsContext('recruit', { contacts: options.contacts })
  // Candidates first: vacancies reference the accumulated candidate pool.
  for (let idx = 0; idx < options.contacts; idx++) {
    await ctx.with('candidate', {}, (ctx) => genCandidate(ctx, idx, minio, workspaceId, options, candidates, client))
  }
  for (let idx = 0; idx < options.vacancy; idx++) {
    await ctx.with('vacancy', {}, (ctx) =>
      genVacansyApplicants(ctx, accountIds, options, idx, client, minio, workspaceId, candidates, emoloyeeIds)
    )
  }
  await connection.close()
  ctx.end()
  console.info(metricsToString(ctx.metrics, 'Client', 70))
}
/**
 * Generate one vacancy (project type + vacancy space) and attach a random
 * number of applicants drawn from the already-generated candidate pool.
 *
 * @param i - vacancy ordinal; used for deterministic ids when options.random is off
 */
async function genVacansyApplicants (
  ctx: MeasureContext,
  accountIds: Ref<PersonAccount>[],
  options: RecruitOptions,
  i: number,
  client: TxOperations,
  minio: MinioService,
  workspaceId: WorkspaceId,
  candidates: Ref<Candidate>[],
  emoloyeeIds: Ref<Employee>[]
): Promise<void> {
  const vacancyId = (options.random ? `vacancy-${generateId()}-${i}` : `vacancy-genid-${i}`) as Ref<Vacancy>
  const typeId = (options.random ? `vacancy-type-${generateId()}-${i}` : `vacancy-type-genid-${i}`) as Ref<ProjectType>
  const states = await ctx.with('create-kanbad', {}, (ctx) => createUpdateSpaceKanban(ctx, vacancyId, client))
  const type: Data<ProjectType> = {
    name: faker.name.title(),
    description: faker.lorem.sentences(2),
    shortDescription: faker.lorem.sentences(1),
    descriptor: recruit.descriptors.VacancyType,
    private: false,
    members: [],
    archived: false,
    tasks: [],
    classic: false,
    // TODO: Fix me.
    statuses: states.map((s) => {
      return { _id: s, taskType: '' as Ref<TaskType> }
    }),
    targetClass: recruit.class.Vacancy
  }
  await ctx.with('update', {}, (ctx) =>
    findOrUpdate(ctx, client, core.space.Space, task.class.ProjectType, typeId, type)
  )
  const vacancy: Data<Vacancy> = {
    name: faker.company.companyName(),
    description: faker.lorem.sentences(2),
    fullDescription: faker.lorem.sentences(10),
    location: faker.address.city(),
    members: accountIds,
    number: faker.datatype.number(),
    private: false,
    archived: false,
    type: typeId
  }
  console.log('Creating vacandy', vacancy.name)
  // Update or create the vacancy space itself.
  await ctx.with('update', {}, (ctx) =>
    findOrUpdate(ctx, client, core.space.Space, recruit.class.Vacancy, vacancyId, vacancy)
  )
  console.log('Vacandy generated', vacancy.name)
  if (!options.lite) {
    await ctx.with('add-attachments', {}, () =>
      addAttachments(
        options.attachments,
        client,
        minio,
        workspaceId,
        vacancyId,
        vacancyId,
        recruit.class.Vacancy,
        'attachments'
      )
    )
  }
  console.log('Vacancy attachments generated', vacancy.name)
  // Pick a random subset of candidates to become applicants for this vacancy.
  const applicantsForCount = options.applicants.min + faker.datatype.number(options.applicants.max)
  const applicantsFor = faker.random.arrayElements(candidates, applicantsForCount)
  const ranks = genRanks(candidates.length)
  for (let index = 0; index < applicantsFor.length; index++) {
    const candidateId = applicantsFor[index]
    await ctx.with('applicant', {}, (ctx) =>
      // FIX: rank must follow the applicant index (was ranks[i] — the vacancy
      // ordinal — which gave every applicant in a vacancy the same rank).
      genApplicant(ctx, vacancyId, candidateId, emoloyeeIds, states, client, options, minio, workspaceId, ranks[index])
    )
  }
}
/**
 * Create (or update) a single Applicant attached to a candidate within a
 * vacancy, then optionally add comments, attachments and a random status update.
 */
async function genApplicant (
  ctx: MeasureContext,
  vacancyId: Ref<Vacancy>,
  candidateId: Ref<Candidate>,
  emoloyeeIds: Ref<Employee>[],
  states: Ref<Status>[],
  client: TxOperations,
  options: RecruitOptions,
  minio: MinioService,
  workspaceId: WorkspaceId,
  rank: Rank
): Promise<void> {
  // Deterministic id: one applicant per (vacancy, candidate) pair.
  const applicantId = `vacancy-${vacancyId}-${candidateId}` as Ref<Applicant>
  const appNumber = faker.datatype.number()
  const applicantData: AttachedData<Applicant> = {
    number: appNumber,
    assignee: faker.random.arrayElement(emoloyeeIds),
    status: faker.random.arrayElement(states),
    rank,
    startDate: null,
    dueDate: null,
    kind: recruit.taskTypes.Applicant,
    identifier: `APP-${appNumber}`
  }
  // Upsert the applicant as a document attached to the candidate.
  await findOrUpdateAttached(ctx, client, vacancyId, recruit.class.Applicant, applicantId, applicantData, {
    attachedTo: candidateId,
    attachedClass: recruit.mixin.Candidate,
    collection: 'applications'
  })
  await ctx.with('add-comment', {}, () =>
    addComments(options.comments, client, vacancyId, applicantId, recruit.class.Vacancy, 'comments')
  )
  if (!options.lite) {
    await ctx.with('add-attachment', {}, () =>
      addAttachments(
        options.attachments,
        client,
        minio,
        workspaceId,
        vacancyId,
        applicantId,
        recruit.class.Applicant,
        'attachments'
      )
    )
  }
  // With some probability (controlled by applicantUpdateFactor), simulate a
  // later status change on the applicant.
  const roll = faker.datatype.number(100)
  if (roll > options.applicants.applicantUpdateFactor) {
    await ctx.with('update-collection', {}, () =>
      client.updateCollection(
        recruit.class.Applicant,
        vacancyId,
        applicantId,
        candidateId,
        recruit.class.Applicant,
        'applications',
        { status: faker.random.arrayElement(states) }
      )
    )
  }
}
// Single pre-generated avatar reused for every candidate in lite mode
// (avoids re-encoding a JPEG per candidate; the upload is skipped in lite mode anyway).
const liteAvatar = generateAvatar(0)
// @measure('Candidate')
/**
 * Generate one candidate: a Person document, an email Channel, and the
 * Candidate mixin, followed by optional comments and attachments.
 *
 * The generated id is appended to `candidates` so vacancies can reference it.
 * In lite mode the shared `liteAvatar` is reused and the avatar upload is skipped.
 */
async function genCandidate (
  ctx: MeasureContext,
  i: number,
  minio: MinioService,
  workspaceId: WorkspaceId,
  options: RecruitOptions,
  candidates: Ref<Candidate>[],
  client: TxOperations
): Promise<void> {
  const fName = faker.name.firstName()
  const lName = faker.name.lastName()
  const { imgId, jpegImageData } = options.lite ? liteAvatar : generateAvatar(i)
  if (!options.lite) {
    // Upload the encoded JPEG so the avatar id stored on the person resolves to content.
    await ctx.with('avatar', {}, () =>
      minio.put(workspaceId, imgId, jpegImageData.data, jpegImageData.data.length, { 'Content-Type': 'image/jpeg' })
    )
  }
  const candidate: Data<Person> = {
    name: fName + ',' + lName,
    city: faker.address.city(),
    avatar: imgId
  }
  const candidateMixin: MixinUpdate<Person, Candidate> = {
    title: faker.name.title(),
    onsite: faker.datatype.boolean(),
    remote: faker.datatype.boolean(),
    source: faker.lorem.lines(1)
  }
  // Deterministic ids (unless options.random) make reruns behave as idempotent upserts.
  const candidateId = (options.random ? `candidate-${generateId()}-${i}` : `candidate-genid-${i}`) as Ref<Candidate>
  candidates.push(candidateId)
  const channelId = (options.random ? `channel-${generateId()}-${i}` : `channel-genid-${i}`) as Ref<Channel>
  // Update or create candidate
  await ctx.with('find-update', {}, async () => {
    await findOrUpdate(ctx, client, recruit.space.CandidatesPublic, contact.class.Person, candidateId, candidate)
    // Attach an email channel to the person.
    await findOrUpdateAttached(
      ctx,
      client,
      recruit.space.CandidatesPublic,
      contact.class.Channel,
      channelId,
      {
        provider: contact.channelProvider.Email,
        value: faker.internet.email(fName, lName)
      },
      {
        attachedTo: candidateId,
        attachedClass: contact.class.Person,
        collection: 'channels'
      }
    )
    // Promote the person to a Candidate by applying the mixin data.
    await client.updateMixin(
      candidateId,
      contact.class.Person,
      recruit.space.CandidatesPublic,
      recruit.mixin.Candidate,
      candidateMixin
    )
  })
  await ctx.with('add-comment', {}, () =>
    addComments(options.comments, client, recruit.space.CandidatesPublic, candidateId, contact.class.Person, 'comments')
  )
  if (!options.lite) {
    await ctx.with('add-attachment', {}, () =>
      addAttachments(
        options.attachments,
        client,
        minio,
        workspaceId,
        recruit.space.CandidatesPublic,
        candidateId,
        contact.class.Person,
        'attachments'
      )
    )
  }
  console.log('Candidate', candidates.length, fName, lName, ' generated')
}
/**
 * Generate a random noise JPEG avatar.
 *
 * Each pixel is a random base color with per-channel jitter; the RGBA frame is
 * encoded via jpeg-js (alpha is ignored by the JPEG encoder).
 *
 * @param pos - avatar ordinal; currently unused, kept for interface compatibility
 * @param width - image width in pixels (default 128, as before)
 * @param height - image height in pixels (default 128, as before)
 * @param quality - JPEG encode quality 0-100 (default 50, as before)
 * @returns a fresh object id and the encoded JPEG data
 */
function generateAvatar (
  pos: number,
  width: number = 128,
  height: number = 128,
  quality: number = 50
): { imgId: string, jpegImageData: BufferRet } {
  const imgId = generateId()
  // RGBA frame buffer: 4 bytes per pixel.
  const frameData = Buffer.alloc(width * height * 4)
  const baseR = faker.datatype.number(255)
  const baseG = faker.datatype.number(255)
  const baseB = faker.datatype.number(255)
  let i = 0
  while (i < frameData.length) {
    frameData[i++] = (baseR + faker.datatype.number(100)) % 255 // red
    frameData[i++] = (baseG + faker.datatype.number(100)) % 255 // green
    frameData[i++] = (baseB + faker.datatype.number(100)) % 255 // blue
    frameData[i++] = 0xff // alpha - ignored in JPEGs
  }
  const jpegImageData = jpeg.encode({ data: frameData, width, height }, quality)
  return { imgId, jpegImageData }
}

View File

@ -1,62 +0,0 @@
import {
AttachedData,
AttachedDoc,
Class,
Data,
Doc,
DocumentUpdate,
MeasureContext,
Ref,
Space,
TxOperations
} from '@hcengineering/core'
/**
 * Upsert helper: update the document when it already exists in the given
 * space, otherwise create it under the provided id.
 *
 * @returns true when a new document was created, false when an existing one was updated
 */
export async function findOrUpdate<T extends Doc> (
  ctx: MeasureContext,
  client: TxOperations,
  space: Ref<Space>,
  _class: Ref<Class<T>>,
  objectId: Ref<T>,
  data: Data<T>
): Promise<boolean> {
  const found = await client.findOne<Doc>(_class, { _id: objectId, space })
  if (found === undefined) {
    await client.createDoc(_class, space, data, objectId)
    return true
  }
  await client.updateDoc(_class, space, objectId, data)
  return false
}
/**
 * Upsert helper for attached documents: update the collection entry when it
 * already exists in the given space, otherwise add it under the provided id.
 *
 * @param attached - parent reference (attachedTo/attachedClass) and collection name
 */
export async function findOrUpdateAttached<T extends AttachedDoc> (
  ctx: MeasureContext,
  client: TxOperations,
  space: Ref<Space>,
  _class: Ref<Class<T>>,
  objectId: Ref<T>,
  data: AttachedData<T>,
  attached: { attachedTo: Ref<Doc>, attachedClass: Ref<Class<Doc>>, collection: string }
): Promise<void> {
  const found = await client.findOne<Doc>(_class, { _id: objectId, space })
  if (found === undefined) {
    await client.addCollection(
      _class,
      space,
      attached.attachedTo,
      attached.attachedClass,
      attached.collection,
      data,
      objectId
    )
    return
  }
  await client.updateCollection(
    _class,
    space,
    objectId,
    attached.attachedTo,
    attached.attachedClass,
    attached.collection,
    data as unknown as DocumentUpdate<T>
  )
}

View File

@ -1,10 +0,0 @@
{
"extends": "./node_modules/@hcengineering/platform-rig/profiles/default/tsconfig.json",
"compilerOptions": {
"rootDir": "./src",
"outDir": "./lib",
"declarationDir": "./types",
"tsBuildInfoFile": ".build/build.tsbuildinfo"
}
}

View File

@ -16,7 +16,6 @@
import { prepareTools as prepareToolsRaw } from '@hcengineering/server-tool'
import { type Data, type Tx, type Version } from '@hcengineering/core'
import { type MinioService } from '@hcengineering/minio'
import { type MigrateOperation } from '@hcengineering/model'
import builder, { getModelVersion, migrateOperations } from '@hcengineering/model-all'
import { devTool } from '.'
@ -28,6 +27,7 @@ import { serverCalendarId } from '@hcengineering/server-calendar'
import { serverChunterId } from '@hcengineering/server-chunter'
import { serverCollaborationId } from '@hcengineering/server-collaboration'
import { serverContactId } from '@hcengineering/server-contact'
import { type StorageAdapter } from '@hcengineering/server-core'
import { serverDocumentId } from '@hcengineering/server-document'
import { serverGmailId } from '@hcengineering/server-gmail'
import { serverGuestId } from '@hcengineering/server-guest'
@ -70,7 +70,7 @@ addLocation(serverGuestId, () => import('@hcengineering/server-guest-resources')
function prepareTools (): {
mongodbUri: string
minio: MinioService
storageAdapter: StorageAdapter
txes: Tx[]
version: Data<Version>
migrateOperations: [string, MigrateOperation][]

View File

@ -14,38 +14,38 @@
//
import attachment from '@hcengineering/attachment'
import chunter, { type ChatMessage } from '@hcengineering/chunter'
import contact from '@hcengineering/contact'
import { deepEqual } from 'fast-equals'
import core, {
DOMAIN_TX,
SortingOrder,
TxOperations,
TxProcessor,
generateId,
getObjectValue,
type BackupClient,
type Client as CoreClient,
DOMAIN_TX,
type Doc,
type Domain,
type Ref,
SortingOrder,
type TxCreateDoc,
TxOperations,
TxProcessor,
type WorkspaceId,
generateId,
getObjectValue
type WorkspaceId
} from '@hcengineering/core'
import { type MinioService } from '@hcengineering/minio'
import { getWorkspaceDB } from '@hcengineering/mongo'
import recruit from '@hcengineering/recruit'
import { type StorageAdapter } from '@hcengineering/server-core'
import { connect } from '@hcengineering/server-tool'
import tracker from '@hcengineering/tracker'
import tags, { type TagCategory, type TagElement, type TagReference } from '@hcengineering/tags'
import tracker from '@hcengineering/tracker'
import { deepEqual } from 'fast-equals'
import { MongoClient } from 'mongodb'
import chunter, { type ChatMessage } from '@hcengineering/chunter'
export const DOMAIN_ACTIVITY = 'activity' as Domain
export async function cleanWorkspace (
mongoUrl: string,
workspaceId: WorkspaceId,
minio: MinioService,
storageAdapter: StorageAdapter,
elasticUrl: string,
transactorUrl: string,
opt: { recruit: boolean, tracker: boolean, removedTx: boolean }
@ -67,14 +67,14 @@ export async function cleanWorkspace (
attachments.map((it) => it.file).concat(contacts.map((it) => it.avatar).filter((it) => it) as string[])
)
const minioList = await minio.list(workspaceId)
const minioList = await storageAdapter.list(workspaceId)
const toClean: string[] = []
for (const mv of minioList) {
if (!files.has(mv.name)) {
toClean.push(mv.name)
}
}
await minio.remove(workspaceId, toClean)
await storageAdapter.remove(workspaceId, toClean)
// connection.loadChunk(DOMAIN_BLOB, idx = )
if (opt.recruit) {
@ -145,16 +145,16 @@ export async function cleanWorkspace (
}
}
export async function fixMinioBW (workspaceId: WorkspaceId, minio: MinioService): Promise<void> {
export async function fixMinioBW (workspaceId: WorkspaceId, storageService: StorageAdapter): Promise<void> {
console.log('try clean bw miniature for ', workspaceId.name)
const from = new Date(new Date().setDate(new Date().getDate() - 7))
const list = await minio.list(workspaceId)
const list = await storageService.list(workspaceId)
console.log('found', list.length)
let removed = 0
for (const obj of list) {
if (obj.lastModified < from) continue
if (obj.name.includes('%size%')) {
await minio.remove(workspaceId, [obj.name])
await storageService.remove(workspaceId, [obj.name])
removed++
if (removed % 100 === 0) {
console.log('removed: ', removed)

View File

@ -15,14 +15,14 @@
import { Client as ElasticClient } from '@elastic/elasticsearch'
import core, { DOMAIN_DOC_INDEX_STATE, toWorkspaceString, type WorkspaceId } from '@hcengineering/core'
import { type MinioService } from '@hcengineering/minio'
import { getWorkspaceDB } from '@hcengineering/mongo'
import { type StorageAdapter } from '@hcengineering/server-core'
import { MongoClient } from 'mongodb'
export async function rebuildElastic (
mongoUrl: string,
workspaceId: WorkspaceId,
minio: MinioService,
storageAdapter: StorageAdapter,
elasticUrl: string
): Promise<void> {
const client = new MongoClient(mongoUrl)

View File

@ -40,7 +40,7 @@ import {
backup,
backupList,
createFileBackupStorage,
createMinioBackupStorage,
createStorageBackupStorage,
restore
} from '@hcengineering/server-backup'
import serverToken, { decodeToken, generateToken } from '@hcengineering/server-token'
@ -52,9 +52,9 @@ import { clearTelegramHistory } from './telegram'
import { diffWorkspace, updateField } from './workspace'
import { RateLimiter, getWorkspaceId, type AccountRole, type Data, type Tx, type Version } from '@hcengineering/core'
import { type MinioService } from '@hcengineering/minio'
import { consoleModelLogger, type MigrateOperation } from '@hcengineering/model'
import { openAIConfigDefaults } from '@hcengineering/openai'
import { type StorageAdapter } from '@hcengineering/server-core'
import path from 'path'
import { benchmark } from './benchmark'
import {
@ -76,7 +76,7 @@ import { openAIConfig } from './openai'
export function devTool (
prepareTools: () => {
mongodbUri: string
minio: MinioService
storageAdapter: StorageAdapter
txes: Tx[]
version: Data<Version>
migrateOperations: [string, MigrateOperation][]
@ -431,27 +431,27 @@ export function devTool (
.command('backup-s3 <bucketName> <dirName> <workspace>')
.description('dump workspace transactions and minio resources')
.action(async (bucketName: string, dirName: string, workspace: string, cmd) => {
const { minio } = prepareTools()
const { storageAdapter } = prepareTools()
const wsId = getWorkspaceId(workspace, productId)
const storage = await createMinioBackupStorage(minio, wsId, dirName)
const storage = await createStorageBackupStorage(storageAdapter, wsId, dirName)
await backup(transactorUrl, wsId, storage)
})
program
.command('backup-s3-restore <bucketName>, <dirName> <workspace> [date]')
.description('dump workspace transactions and minio resources')
.action(async (bucketName: string, dirName: string, workspace: string, date, cmd) => {
const { minio } = prepareTools()
const { storageAdapter } = prepareTools()
const wsId = getWorkspaceId(bucketName, productId)
const storage = await createMinioBackupStorage(minio, wsId, dirName)
const storage = await createStorageBackupStorage(storageAdapter, wsId, dirName)
await restore(transactorUrl, wsId, storage, parseInt(date ?? '-1'))
})
program
.command('backup-s3-list <bucketName> <dirName>')
.description('list snaphost ids for backup')
.action(async (bucketName: string, dirName: string, cmd) => {
const { minio } = prepareTools()
const { storageAdapter } = prepareTools()
const wsId = getWorkspaceId(bucketName, productId)
const storage = await createMinioBackupStorage(minio, wsId, dirName)
const storage = await createStorageBackupStorage(storageAdapter, wsId, dirName)
await backupList(storage)
})
@ -483,7 +483,7 @@ export function devTool (
.description('clear telegram history')
.option('-w, --workspace <workspace>', 'target workspace')
.action(async (workspace: string, cmd) => {
const { mongodbUri, minio } = prepareTools()
const { mongodbUri, storageAdapter: minio } = prepareTools()
await withDatabase(mongodbUri, async (db) => {
const telegramDB = process.env.TELEGRAM_DATABASE
if (telegramDB === undefined) {
@ -500,7 +500,7 @@ export function devTool (
.command('clear-telegram-all-history')
.description('clear telegram history')
.action(async (cmd) => {
const { mongodbUri, minio } = prepareTools()
const { mongodbUri, storageAdapter: minio } = prepareTools()
await withDatabase(mongodbUri, async (db) => {
const telegramDB = process.env.TELEGRAM_DATABASE
if (telegramDB === undefined) {
@ -537,7 +537,7 @@ export function devTool (
.option('--tracker', 'Clean tracker', false)
.option('--removedTx', 'Clean removed transactions', false)
.action(async (workspace: string, cmd: { recruit: boolean, tracker: boolean, removedTx: boolean }) => {
const { mongodbUri, minio } = prepareTools()
const { mongodbUri, storageAdapter: minio } = prepareTools()
await withDatabase(mongodbUri, async (db) => {
await cleanWorkspace(
mongodbUri,
@ -551,7 +551,7 @@ export function devTool (
})
program.command('fix-bw-workspace <workspace>').action(async (workspace: string) => {
const { minio } = prepareTools()
const { storageAdapter: minio } = prepareTools()
await fixMinioBW(getWorkspaceId(workspace, productId), minio)
})

View File

@ -15,7 +15,7 @@
//
import { DOMAIN_TX, type Ref, type WorkspaceId } from '@hcengineering/core'
import { type MinioService } from '@hcengineering/minio'
import { type StorageAdapter } from '@hcengineering/server-core'
import { DOMAIN_ATTACHMENT } from '@hcengineering/model-attachment'
import contact, { DOMAIN_CHANNEL } from '@hcengineering/model-contact'
import { DOMAIN_TELEGRAM } from '@hcengineering/model-telegram'
@ -32,7 +32,7 @@ export async function clearTelegramHistory (
mongoUrl: string,
workspaceId: WorkspaceId,
tgDb: string,
minio: MinioService
storageAdapter: StorageAdapter
): Promise<void> {
const client = new MongoClient(mongoUrl)
try {
@ -90,7 +90,7 @@ export async function clearTelegramHistory (
workspaceDB.collection(DOMAIN_ATTACHMENT).deleteMany({
attachedToClass: telegram.class.Message
}),
minio.remove(workspaceId, Array.from(attachments))
storageAdapter.remove(workspaceId, Array.from(attachments))
])
console.log('clearing telegram service data...')

View File

@ -56,6 +56,7 @@
"dotenv": "~16.0.0",
"got": "^11.8.3",
"@hcengineering/server-backup": "^0.6.0",
"@hcengineering/server-core": "^0.6.1",
"@hcengineering/minio": "^0.6.0"
}
}

View File

@ -16,12 +16,13 @@
import { getWorkspaceId } from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import { setMetadata } from '@hcengineering/platform'
import { backup, createMinioBackupStorage } from '@hcengineering/server-backup'
import { backup, createStorageBackupStorage } from '@hcengineering/server-backup'
import { type StorageAdapter } from '@hcengineering/server-core'
import serverToken from '@hcengineering/server-token'
import toolPlugin from '@hcengineering/server-tool'
import got from 'got'
import { type ObjectId } from 'mongodb'
import config from './config'
import toolPlugin from '@hcengineering/server-tool'
/**
* @public
@ -51,7 +52,7 @@ async function getWorkspaces (): Promise<Workspace[]> {
}
export class PlatformWorker {
minio!: MinioService
storageAdapter!: StorageAdapter
async close (): Promise<void> {}
@ -65,7 +66,7 @@ export class PlatformWorker {
minioPort = parseInt(sp[1])
}
this.minio = new MinioService({
this.storageAdapter = new MinioService({
endPoint: minioEndpoint,
port: minioPort,
useSSL: false,
@ -94,8 +95,8 @@ export class PlatformWorker {
for (const ws of workspaces) {
console.log('\n\nBACKUP WORKSPACE ', ws.workspace, ws.productId)
try {
const storage = await createMinioBackupStorage(
this.minio,
const storage = await createStorageBackupStorage(
this.storageAdapter,
getWorkspaceId('backups', ws.productId),
ws.workspace
)

View File

@ -25,6 +25,7 @@ import {
type ServerStorage,
type WorkspaceId
} from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import { createElasticAdapter, createElasticBackupDataAdapter } from '@hcengineering/elastic'
import {
ConfigurationMiddleware,
@ -33,15 +34,14 @@ import {
QueryJoinMiddleware,
SpaceSecurityMiddleware
} from '@hcengineering/middleware'
import { MinioService } from '@hcengineering/minio'
import { createMongoAdapter, createMongoTxAdapter } from '@hcengineering/mongo'
import { OpenAIEmbeddingsStage, openAIId, openAIPluginImpl } from '@hcengineering/openai'
import { addLocation, addStringsLoader } from '@hcengineering/platform'
import {
BackupClientSession,
createMinioDataAdapter,
createNullAdapter,
createRekoniAdapter,
createStorageDataAdapter,
createYDocAdapter,
getMetricsContext,
type MinioConfig
@ -60,6 +60,7 @@ import {
FullTextPushStage,
globalIndexer,
IndexedFieldStage,
type StorageAdapter,
type ContentTextAdapter,
type DbConfiguration,
type FullTextAdapter,
@ -238,7 +239,7 @@ export function start (
workspace: WorkspaceId,
adapter: FullTextAdapter,
storage: ServerStorage,
storageAdapter: MinioService,
storageAdapter: StorageAdapter,
contentAdapter: ContentTextAdapter
): FullTextPipelineStage[] {
// Allow 2 workspaces to be indexed in parallel
@ -323,7 +324,7 @@ export function start (
url: ''
},
MinioData: {
factory: createMinioDataAdapter,
factory: createStorageDataAdapter,
url: ''
},
FullTextBlob: {

View File

@ -956,11 +956,6 @@
"projectFolder": "models/lead",
"shouldPublish": false
},
{
"packageName": "@hcengineering/generator",
"projectFolder": "dev/generator",
"shouldPublish": false
},
{
"packageName": "@hcengineering/storybook",
"projectFolder": "dev/storybook",

View File

@ -39,7 +39,6 @@
"@hcengineering/view": "^0.6.9",
"@hcengineering/login": "^0.6.8",
"@hcengineering/workbench": "^0.6.9",
"@hcengineering/minio": "^0.6.0",
"@hcengineering/notification": "^0.6.16"
}
}

View File

@ -43,6 +43,6 @@
"@hcengineering/model": "^0.6.7",
"tar-stream": "^2.2.0",
"@hcengineering/server-tool": "^0.6.0",
"@hcengineering/minio": "^0.6.0"
"@hcengineering/server-core": "^0.6.1"
}
}

View File

@ -1,5 +1,5 @@
import { WorkspaceId } from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import { StorageAdapter } from '@hcengineering/server-core'
import { createReadStream, createWriteStream, existsSync } from 'fs'
import { mkdir, readFile, writeFile } from 'fs/promises'
import { dirname, join } from 'path'
@ -51,9 +51,9 @@ class FileStorage implements BackupStorage {
}
}
class MinioStorage implements BackupStorage {
class AdapterStorage implements BackupStorage {
constructor (
readonly client: MinioService,
readonly client: StorageAdapter,
readonly workspaceId: WorkspaceId,
readonly root: string
) {}
@ -100,13 +100,13 @@ export async function createFileBackupStorage (fileName: string): Promise<Backup
/**
* @public
*/
export async function createMinioBackupStorage (
client: MinioService,
export async function createStorageBackupStorage (
client: StorageAdapter,
workspaceId: WorkspaceId,
root: string
): Promise<BackupStorage> {
if (!(await client.exists(workspaceId))) {
await client.make(workspaceId)
}
return new MinioStorage(client, workspaceId, root)
return new AdapterStorage(client, workspaceId, root)
}

View File

@ -34,7 +34,7 @@
},
"dependencies": {
"@hcengineering/core": "^0.6.28",
"@hcengineering/minio": "^0.6.0",
"@hcengineering/server-core": "^0.6.1",
"base64-js": "^1.5.1",
"yjs": "^13.5.52"
}

View File

@ -21,13 +21,13 @@ import {
WorkspaceId,
parseCollaborativeDoc
} from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import { Doc as YDoc } from 'yjs'
import { StorageAdapter } from '@hcengineering/server-core'
import { yDocBranch } from '../history/branch'
import { YDocVersion } from '../history/history'
import { createYdocSnapshot, restoreYdocSnapshot } from '../history/snapshot'
import { yDocFromMinio, yDocToMinio } from './minio'
import { yDocFromStorage, yDocToStorage } from './minio'
/** @public */
export function collaborativeHistoryDocId (id: string): string {
@ -37,7 +37,7 @@ export function collaborativeHistoryDocId (id: string): string {
/** @public */
export async function loadCollaborativeDoc (
minio: MinioService,
storageAdapter: StorageAdapter,
workspace: WorkspaceId,
collaborativeDoc: CollaborativeDoc,
ctx: MeasureContext
@ -47,7 +47,7 @@ export async function loadCollaborativeDoc (
return await ctx.with('loadCollaborativeDoc', { type: 'content' }, async (ctx) => {
const yContent = await ctx.with('yDocFromMinio', { type: 'content' }, async () => {
return await yDocFromMinio(minio, workspace, documentId, new YDoc({ gc: false }))
return await yDocFromStorage(storageAdapter, workspace, documentId, new YDoc({ gc: false }))
})
// the document does not exist
@ -60,7 +60,7 @@ export async function loadCollaborativeDoc (
}
const yHistory = await ctx.with('yDocFromMinio', { type: 'history' }, async () => {
return await yDocFromMinio(minio, workspace, historyDocumentId, new YDoc())
return await yDocFromStorage(storageAdapter, workspace, historyDocumentId, new YDoc())
})
// the history document does not exist
@ -76,19 +76,19 @@ export async function loadCollaborativeDoc (
/** @public */
export async function saveCollaborativeDoc (
minio: MinioService,
storageAdapter: StorageAdapter,
workspace: WorkspaceId,
collaborativeDoc: CollaborativeDoc,
ydoc: YDoc,
ctx: MeasureContext
): Promise<void> {
const { documentId, versionId } = parseCollaborativeDoc(collaborativeDoc)
await saveCollaborativeDocVersion(minio, workspace, documentId, versionId, ydoc, ctx)
await saveCollaborativeDocVersion(storageAdapter, workspace, documentId, versionId, ydoc, ctx)
}
/** @public */
export async function saveCollaborativeDocVersion (
minio: MinioService,
storageAdapter: StorageAdapter,
workspace: WorkspaceId,
documentId: string,
versionId: CollaborativeDocVersion,
@ -98,7 +98,7 @@ export async function saveCollaborativeDocVersion (
await ctx.with('saveCollaborativeDoc', {}, async (ctx) => {
if (versionId === 'HEAD') {
await ctx.with('yDocToMinio', {}, async () => {
await yDocToMinio(minio, workspace, documentId, ydoc)
await yDocToStorage(storageAdapter, workspace, documentId, ydoc)
})
} else {
console.warn('Cannot save non HEAD document version', documentId, versionId)
@ -108,7 +108,7 @@ export async function saveCollaborativeDocVersion (
/** @public */
export async function removeCollaborativeDoc (
minio: MinioService,
storageAdapter: StorageAdapter,
workspace: WorkspaceId,
collaborativeDocs: CollaborativeDoc[],
ctx: MeasureContext
@ -125,7 +125,7 @@ export async function removeCollaborativeDoc (
}
if (toRemove.length > 0) {
await ctx.with('remove', {}, async () => {
await minio.remove(workspace, toRemove)
await storageAdapter.remove(workspace, toRemove)
})
}
})
@ -133,7 +133,7 @@ export async function removeCollaborativeDoc (
/** @public */
export async function copyCollaborativeDoc (
minio: MinioService,
storageAdapter: StorageAdapter,
workspace: WorkspaceId,
source: CollaborativeDoc,
target: CollaborativeDoc,
@ -149,7 +149,7 @@ export async function copyCollaborativeDoc (
await ctx.with('copyCollaborativeDoc', {}, async (ctx) => {
const ySource = await ctx.with('loadCollaborativeDocVersion', {}, async (ctx) => {
return await loadCollaborativeDoc(minio, workspace, source, ctx)
return await loadCollaborativeDoc(storageAdapter, workspace, source, ctx)
})
if (ySource === undefined) {
@ -161,14 +161,14 @@ export async function copyCollaborativeDoc (
})
await ctx.with('saveCollaborativeDocVersion', {}, async (ctx) => {
await saveCollaborativeDocVersion(minio, workspace, targetDocumentId, targetVersionId, yTarget, ctx)
await saveCollaborativeDocVersion(storageAdapter, workspace, targetDocumentId, targetVersionId, yTarget, ctx)
})
})
}
/** @public */
export async function takeCollaborativeDocSnapshot (
minio: MinioService,
storageAdapter: StorageAdapter,
workspace: WorkspaceId,
collaborativeDoc: CollaborativeDoc,
ydoc: YDoc,
@ -181,7 +181,7 @@ export async function takeCollaborativeDocSnapshot (
await ctx.with('takeCollaborativeDocSnapshot', {}, async (ctx) => {
const yHistory =
(await ctx.with('yDocFromMinio', { type: 'history' }, async () => {
return await yDocFromMinio(minio, workspace, historyDocumentId, new YDoc({ gc: false }))
return await yDocFromStorage(storageAdapter, workspace, historyDocumentId, new YDoc({ gc: false }))
})) ?? new YDoc()
await ctx.with('createYdocSnapshot', {}, async () => {
@ -189,7 +189,7 @@ export async function takeCollaborativeDocSnapshot (
})
await ctx.with('yDocToMinio', { type: 'history' }, async () => {
await yDocToMinio(minio, workspace, historyDocumentId, yHistory)
await yDocToStorage(storageAdapter, workspace, historyDocumentId, yHistory)
})
})
}

View File

@ -14,14 +14,14 @@
//
import { WorkspaceId } from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import { StorageAdapter } from '@hcengineering/server-core'
import { Doc as YDoc } from 'yjs'
import { yDocFromBuffer, yDocToBuffer } from './ydoc'
/** @public */
export async function yDocFromMinio (
minio: MinioService,
export async function yDocFromStorage (
storageAdapter: StorageAdapter,
workspace: WorkspaceId,
minioDocumentId: string,
ydoc?: YDoc
@ -31,7 +31,7 @@ export async function yDocFromMinio (
ydoc ??= new YDoc({ gc: false })
try {
const buffer = await minio.read(workspace, minioDocumentId)
const buffer = await storageAdapter.read(workspace, minioDocumentId)
return yDocFromBuffer(Buffer.concat(buffer), ydoc)
} catch (err: any) {
if (err?.code === 'NoSuchKey' || err?.code === 'NotFound') {
@ -42,13 +42,13 @@ export async function yDocFromMinio (
}
/** @public */
export async function yDocToMinio (
minio: MinioService,
export async function yDocToStorage (
storageAdapter: StorageAdapter,
workspace: WorkspaceId,
minioDocumentId: string,
ydoc: YDoc
): Promise<void> {
const buffer = yDocToBuffer(ydoc)
const metadata = { 'content-type': 'application/ydoc' }
await minio.put(workspace, minioDocumentId, buffer, buffer.length, metadata)
await storageAdapter.put(workspace, minioDocumentId, buffer, buffer.length, metadata)
}

View File

@ -17,8 +17,8 @@ import {
YDocVersion,
collaborativeHistoryDocId,
createYdocSnapshot,
yDocFromMinio,
yDocToMinio
yDocFromStorage,
yDocToStorage
} from '@hcengineering/collaboration'
import { type TakeSnapshotRequest, type TakeSnapshotResponse } from '@hcengineering/collaborator-client'
import { CollaborativeDocVersionHead, MeasureContext, generateId, parseCollaborativeDoc } from '@hcengineering/core'
@ -57,7 +57,7 @@ export async function takeSnapshot (
const historyDocumentId = collaborativeHistoryDocId(minioDocumentId)
const yHistory =
(await ctx.with('yDocFromMinio', {}, async () => {
return await yDocFromMinio(minio, workspaceId, historyDocumentId)
return await yDocFromStorage(minio, workspaceId, historyDocumentId)
})) ?? new YDoc()
await ctx.with('createYdocSnapshot', {}, async () => {
@ -67,7 +67,7 @@ export async function takeSnapshot (
})
await ctx.with('yDocToMinio', {}, async () => {
await yDocToMinio(minio, workspaceId, historyDocumentId, yHistory)
await yDocToStorage(minio, workspaceId, historyDocumentId, yHistory)
})
return { ...version }

View File

@ -1,5 +1,5 @@
module.exports = {
extends: ['./node_modules/@hcengineering/platform-rig/profiles/default/eslint.config.json'],
extends: ['./node_modules/@hcengineering/platform-rig/profiles/node/eslint.config.json'],
parserOptions: {
tsconfigRootDir: __dirname,
project: './tsconfig.json'

View File

@ -1,4 +1,5 @@
{
"$schema": "https://developer.microsoft.com/json-schemas/rig-package/rig.schema.json",
"rigPackageName": "@hcengineering/platform-rig"
"rigPackageName": "@hcengineering/platform-rig",
"rigProfile": "node"
}

View File

@ -5,14 +5,15 @@
"svelte": "src/index.ts",
"types": "types/index.d.ts",
"author": "Anticrm Platform Contributors",
"template": "@hcengineering/node-package",
"license": "EPL-2.0",
"scripts": {
"build": "compile",
"build:watch": "compile",
"format": "format src",
"test": "jest --passWithNoTests --silent",
"test": "jest --passWithNoTests --silent --forceExit",
"_phase:build": "compile transpile src",
"_phase:test": "jest --passWithNoTests --silent",
"_phase:test": "jest --passWithNoTests --silent --forceExit",
"_phase:format": "format src",
"_phase:validate": "compile validate"
},
@ -36,7 +37,6 @@
"dependencies": {
"@hcengineering/core": "^0.6.28",
"@hcengineering/platform": "^0.6.9",
"@hcengineering/minio": "^0.6.0",
"@hcengineering/query": "^0.6.8",
"fast-equals": "^2.0.3",
"html-to-text": "^9.0.3"

View File

@ -14,25 +14,24 @@
//
import {
Class,
Doc,
DocumentQuery,
DocumentUpdate,
Domain,
FindOptions,
FindResult,
Hierarchy,
IndexingConfiguration,
MeasureContext,
ModelDb,
Ref,
StorageIterator,
toFindResult,
Tx,
TxResult,
WorkspaceId
type Class,
type Doc,
type DocumentQuery,
type DocumentUpdate,
type Domain,
type FindOptions,
type FindResult,
type Hierarchy,
type IndexingConfiguration,
type MeasureContext,
type ModelDb,
type Ref,
type StorageIterator,
type Tx,
type TxResult,
type WorkspaceId
} from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import { type StorageAdapter } from './storage'
/**
* @public
@ -82,97 +81,5 @@ export type DbAdapterFactory = (
url: string,
workspaceId: WorkspaceId,
modelDb: ModelDb,
storage?: MinioService
storage?: StorageAdapter
) => Promise<DbAdapter>
/**
* @public
*/
export interface DbAdapterConfiguration {
factory: DbAdapterFactory
url: string
}
/**
* @public
*/
export class DummyDbAdapter implements DbAdapter {
async init (model: Tx[]): Promise<void> {}
async findAll<T extends Doc>(
ctx: MeasureContext,
_class: Ref<Class<T>>,
query: DocumentQuery<T>,
options?: FindOptions<T> | undefined
): Promise<FindResult<T>> {
return toFindResult([])
}
async createIndexes (domain: Domain, config: Pick<IndexingConfiguration<Doc>, 'indexes'>): Promise<void> {}
async removeOldIndex (domain: Domain, deletePattern: RegExp, keepPattern: RegExp): Promise<void> {}
async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
return []
}
async close (): Promise<void> {}
find (domain: Domain): StorageIterator {
return {
next: async () => undefined,
close: async () => {}
}
}
async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
return []
}
async upload (domain: Domain, docs: Doc[]): Promise<void> {}
async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {}
async update (domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>): Promise<void> {}
}
class InMemoryAdapter extends DummyDbAdapter implements DbAdapter {
private readonly modeldb: ModelDb
constructor (hierarchy: Hierarchy) {
super()
this.modeldb = new ModelDb(hierarchy)
}
async findAll<T extends Doc>(
ctx: MeasureContext,
_class: Ref<Class<T>>,
query: DocumentQuery<T>,
options?: FindOptions<T>
): Promise<FindResult<T>> {
return await this.modeldb.findAll(_class, query, options)
}
async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
return await this.modeldb.tx(...tx)
}
async init (model: Tx[]): Promise<void> {
for (const tx of model) {
try {
await this.modeldb.tx(tx)
} catch (err: any) {
console.error('skip broken TX', err)
}
}
}
}
/**
* @public
*/
export async function createInMemoryAdapter (
hierarchy: Hierarchy,
url: string,
workspaceId: WorkspaceId
): Promise<DbAdapter> {
return new InMemoryAdapter(hierarchy)
}

View File

@ -0,0 +1,66 @@
//
// Copyright © 2020, 2021 Anticrm Platform Contributors.
// Copyright © 2021 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import { type MeasureContext, type ServerStorage, type WorkspaceIdWithUrl } from '@hcengineering/core'
import { type DbAdapterFactory } from './adapter'
import { type FullTextPipelineStage } from './indexer/types'
import { type StorageAdapter } from './storage'
import type { ContentTextAdapter, ContentTextAdapterFactory, FullTextAdapter, FullTextAdapterFactory } from './types'
/**
 * Describes how to construct a database adapter: a factory function plus the
 * connection url it will be invoked with.
 * @public
 */
export interface DbAdapterConfiguration {
  // Factory invoked as factory(hierarchy, url, workspace, modelDb, storage?) to build the adapter.
  factory: DbAdapterFactory
  // Connection url passed to the factory.
  url: string
}
/**
 * Configuration of a content-to-text extraction adapter, keyed by the
 * content type it is responsible for.
 * @public
 */
export interface ContentTextAdapterConfiguration {
  // Factory used to instantiate the adapter.
  factory: ContentTextAdapterFactory
  // Content (MIME) type this adapter handles.
  contentType: string
  // Service url passed to the factory.
  url: string
}
/**
 * Builds the ordered list of full-text indexing pipeline stages, given the
 * adapters and storages the stages depend on.
 * @public
 */
export type FullTextPipelineStageFactory = (
  adapter: FullTextAdapter,
  storage: ServerStorage,
  storageAdapter: StorageAdapter,
  contentAdapter: ContentTextAdapter
) => FullTextPipelineStage[]
/**
 * Top-level server database configuration: which db adapters exist, how
 * domains map onto them, and the full-text / content / blob-storage factories.
 * @public
 */
export interface DbConfiguration {
  // Named adapter configurations; keys are referenced by `domains` and `defaultAdapter`.
  adapters: Record<string, DbAdapterConfiguration>
  // Maps a domain name to the key (in `adapters`) of the adapter serving it.
  domains: Record<string, string>
  // Adapter key used for domains without an explicit entry in `domains`.
  defaultAdapter: string
  workspace: WorkspaceIdWithUrl
  metrics: MeasureContext
  fulltextAdapter: {
    factory: FullTextAdapterFactory
    url: string
    // Builds the indexing pipeline stages; see FullTextPipelineStageFactory.
    stages: FullTextPipelineStageFactory
  }
  // Content-to-text adapters keyed by name; `defaultContentAdapter` selects the fallback.
  contentAdapters: Record<string, ContentTextAdapterConfiguration>
  defaultContentAdapter: string
  // Optional factory for the blob storage adapter; when absent no blob storage is available.
  storageFactory?: () => StorageAdapter
}

View File

@ -13,9 +13,10 @@
// limitations under the License.
//
import { MeasureContext, WorkspaceId } from '@hcengineering/core'
import { ContentTextAdapter, ContentTextAdapterConfiguration } from './types'
import { Readable } from 'stream'
import { type MeasureContext, type WorkspaceId } from '@hcengineering/core'
import { type Readable } from 'stream'
import { type ContentTextAdapterConfiguration } from './configuration'
import { type ContentTextAdapter } from './types'
class ContentAdapter implements ContentTextAdapter {
constructor (

View File

@ -15,36 +15,36 @@
//
import core, {
AttachedDoc,
Class,
Doc,
DocIndexState,
DocumentQuery,
FindOptions,
FindResult,
Hierarchy,
MeasureContext,
ObjQueryType,
Ref,
SearchOptions,
SearchQuery,
SearchResult,
ServerStorage,
Tx,
TxCUD,
TxCollectionCUD,
type AttachedDoc,
type Class,
type Doc,
type DocIndexState,
type DocumentQuery,
type FindOptions,
type FindResult,
type Hierarchy,
type MeasureContext,
type ObjQueryType,
type Ref,
type SearchOptions,
type SearchQuery,
type SearchResult,
type ServerStorage,
type Tx,
type TxCUD,
type TxCollectionCUD,
TxFactory,
TxResult,
WorkspaceId,
type TxResult,
type WorkspaceId,
docKey,
isFullTextAttribute,
isIndexedAttribute,
toFindResult
} from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import { FullTextIndexPipeline } from './indexer'
import { type FullTextIndexPipeline } from './indexer'
import { createStateDoc, isClassIndexable } from './indexer/utils'
import { getScoringConfig, mapSearchResultDoc } from './mapper'
import { type StorageAdapter } from './storage'
import type { FullTextAdapter, IndexedDoc, WithFind } from './types'
/**
@ -58,7 +58,7 @@ export class FullTextIndex implements WithFind {
private readonly hierarchy: Hierarchy,
private readonly adapter: FullTextAdapter,
private readonly dbStorage: ServerStorage,
readonly storageAdapter: MinioService | undefined,
readonly storageAdapter: StorageAdapter | undefined,
readonly workspace: WorkspaceId,
readonly indexer: FullTextIndexPipeline,
private readonly upgrade: boolean

View File

@ -15,10 +15,13 @@
//
export * from './adapter'
export * from './types'
export * from './configuration'
export * from './fulltext'
export * from './storage'
export * from './pipeline'
export * from './indexer'
export * from './limitter'
export * from './mem'
export * from './pipeline'
export { default, serverCoreId } from './plugin'
export * from './server'
export * from './storage'
export * from './types'

View File

@ -14,19 +14,25 @@
//
import core, {
Class,
Doc,
DocIndexState,
DocumentQuery,
DocumentUpdate,
MeasureContext,
Ref,
WorkspaceId
type Class,
type Doc,
type DocIndexState,
type DocumentQuery,
type DocumentUpdate,
type MeasureContext,
type Ref,
type WorkspaceId
} from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import { DbAdapter } from '../adapter'
import { ContentTextAdapter, IndexedDoc } from '../types'
import { DocUpdateHandler, FullTextPipeline, FullTextPipelineStage, contentStageId, fieldStateId } from './types'
import { type DbAdapter } from '../adapter'
import { type StorageAdapter } from '../storage'
import { type ContentTextAdapter, type IndexedDoc } from '../types'
import {
type DocUpdateHandler,
type FullTextPipeline,
type FullTextPipelineStage,
contentStageId,
fieldStateId
} from './types'
import { docKey, docUpdKey, getFullTextIndexableAttributes } from './utils'
/**
@ -51,7 +57,7 @@ export class ContentRetrievalStage implements FullTextPipelineStage {
stageValue: boolean | string = true
constructor (
readonly storageAdapter: MinioService | undefined,
readonly storageAdapter: StorageAdapter | undefined,
readonly workspace: WorkspaceId,
readonly metrics: MeasureContext,
private readonly contentAdapter: ContentTextAdapter

View File

@ -14,21 +14,27 @@
//
import core, {
Class,
Doc,
DocIndexState,
DocumentQuery,
DocumentUpdate,
type Class,
type Doc,
type DocIndexState,
type DocumentQuery,
type DocumentUpdate,
extractDocKey,
IndexStageState,
MeasureContext,
Ref,
ServerStorage
type IndexStageState,
type MeasureContext,
type Ref,
type ServerStorage
} from '@hcengineering/core'
import { deepEqual } from 'fast-equals'
import { DbAdapter } from '../adapter'
import { IndexedDoc } from '../types'
import { contentStageId, DocUpdateHandler, fieldStateId, FullTextPipeline, FullTextPipelineStage } from './types'
import { type DbAdapter } from '../adapter'
import { type IndexedDoc } from '../types'
import {
contentStageId,
type DocUpdateHandler,
fieldStateId,
type FullTextPipeline,
type FullTextPipelineStage
} from './types'
import {
collectPropagate,
docKey,

View File

@ -14,31 +14,31 @@
//
import core, {
AnyAttribute,
ArrOf,
Class,
Doc,
DocIndexState,
DocumentQuery,
DocumentUpdate,
type AnyAttribute,
type ArrOf,
type Class,
type Doc,
type DocIndexState,
type DocumentQuery,
type DocumentUpdate,
extractDocKey,
isFullTextAttribute,
isIndexedAttribute,
MeasureContext,
Ref,
ServerStorage,
WorkspaceId
type MeasureContext,
type Ref,
type ServerStorage,
type WorkspaceId
} from '@hcengineering/core'
import { DbAdapter } from '../adapter'
import { type DbAdapter } from '../adapter'
import { updateDocWithPresenter } from '../mapper'
import { FullTextAdapter, IndexedDoc } from '../types'
import { type FullTextAdapter, type IndexedDoc } from '../types'
import { summaryStageId } from './summary'
import {
contentStageId,
DocUpdateHandler,
type DocUpdateHandler,
fieldStateId,
FullTextPipeline,
FullTextPipelineStage,
type FullTextPipeline,
type FullTextPipelineStage,
fullTextPushStageId
} from './types'
import {
@ -46,7 +46,7 @@ import {
collectPropagateClasses,
docKey,
getFullTextContext,
IndexKeyOptions,
type IndexKeyOptions,
isCustomAttr
} from './utils'

View File

@ -14,21 +14,21 @@
//
import core, {
AttachedDoc,
Class,
type AttachedDoc,
type Class,
DOMAIN_DOC_INDEX_STATE,
DOMAIN_FULLTEXT_BLOB,
Doc,
DocIndexState,
DocumentQuery,
DocumentUpdate,
Hierarchy,
MeasureContext,
ModelDb,
Ref,
ServerStorage,
type Doc,
type DocIndexState,
type DocumentQuery,
type DocumentUpdate,
type Hierarchy,
type MeasureContext,
type ModelDb,
type Ref,
type ServerStorage,
TxFactory,
WorkspaceId,
type WorkspaceId,
_getOperator,
docKey,
generateId,
@ -36,10 +36,10 @@ import core, {
toFindResult,
versionToString
} from '@hcengineering/core'
import { DbAdapter } from '../adapter'
import { type DbAdapter } from '../adapter'
import { RateLimiter } from '../limitter'
import type { IndexedDoc } from '../types'
import { FullTextPipeline, FullTextPipelineStage } from './types'
import { type FullTextPipeline, type FullTextPipelineStage } from './types'
import { createStateDoc, isClassIndexable } from './utils'
export * from './content'

View File

@ -14,25 +14,31 @@
//
import core, {
AnyAttribute,
Class,
Doc,
DocIndexState,
DocumentQuery,
DocumentUpdate,
type AnyAttribute,
type Class,
type Doc,
type DocIndexState,
type DocumentQuery,
type DocumentUpdate,
extractDocKey,
Hierarchy,
IndexStageState,
type Hierarchy,
type IndexStageState,
isFullTextAttribute,
MeasureContext,
Ref,
ServerStorage
type MeasureContext,
type Ref,
type ServerStorage
} from '@hcengineering/core'
import { translate } from '@hcengineering/platform'
import { convert } from 'html-to-text'
import { DbAdapter } from '../adapter'
import { IndexedDoc } from '../types'
import { contentStageId, DocUpdateHandler, fieldStateId, FullTextPipeline, FullTextPipelineStage } from './types'
import { type DbAdapter } from '../adapter'
import { type IndexedDoc } from '../types'
import {
contentStageId,
type DocUpdateHandler,
fieldStateId,
type FullTextPipeline,
type FullTextPipelineStage
} from './types'
import {
collectPropagate,
collectPropagateClasses,

View File

@ -14,17 +14,17 @@
//
import {
Class,
Doc,
DocIndexState,
DocumentQuery,
DocumentUpdate,
Hierarchy,
MeasureContext,
ModelDb,
Ref
type Class,
type Doc,
type DocIndexState,
type DocumentQuery,
type DocumentUpdate,
type Hierarchy,
type MeasureContext,
type ModelDb,
type Ref
} from '@hcengineering/core'
import { DbAdapter } from '../adapter'
import { type DbAdapter } from '../adapter'
import type { IndexedDoc } from '../types'
/**

View File

@ -14,36 +14,36 @@
//
import core, {
AnyAttribute,
AttachedDoc,
Class,
type AnyAttribute,
type AttachedDoc,
type Class,
ClassifierKind,
Collection,
Data,
Doc,
DocIndexState,
type Collection,
type Data,
type Doc,
type DocIndexState,
DOMAIN_BLOB,
DOMAIN_DOC_INDEX_STATE,
DOMAIN_FULLTEXT_BLOB,
DOMAIN_MODEL,
DOMAIN_TRANSIENT,
DOMAIN_TX,
FullTextSearchContext,
type FullTextSearchContext,
generateId,
Hierarchy,
IndexStageState,
type Hierarchy,
type IndexStageState,
isFullTextAttribute,
isIndexedAttribute,
MeasureContext,
Obj,
Ref,
Space,
type MeasureContext,
type Obj,
type Ref,
type Space,
TxFactory
} from '@hcengineering/core'
import { deepEqual } from 'fast-equals'
import { DbAdapter } from '../adapter'
import { type DbAdapter } from '../adapter'
import plugin from '../plugin'
import { FullTextPipeline } from './types'
import { type FullTextPipeline } from './types'
/**
* @public
*/

View File

@ -1,8 +1,23 @@
import { Class, Doc, DocIndexState, docKey, Hierarchy, Ref, RefTo, SearchResultDoc } from '@hcengineering/core'
import { getResource, Resource } from '@hcengineering/platform'
import {
type Class,
type Doc,
type DocIndexState,
docKey,
type Hierarchy,
type Ref,
type RefTo,
type SearchResultDoc
} from '@hcengineering/core'
import { getResource, type Resource } from '@hcengineering/platform'
import plugin from './plugin'
import { ClassSearchConfigProps, IndexedDoc, SearchPresenter, SearchPresenterFunc, SearchScoring } from './types'
import {
type ClassSearchConfigProps,
type IndexedDoc,
type SearchPresenter,
type SearchPresenterFunc,
type SearchScoring
} from './types'
interface IndexedReader {
get: (attribute: string) => any

119
server/core/src/mem.ts Normal file
View File

@ -0,0 +1,119 @@
//
// Copyright © 2022 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import {
type Class,
type Doc,
type DocumentQuery,
type DocumentUpdate,
type Domain,
type FindOptions,
type FindResult,
type Hierarchy,
type IndexingConfiguration,
type MeasureContext,
ModelDb,
type Ref,
type StorageIterator,
toFindResult,
type Tx,
type TxResult,
type WorkspaceId
} from '@hcengineering/core'
import { type DbAdapter } from './adapter'
/**
 * No-op DbAdapter: every query returns an empty result and every mutation is
 * silently accepted without effect. Useful as a stand-in where a real adapter
 * is not required.
 * @public
 */
export class DummyDbAdapter implements DbAdapter {
  // Model transactions are ignored.
  async init (model: Tx[]): Promise<void> {}

  // Always yields an empty FindResult.
  async findAll<T extends Doc>(
    ctx: MeasureContext,
    _class: Ref<Class<T>>,
    query: DocumentQuery<T>,
    options?: FindOptions<T> | undefined
  ): Promise<FindResult<T>> {
    return toFindResult([])
  }

  async createIndexes (domain: Domain, config: Pick<IndexingConfiguration<Doc>, 'indexes'>): Promise<void> {}
  async removeOldIndex (domain: Domain, deletePattern: RegExp, keepPattern: RegExp): Promise<void> {}

  // Transactions produce no results.
  async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
    return []
  }

  async close (): Promise<void> {}

  // Iterator that is immediately exhausted.
  find (domain: Domain): StorageIterator {
    return {
      next: async () => undefined,
      close: async () => {}
    }
  }

  async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
    return []
  }

  async upload (domain: Domain, docs: Doc[]): Promise<void> {}
  async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {}
  async update (domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>): Promise<void> {}
}
/**
 * DbAdapter backed by an in-memory ModelDb: findAll/tx/init delegate to the
 * model database; everything else inherits the no-op DummyDbAdapter behavior.
 */
class InMemoryAdapter extends DummyDbAdapter implements DbAdapter {
  private readonly modeldb: ModelDb

  constructor (hierarchy: Hierarchy) {
    super()
    this.modeldb = new ModelDb(hierarchy)
  }

  async findAll<T extends Doc>(
    ctx: MeasureContext,
    _class: Ref<Class<T>>,
    query: DocumentQuery<T>,
    options?: FindOptions<T>
  ): Promise<FindResult<T>> {
    return await this.modeldb.findAll(_class, query, options)
  }

  async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
    return await this.modeldb.tx(...tx)
  }

  // Applies model transactions one by one; a broken tx is logged and skipped
  // so the remainder of the model still loads.
  async init (model: Tx[]): Promise<void> {
    for (const tx of model) {
      try {
        await this.modeldb.tx(tx)
      } catch (err: any) {
        console.error('skip broken TX', err)
      }
    }
  }
}
/**
 * Creates a DbAdapter that keeps all data in memory (backed by ModelDb).
 *
 * @param hierarchy - class hierarchy backing the in-memory model database
 * @param url - unused; present to satisfy the DbAdapterFactory signature
 * @param workspaceId - unused; present to satisfy the DbAdapterFactory signature
 * @public
 */
export async function createInMemoryAdapter (
  hierarchy: Hierarchy,
  url: string,
  workspaceId: WorkspaceId
): Promise<DbAdapter> {
  return new InMemoryAdapter(hierarchy)
}

View File

@ -14,25 +14,33 @@
//
import {
Class,
Doc,
DocumentQuery,
Domain,
FindOptions,
FindResult,
MeasureContext,
ModelDb,
Ref,
SearchOptions,
SearchQuery,
SearchResult,
ServerStorage,
StorageIterator,
Tx,
TxResult
type Class,
type Doc,
type DocumentQuery,
type Domain,
type FindOptions,
type FindResult,
type MeasureContext,
type ModelDb,
type Ref,
type SearchOptions,
type SearchQuery,
type SearchResult,
type ServerStorage,
type StorageIterator,
type Tx,
type TxResult
} from '@hcengineering/core'
import { DbConfiguration, createServerStorage } from './storage'
import { BroadcastFunc, HandledBroadcastFunc, Middleware, MiddlewareCreator, Pipeline, SessionContext } from './types'
import { createServerStorage } from './server'
import { type DbConfiguration } from './configuration'
import {
type BroadcastFunc,
type HandledBroadcastFunc,
type Middleware,
type MiddlewareCreator,
type Pipeline,
type SessionContext
} from './types'
/**
* @public

View File

@ -14,7 +14,7 @@
// limitations under the License.
//
import { Metadata, Plugin, plugin } from '@hcengineering/platform'
import { type Metadata, type Plugin, plugin } from '@hcengineering/platform'
import type { Class, Ref, Space, Mixin } from '@hcengineering/core'
import type { ObjectDDParticipant, SearchPresenter, Trigger } from './types'

View File

@ -0,0 +1,196 @@
//
// Copyright © 2020, 2021 Anticrm Platform Contributors.
// Copyright © 2021 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import core, {
type Class,
DOMAIN_DOC_INDEX_STATE,
DOMAIN_TX,
type Doc,
Hierarchy,
type IndexingUpdateEvent,
type MeasureContext,
ModelDb,
type Ref,
type ServerStorage,
type TxWorkspaceEvent,
WorkspaceEvent,
type WorkspaceId,
generateId
} from '@hcengineering/core'
import { type DbAdapter, type TxAdapter } from '../adapter'
import { type DbConfiguration } from '../configuration'
import { createContentAdapter } from '../content'
import { FullTextIndex } from '../fulltext'
import { FullTextIndexPipeline } from '../indexer'
import { type StorageAdapter } from '../storage'
import { Triggers } from '../triggers'
import { type ServerStorageOptions } from '../types'
import { TServerStorage } from './storage'
/**
 * Assembles a fully wired ServerStorage from the given configuration:
 * instantiates db adapters, loads the model from the tx adapter, creates the
 * full-text and content adapters plus an index factory, and returns the
 * resulting TServerStorage.
 *
 * @param ctx - measurement context used for timing of the individual phases
 * @param conf - adapter/domain/full-text configuration
 * @param options - runtime options (broadcast callback, upgrade flag, ...)
 * @public
 */
export async function createServerStorage (
  ctx: MeasureContext,
  conf: DbConfiguration,
  options: ServerStorageOptions
): Promise<ServerStorage> {
  const hierarchy = new Hierarchy()
  const triggers = new Triggers(hierarchy)
  const adapters = new Map<string, DbAdapter>()
  const modelDb = new ModelDb(hierarchy)

  // Optional blob storage; handed to every db adapter factory below.
  const storageAdapter = conf.storageFactory?.()

  for (const key in conf.adapters) {
    const adapterConf = conf.adapters[key]
    adapters.set(key, await adapterConf.factory(hierarchy, adapterConf.url, conf.workspace, modelDb, storageAdapter))
  }

  // The adapter serving DOMAIN_TX is the source of model transactions.
  const txAdapter = adapters.get(conf.domains[DOMAIN_TX]) as TxAdapter

  const model = await ctx.with('get model', {}, async (ctx) => {
    const model = await txAdapter.getModel()
    // First pass: build the hierarchy and register triggers; broken txes are skipped.
    for (const tx of model) {
      try {
        hierarchy.tx(tx)
        await triggers.tx(tx)
      } catch (err: any) {
        console.error('failed to apply model transaction, skipping', JSON.stringify(tx), err)
      }
    }
    // Second pass: populate the model database once the hierarchy is complete.
    for (const tx of model) {
      try {
        await modelDb.tx(tx)
      } catch (err: any) {
        console.error('failed to apply model transaction, skipping', JSON.stringify(tx), err)
      }
    }
    return model
  })

  // Let every adapter initialize itself against the loaded model.
  for (const [adn, adapter] of adapters) {
    await ctx.with('init-adapter', { name: adn }, async (ctx) => {
      await adapter.init(model)
    })
  }

  const fulltextAdapter = await ctx.with(
    'create full text adapter',
    {},
    async (ctx) =>
      await conf.fulltextAdapter.factory(
        conf.fulltextAdapter.url,
        conf.workspace,
        conf.metrics.newChild('🗒️ fulltext', {})
      )
  )
  const metrics = conf.metrics.newChild('📔 server-storage', {})

  const contentAdapter = await ctx.with(
    'create content adapter',
    {},
    async (ctx) =>
      await createContentAdapter(
        conf.contentAdapters,
        conf.defaultContentAdapter,
        conf.workspace,
        metrics.newChild('content', {})
      )
  )

  const defaultAdapter = adapters.get(conf.defaultAdapter)
  if (defaultAdapter === undefined) {
    throw new Error(`No Adapter for ${DOMAIN_DOC_INDEX_STATE}`)
  }

  // Builds the full-text index for a given storage; requires blob storage to exist.
  const indexFactory = (storage: ServerStorage): FullTextIndex => {
    if (storageAdapter === undefined) {
      throw new Error('No storage adapter')
    }
    const stages = conf.fulltextAdapter.stages(fulltextAdapter, storage, storageAdapter, contentAdapter)

    const indexer = new FullTextIndexPipeline(
      defaultAdapter,
      stages,
      hierarchy,
      conf.workspace,
      metrics.newChild('fulltext', {}),
      modelDb,
      (classes: Ref<Class<Doc>>[]) => {
        // Broadcast an indexing-update workspace event for the re-indexed classes.
        const evt: IndexingUpdateEvent = {
          _class: classes
        }
        const tx: TxWorkspaceEvent = {
          _class: core.class.TxWorkspaceEvent,
          _id: generateId(),
          event: WorkspaceEvent.IndexingUpdate,
          modifiedBy: core.account.System,
          modifiedOn: Date.now(),
          objectSpace: core.space.DerivedTx,
          space: core.space.DerivedTx,
          params: evt
        }
        options.broadcast?.([tx])
      }
    )
    return new FullTextIndex(
      hierarchy,
      fulltextAdapter,
      storage,
      storageAdapter,
      conf.workspace,
      indexer,
      options.upgrade ?? false
    )
  }
  return new TServerStorage(
    conf.domains,
    conf.defaultAdapter,
    adapters,
    hierarchy,
    triggers,
    fulltextAdapter,
    storageAdapter,
    modelDb,
    conf.workspace,
    indexFactory,
    options,
    metrics,
    model
  )
}
/**
 * Creates a null-object StorageAdapter: `exists` always reports false, `list`
 * is always empty, all mutating operations succeed as no-ops, and the
 * accessors (`stat`/`get`/`put`/`read`/`partial`) return empty stubs.
 * Useful where a StorageAdapter is required but no blob storage is configured.
 * @public
 */
export function createNullStorageFactory (): StorageAdapter {
  return {
    exists: async (workspaceId: WorkspaceId) => {
      return false
    },
    make: async (workspaceId: WorkspaceId) => {},
    remove: async (workspaceId: WorkspaceId, objectNames: string[]) => {},
    delete: async (workspaceId: WorkspaceId) => {},
    list: async (workspaceId: WorkspaceId, prefix?: string) => [],
    stat: async (workspaceId: WorkspaceId, objectName: string) => ({}) as any,
    get: async (workspaceId: WorkspaceId, objectName: string) => ({}) as any,
    // NOTE: was `qwe?: any` — renamed to `metadata` to reflect its role in the
    // StorageAdapter.put signature; implementation parameter names do not affect callers.
    put: async (workspaceId: WorkspaceId, objectName: string, stream: any, size?: number, metadata?: any) =>
      ({}) as any,
    read: async (workspaceId: WorkspaceId, name: string) => ({}) as any,
    partial: async (workspaceId: WorkspaceId, objectName: string, offset: number, length?: number) => ({}) as any
  }
}

View File

@ -0,0 +1,835 @@
//
// Copyright © 2020, 2021 Anticrm Platform Contributors.
// Copyright © 2021 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import core, {
type Account,
type AttachedDoc,
type Class,
ClassifierKind,
type Client,
type Collection,
DOMAIN_MODEL,
DOMAIN_TRANSIENT,
DOMAIN_TX,
type Doc,
type DocumentQuery,
type DocumentUpdate,
type Domain,
type FindOptions,
type FindResult,
type Hierarchy,
type LoadModelResponse,
type MeasureContext,
type Mixin,
type ModelDb,
type Ref,
type SearchOptions,
type SearchQuery,
type SearchResult,
type ServerStorage,
type StorageIterator,
type Timestamp,
type Tx,
type TxApplyIf,
type TxCUD,
type TxCollectionCUD,
TxFactory,
TxProcessor,
type TxRemoveDoc,
type TxResult,
type TxUpdateDoc,
type WorkspaceIdWithUrl,
toFindResult
} from '@hcengineering/core'
import { type Metadata, getResource } from '@hcengineering/platform'
import { LiveQuery as LQ } from '@hcengineering/query'
import crypto from 'node:crypto'
import { type DbAdapter } from '../adapter'
import { type FullTextIndex } from '../fulltext'
import serverCore from '../plugin'
import { type Triggers } from '../triggers'
import type { FullTextAdapter, ObjectDDParticipant, ServerStorageOptions, TriggerControl } from '../types'
import { type StorageAdapter } from '../storage'
export class TServerStorage implements ServerStorage {
private readonly fulltext: FullTextIndex
hierarchy: Hierarchy
scopes = new Map<string, Promise<any>>()
hashes!: string[]
triggerData = new Map<Metadata<any>, any>()
liveQuery: LQ
  // Wires the storage together. Most collaborators are injected; the live
  // query and the full-text index are constructed here because both need a
  // reference back to this storage instance.
  constructor (
    private readonly _domains: Record<string, string>,
    private readonly defaultAdapter: string,
    private readonly adapters: Map<string, DbAdapter>,
    hierarchy: Hierarchy,
    private readonly triggers: Triggers,
    private readonly fulltextAdapter: FullTextAdapter,
    readonly storageAdapter: StorageAdapter | undefined,
    readonly modelDb: ModelDb,
    private readonly workspace: WorkspaceIdWithUrl,
    readonly indexFactory: (storage: ServerStorage) => FullTextIndex,
    readonly options: ServerStorageOptions,
    metrics: MeasureContext,
    readonly model: Tx[]
  ) {
    // Live queries run against a lightweight Client facade over this storage.
    this.liveQuery = new LQ(this.newCastClient(hierarchy, modelDb, metrics))
    this.hierarchy = hierarchy
    // The index factory wraps `this`, so it can only be invoked after construction starts.
    this.fulltext = indexFactory(this)
    this.setModel(model)
  }
  // Builds a minimal Client facade over this storage for the live-query
  // engine: reads delegate to findAll/searchFulltext (wrapped in metrics),
  // close is a no-op and tx returns an empty result.
  private newCastClient (hierarchy: Hierarchy, modelDb: ModelDb, metrics: MeasureContext): Client {
    return {
      getHierarchy (): Hierarchy {
        return hierarchy
      },
      getModel (): ModelDb {
        return modelDb
      },
      close: async () => {},
      findAll: async (_class, query, options) => {
        return await metrics.with('query', {}, async (ctx) => {
          const results = await this.findAll(ctx, _class, query, options)
          // Post-process results through lookup-mixin resolution, as a real client would.
          return toFindResult(
            results.map((v) => {
              return this.hierarchy.updateLookupMixin(_class, v, options)
            }),
            results.total
          )
        })
      },
      findOne: async (_class, query, options) => {
        // findOne is findAll constrained to a single result.
        return (
          await metrics.with('query', {}, async (ctx) => {
            return await this.findAll(ctx, _class, query, { ...options, limit: 1 })
          })
        )[0]
      },
      tx: async (tx) => {
        // Transactions are not applied through this facade.
        return {}
      },
      searchFulltext: async (query: SearchQuery, options: SearchOptions) => {
        return await metrics.with('query', {}, async (ctx) => await this.searchFulltext(ctx, query, options))
      }
    }
  }
async close (): Promise<void> {
console.timeLog(this.workspace.name, 'closing')
await this.fulltext.close()
console.timeLog(this.workspace.name, 'closing adapters')
for (const o of this.adapters.values()) {
await o.close()
}
console.timeLog(this.workspace.name, 'closing fulltext')
await this.fulltextAdapter.close()
}
private getAdapter (domain: Domain): DbAdapter {
const name = this._domains[domain] ?? this.defaultAdapter
const adapter = this.adapters.get(name)
if (adapter === undefined) {
throw new Error('adapter not provided: ' + name)
}
return adapter
}
  // Routes CUD transactions to their domain adapters. Consecutive txes hitting
  // the same domain are accumulated in `part` and flushed together; a domain
  // change forces a flush, so per-adapter ordering is preserved. Documents
  // deleted by a batch are preloaded and exposed to the caller via `removedDocs`.
  private async routeTx (ctx: MeasureContext, removedDocs: Map<Ref<Doc>, Doc>, ...txes: Tx[]): Promise<TxResult[]> {
    let part: TxCUD<Doc>[] = []
    let lastDomain: Domain | undefined
    const result: TxResult[] = []
    // Flushes the accumulated batch for `lastDomain` into its adapter.
    const processPart = async (): Promise<void> => {
      if (part.length > 0) {
        // Find all deleted documents
        const adapter = this.getAdapter(lastDomain as Domain)
        const toDelete = part.filter((it) => it._class === core.class.TxRemoveDoc).map((it) => it.objectId)

        if (toDelete.length > 0) {
          // Load the soon-to-be-removed documents before the adapter applies the batch.
          const toDeleteDocs = await ctx.with(
            'adapter-load',
            { domain: lastDomain },
            async () => await adapter.load(lastDomain as Domain, toDelete)
          )

          for (const ddoc of toDeleteDocs) {
            removedDocs.set(ddoc._id, ddoc)
          }
        }

        const r = await ctx.with('adapter-tx', { domain: lastDomain }, async (ctx) => await adapter.tx(ctx, ...part))

        // Update server live queries.
        for (const t of part) {
          await this.liveQuery.tx(t)
        }
        // Adapter may return a single result or an array; flatten into `result`.
        if (Array.isArray(r)) {
          result.push(...r)
        } else {
          result.push(r)
        }
        part = []
      }
    }
    for (const tx of txes) {
      const txCUD = TxProcessor.extractTx(tx) as TxCUD<Doc>
      if (!this.hierarchy.isDerived(txCUD._class, core.class.TxCUD)) {
        // Skip unsupported tx
        console.error('Unsupported transaction', tx)
        continue
      }
      const domain = this.hierarchy.getDomain(txCUD.objectClass)
      if (part.length > 0) {
        // Domain switch: flush the previous batch before starting a new one.
        if (lastDomain !== domain) {
          await processPart()
        }
        lastDomain = domain
        part.push(txCUD)
      } else {
        lastDomain = domain
        part.push(txCUD)
      }
    }
    // Flush the trailing batch.
    await processPart()
    return result
  }
/**
 * Build a system transaction applying `update` to the parent document of a
 * collection. When `_class` is a mixin of the parent's base class a TxMixin
 * is produced; otherwise a plain TxUpdateDoc. The originating transaction's
 * timestamp is preserved on the derived tx.
 */
private async getCollectionUpdateTx<D extends Doc>(
  _id: Ref<D>,
  _class: Ref<Class<D>>,
  modifiedBy: Ref<Account>,
  modifiedOn: number,
  attachedTo: D,
  update: DocumentUpdate<D>
): Promise<Tx> {
  const txFactory = new TxFactory(modifiedBy, true)
  const isMixin = this.hierarchy.getBaseClass(_class) !== _class
  const tx = isMixin
    ? // Mixin operation is required.
      txFactory.createTxMixin(_id, attachedTo._class, attachedTo.space, _class, update)
    : txFactory.createTxUpdateDoc(_class, attachedTo.space, _id, update)
  tx.modifiedOn = modifiedOn
  return tx
}
/**
 * Handle re-parenting of an attached document: when a TxCollectionCUD wraps
 * an update that changes `attachedTo`, produce $inc transactions that
 * decrement the collection counter on the old parent and increment it on the
 * new parent. Returns [] for anything that is not such a move.
 */
private async updateCollection (ctx: MeasureContext, tx: Tx, findAll: ServerStorage['findAll']): Promise<Tx[]> {
  if (tx._class !== core.class.TxCollectionCUD) {
    return []
  }
  const colTx = tx as TxCollectionCUD<Doc, AttachedDoc>
  const _id = colTx.objectId
  const _class = colTx.objectClass
  const { operations } = colTx.tx as TxUpdateDoc<AttachedDoc>
  if (
    colTx.tx._class !== core.class.TxUpdateDoc ||
    this.hierarchy.getDomain(_class) === DOMAIN_MODEL // We could not update increments for model classes
  ) {
    return []
  }
  // Only act when the parent actually changes.
  if (operations?.attachedTo === undefined || operations.attachedTo === _id) {
    return []
  }
  const oldAttachedTo = (await findAll(ctx, _class, { _id }, { limit: 1 }))[0]
  let oldTx: Tx | null = null
  if (oldAttachedTo !== undefined) {
    // Decrement only if the old parent actually declares this collection.
    const attr = this.hierarchy.findAttribute(oldAttachedTo._class, colTx.collection)
    if (attr !== undefined) {
      oldTx = await this.getCollectionUpdateTx(_id, _class, tx.modifiedBy, colTx.modifiedOn, oldAttachedTo, {
        $inc: { [colTx.collection]: -1 }
      })
    }
  }
  // The move may also retarget class/collection; fall back to the old ones.
  const newAttachedToClass = operations.attachedToClass ?? _class
  const newAttachedToCollection = operations.collection ?? colTx.collection
  const newAttachedTo = (await findAll(ctx, newAttachedToClass, { _id: operations.attachedTo }, { limit: 1 }))[0]
  let newTx: Tx | null = null
  const newAttr = this.hierarchy.findAttribute(newAttachedToClass, newAttachedToCollection)
  if (newAttachedTo !== undefined && newAttr !== undefined) {
    newTx = await this.getCollectionUpdateTx(
      newAttachedTo._id,
      newAttachedTo._class,
      tx.modifiedBy,
      colTx.modifiedOn,
      newAttachedTo,
      { $inc: { [newAttachedToCollection]: 1 } }
    )
  }
  return [...(oldTx !== null ? [oldTx] : []), ...(newTx !== null ? [newTx] : [])]
}
/**
 * Maintain collection counters on parent documents: for every
 * TxCollectionCUD in the batch, increment the parent's counter on create,
 * decrement on remove, and delegate attachedTo moves to updateCollection.
 * Parents already removed in this batch (removedMap) are skipped.
 */
private async processCollection (
  ctx: MeasureContext,
  txes: Tx[],
  findAll: ServerStorage['findAll'],
  removedMap: Map<Ref<Doc>, Doc>
): Promise<Tx[]> {
  const result: Tx[] = []
  for (const tx of txes) {
    if (tx._class === core.class.TxCollectionCUD) {
      const colTx = tx as TxCollectionCUD<Doc, AttachedDoc>
      const _id = colTx.objectId
      const _class = colTx.objectClass
      // Skip model operations
      if (this.hierarchy.getDomain(_class) === DOMAIN_MODEL) {
        // We could not update increments for model classes
        continue
      }
      const isCreateTx = colTx.tx._class === core.class.TxCreateDoc
      const isDeleteTx = colTx.tx._class === core.class.TxRemoveDoc
      const isUpdateTx = colTx.tx._class === core.class.TxUpdateDoc
      if (isUpdateTx) {
        // Updates may re-parent the attached document.
        result.push(...(await this.updateCollection(ctx, tx, findAll)))
      }
      if ((isCreateTx || isDeleteTx) && !removedMap.has(_id)) {
        const attachedTo = (await findAll(ctx, _class, { _id }, { limit: 1 }))[0]
        if (attachedTo !== undefined) {
          result.push(
            await this.getCollectionUpdateTx(_id, _class, tx.modifiedBy, colTx.modifiedOn, attachedTo, {
              $inc: { [colTx.collection]: isCreateTx ? 1 : -1 }
            })
          )
        }
      }
    }
  }
  return result
}
/**
 * Append a model transaction and extend the model hash chain: each entry is
 * sha1(previous hash + JSON of the tx), matching the chain built by setModel.
 */
private addModelTx (tx: Tx): void {
  this.model.push(tx)
  const h = crypto.createHash('sha1')
  // When the chain is empty (setModel was given an empty model) seed with ''
  // — the same seed setModel uses. Previously this passed `undefined` to
  // Hash.update, which throws.
  h.update(this.hashes[this.hashes.length - 1] ?? '')
  h.update(JSON.stringify(tx))
  this.hashes.push(h.digest('hex'))
}
private setModel (model: Tx[]): void {
let prev = ''
this.hashes = model.map((it) => {
const h = crypto.createHash('sha1')
h.update(prev)
h.update(JSON.stringify(it))
prev = h.digest('hex')
return prev
})
}
/**
 * Return model transactions for a client.
 *
 * With a `hash`, use the hash-chain protocol: if the hash is known, send an
 * incremental response containing only transactions recorded after it;
 * otherwise the client has diverged and receives the full model. Without a
 * hash, fall back to the timestamp protocol and return txes newer than
 * `lastModelTx`.
 */
async loadModel (lastModelTx: Timestamp, hash?: string): Promise<Tx[] | LoadModelResponse> {
  if (hash === undefined) {
    // Legacy timestamp-based protocol.
    return this.model.filter((it) => it.modifiedOn > lastModelTx)
  }
  const latest = this.hashes[this.hashes.length - 1]
  const pos = this.hashes.indexOf(hash)
  if (pos < 0) {
    // Unknown hash — resend everything.
    return { full: true, hash: latest, transactions: [...this.model] }
  }
  // Known hash — incremental update.
  return { full: false, hash: latest, transactions: this.model.slice(pos + 1) }
}
/**
 * Query documents of a class. `$search` queries are delegated to the
 * full-text engine; everything else goes to the adapter of the (explicit or
 * derived) domain. Queries slower than one second are logged for diagnosis.
 */
async findAll<T extends Doc>(
  ctx: MeasureContext,
  clazz: Ref<Class<T>>,
  query: DocumentQuery<T>,
  options?: FindOptions<T> & {
    domain?: Domain // Allow to find for Doc's in specified domain only.
    prefix?: string
  }
): Promise<FindResult<T>> {
  const prefix = options?.prefix ?? 'client'
  const domain = options?.domain ?? this.hierarchy.getDomain(clazz)
  if (query?.$search !== undefined) {
    // Full-text queries bypass the domain adapters entirely.
    return await ctx.with(prefix + '-fulltext-find-all', {}, (ctx) => this.fulltext.findAll(ctx, clazz, query, options))
  }
  const started = Date.now()
  const result = await ctx.with(
    prefix + '-find-all',
    { _class: clazz },
    (ctx) => this.getAdapter(domain).findAll(ctx, clazz, query, options),
    { clazz, query, options }
  )
  const elapsed = Date.now() - started
  if (elapsed > 1000) {
    // Surface slow queries (>1s) for diagnostics.
    console.error('FindAll', elapsed, clazz, query, options)
  }
  return result
}
/** Run a full-text search inside the 'full-text-search' measurement span. */
async searchFulltext (ctx: MeasureContext, query: SearchQuery, options: SearchOptions): Promise<SearchResult> {
  return await ctx.with('full-text-search', {}, (ctx) => this.fulltext.searchFulltext(ctx, query, options))
}
private getParentClass (_class: Ref<Class<Doc>>): Ref<Class<Doc>> {
const baseDomain = this.hierarchy.getDomain(_class)
const ancestors = this.hierarchy.getAncestors(_class)
let result: Ref<Class<Doc>> = _class
for (const ancestor of ancestors) {
try {
const domain = this.hierarchy.getClass(ancestor).domain
if (domain === baseDomain) {
result = ancestor
}
} catch {}
}
return result
}
/**
 * Collect all mixin classes (descendants of the parent class) that are
 * actually applied to `object`.
 */
private getMixins (_class: Ref<Class<Doc>>, object: Doc): Array<Ref<Mixin<Doc>>> {
  const parentClass = this.getParentClass(_class)
  const isAppliedMixin = (m: Ref<Class<Doc>>): boolean =>
    this.hierarchy.getClass(m).kind === ClassifierKind.MIXIN && this.hierarchy.hasMixin(object, m)
  return this.hierarchy.getDescendants(parentClass).filter(isAppliedMixin)
}
/**
 * Expand document removals into cascading deletes: for every TxRemoveDoc
 * whose target was captured in removedMap, also remove documents attached to
 * its collections (including collections declared by applied mixins) and any
 * related documents reported by ObjectDDParticipant collectors.
 */
private async processRemove (
  ctx: MeasureContext,
  txes: Tx[],
  findAll: ServerStorage['findAll'],
  removedMap: Map<Ref<Doc>, Doc>
): Promise<Tx[]> {
  const result: Tx[] = []
  for (const tx of txes) {
    const actualTx = TxProcessor.extractTx(tx)
    if (!this.hierarchy.isDerived(actualTx._class, core.class.TxRemoveDoc)) {
      continue
    }
    const rtx = actualTx as TxRemoveDoc<Doc>
    // The removed document snapshot was captured earlier by routeTx; without
    // it we cannot inspect its collections, so skip.
    const object = removedMap.get(rtx.objectId)
    if (object === undefined) {
      continue
    }
    result.push(...(await this.deleteClassCollections(ctx, object._class, rtx.objectId, findAll, removedMap)))
    // Mixins may declare additional collections beyond the base class.
    const mixins = this.getMixins(object._class, object)
    for (const mixin of mixins) {
      result.push(
        ...(await this.deleteClassCollections(ctx, mixin, rtx.objectId, findAll, removedMap, object._class))
      )
    }
    result.push(...(await this.deleteRelatedDocuments(ctx, object, findAll, removedMap)))
  }
  return result
}
private async deleteClassCollections (
ctx: MeasureContext,
_class: Ref<Class<Doc>>,
objectId: Ref<Doc>,
findAll: ServerStorage['findAll'],
removedMap: Map<Ref<Doc>, Doc>,
to?: Ref<Class<Doc>>
): Promise<Tx[]> {
const attributes = this.hierarchy.getAllAttributes(_class, to)
const result: Tx[] = []
for (const attribute of attributes) {
if (this.hierarchy.isDerived(attribute[1].type._class, core.class.Collection)) {
const collection = attribute[1].type as Collection<AttachedDoc>
const allAttached = await findAll(ctx, collection.of, { attachedTo: objectId })
for (const attached of allAttached) {
result.push(...this.deleteObject(ctx, attached, removedMap))
}
}
}
return result
}
/**
 * Build the remove transaction for a document and record the document in
 * removedMap. Attached documents are removed via a TxCollectionCUD wrapper so
 * parent collection bookkeeping stays consistent; plain documents get a
 * direct TxRemoveDoc.
 */
private deleteObject (ctx: MeasureContext, object: Doc, removedMap: Map<Ref<Doc>, Doc>): Tx[] {
  const factory = new TxFactory(object.modifiedBy, true)
  removedMap.set(object._id, object)
  if (!this.hierarchy.isDerived(object._class, core.class.AttachedDoc)) {
    return [factory.createTxRemoveDoc(object._class, object.space, object._id)]
  }
  const adoc = object as AttachedDoc
  const removeTx = factory.createTxRemoveDoc(adoc._class, adoc.space, adoc._id)
  const wrapped = factory.createTxCollectionCUD(
    adoc.attachedToClass,
    adoc.attachedTo,
    adoc.space,
    adoc.collection,
    removeTx
  )
  return [wrapped]
}
private async deleteRelatedDocuments (
ctx: MeasureContext,
object: Doc,
findAll: ServerStorage['findAll'],
removedMap: Map<Ref<Doc>, Doc>
): Promise<Tx[]> {
const result: Tx[] = []
const objectClass = this.hierarchy.getClass(object._class)
if (this.hierarchy.hasMixin(objectClass, serverCore.mixin.ObjectDDParticipant)) {
const removeParticipand: ObjectDDParticipant = this.hierarchy.as(
objectClass,
serverCore.mixin.ObjectDDParticipant
)
const collector = await getResource(removeParticipand.collectDocs)
const docs = await collector(object, this.hierarchy, async (_class, query, options) => {
return await findAll(ctx, _class, query, options)
})
for (const d of docs) {
result.push(...this.deleteObject(ctx, d, removedMap))
}
}
return result
}
/**
 * When an update moves a document to another space, generate updates that
 * move every attached document (each Collection attribute) along with it.
 */
private async processMove (ctx: MeasureContext, txes: Tx[], findAll: ServerStorage['findAll']): Promise<Tx[]> {
  const result: Tx[] = []
  for (const tx of txes) {
    const actualTx = TxProcessor.extractTx(tx)
    if (!this.hierarchy.isDerived(actualTx._class, core.class.TxUpdateDoc)) {
      continue
    }
    const utx = actualTx as TxUpdateDoc<Doc>
    const newSpace = utx.operations.space
    // Only updates that actually change the space are interesting.
    if (newSpace === undefined || newSpace === utx.objectSpace) {
      continue
    }
    const factory = new TxFactory(tx.modifiedBy, true)
    for (const [, attribute] of this.hierarchy.getAllAttributes(utx.objectClass)) {
      if (!this.hierarchy.isDerived(attribute.type._class, core.class.Collection)) {
        continue
      }
      const collection = attribute.type as Collection<AttachedDoc>
      // Attached docs are still in the old space at this point.
      const attached = await findAll(ctx, collection.of, { attachedTo: utx.objectId, space: utx.objectSpace })
      for (const { _class, space, _id } of attached) {
        result.push(factory.createTxUpdateDoc(_class, space, _id, { space: newSpace }))
      }
    }
  }
  return result
}
/**
 * Compute all derived transactions for a batch: cascading removals,
 * collection counter maintenance, attached-document space moves, and
 * registered triggers. The combined result is persisted and recursively
 * re-processed via processDerivedTxes until no further txes are produced.
 */
private async processDerived (
  ctx: MeasureContext,
  txes: Tx[],
  triggerFx: Effects,
  findAll: ServerStorage['findAll'],
  removedMap: Map<Ref<Doc>, Doc>
): Promise<Tx[]> {
  // Adapter binding a fixed MeasureContext onto the ctx-taking findAll.
  const fAll =
    (mctx: MeasureContext) =>
    <T extends Doc>(
      clazz: Ref<Class<T>>,
      query: DocumentQuery<T>,
      options?: FindOptions<T>
    ): Promise<FindResult<T>> =>
      findAll(mctx, clazz, query, options)
  const removed = await ctx.with('process-remove', {}, (ctx) => this.processRemove(ctx, txes, findAll, removedMap))
  const collections = await ctx.with('process-collection', {}, (ctx) =>
    this.processCollection(ctx, txes, findAll, removedMap)
  )
  const moves = await ctx.with('process-move', {}, (ctx) => this.processMove(ctx, txes, findAll))
  // Shared control object handed to every trigger; txFactory/ctx/result are
  // supplied per invocation below.
  const triggerControl: Omit<TriggerControl, 'txFactory' | 'ctx' | 'result'> = {
    removedMap,
    workspace: this.workspace,
    fx: triggerFx.fx,
    // Deferred effects that need the full-text adapter.
    fulltextFx: (f) => {
      triggerFx.fx(() => f(this.fulltextAdapter))
    },
    // Deferred effects that need blob storage; silently skipped when no
    // storage adapter is configured.
    storageFx: (f) => {
      const adapter = this.storageAdapter
      if (adapter === undefined) {
        return
      }
      triggerFx.fx(() => f(adapter, this.workspace))
    },
    findAll: fAll(ctx),
    findAllCtx: findAll,
    modelDb: this.modelDb,
    hierarchy: this.hierarchy,
    apply: async (tx, broadcast, target) => {
      return await this.apply(ctx, tx, broadcast, target)
    },
    applyCtx: async (ctx, tx, broadcast, target) => {
      return await this.apply(ctx, tx, broadcast, target)
    },
    // Will create a live query if missing and return values immediately if already asked.
    queryFind: async (_class, query, options) => {
      return await this.liveQuery.queryFind(_class, query, options)
    }
  }
  const triggers = await ctx.with('process-triggers', {}, async (ctx) => {
    const result: Tx[] = []
    // `result` is shared with the triggers so they can observe txes already
    // produced earlier in this invocation.
    result.push(
      ...(await this.triggers.apply(ctx, txes, {
        ...triggerControl,
        ctx,
        findAll: fAll(ctx),
        result
      }))
    )
    return result
  })
  const derived = [...removed, ...collections, ...moves, ...triggers]
  return await this.processDerivedTxes(derived, ctx, triggerFx, findAll, removedMap)
}
/**
 * Persist a batch of derived transactions in modifiedOn order, then recurse
 * so triggers fired by derived txes can produce further derived txes.
 * Returns the full (flattened) list of derived transactions.
 */
private async processDerivedTxes (
  derived: Tx[],
  ctx: MeasureContext,
  triggerFx: Effects,
  findAll: ServerStorage['findAll'],
  removedMap: Map<Ref<Doc>, Doc>
): Promise<Tx[]> {
  derived.sort((a, b) => a.modifiedOn - b.modifiedOn)
  await ctx.with('derived-route-tx', {}, (ctx) => this.routeTx(ctx, removedMap, ...derived))
  if (derived.length === 0) {
    return []
  }
  const nested = await this.processDerived(ctx, derived, triggerFx, findAll, removedMap)
  return [...derived, ...nested]
}
/**
* Verify if apply if is possible to apply.
*/
async verifyApplyIf (
ctx: MeasureContext,
applyIf: TxApplyIf,
findAll: ServerStorage['findAll']
): Promise<{
onEnd: () => void
passed: boolean
}> {
// Wait for synchronized.
;(await this.scopes.get(applyIf.scope)) ?? Promise.resolve()
let onEnd = (): void => {}
// Put sync code
this.scopes.set(
applyIf.scope,
new Promise((resolve) => {
onEnd = () => {
this.scopes.delete(applyIf.scope)
resolve(null)
}
})
)
let passed = true
for (const { _class, query } of applyIf.match) {
const res = await findAll(ctx, _class, query, { limit: 1 })
if (res.length === 0) {
passed = false
break
}
}
if (passed) {
for (const { _class, query } of applyIf.notMatch) {
const res = await findAll(ctx, _class, query, { limit: 1 })
if (res.length > 0) {
passed = false
break
}
}
}
return { passed, onEnd }
}
/**
 * Process transactions and optionally broadcast them (together with any
 * derived transactions) to connected clients.
 * @returns the TxResult for each input transaction.
 */
async apply (ctx: MeasureContext, txes: Tx[], broadcast: boolean, target?: string[]): Promise<TxResult[]> {
  // Destructure directly — the previous `let derived = []` was dead code,
  // immediately overwritten by the assignment.
  const [results, derived] = await this.processTxes(ctx, txes)
  if (broadcast) {
    this.options?.broadcast?.([...txes, ...derived], target)
  }
  return results
}
/**
 * Partition incoming transactions into processing buckets:
 * - applyTxes: TxApplyIf transactions, verified separately
 * - txToStore: txes to persist in DOMAIN_TX (derived-space and transient
 *   CUD txes are excluded)
 * - modelTx: txes targeting the model space
 * - txToProcess: every non-apply tx, in input order
 */
fillTxes (txes: Tx[], txToStore: Tx[], modelTx: Tx[], txToProcess: Tx[], applyTxes: Tx[]): void {
  for (const tx of txes) {
    if (this.hierarchy.isDerived(tx._class, core.class.TxApplyIf)) {
      applyTxes.push(tx)
      continue
    }
    if (tx.space !== core.space.DerivedTx) {
      const isCUD = this.hierarchy.isDerived(tx._class, core.class.TxCUD)
      // Transient-domain CUD txes are processed but never persisted.
      const transient = isCUD && this.hierarchy.findDomain((tx as TxCUD<Doc>).objectClass) === DOMAIN_TRANSIENT
      if (!transient) {
        txToStore.push(tx)
      }
    }
    if (tx.objectSpace === core.space.Model) {
      modelTx.push(tx)
    }
    txToProcess.push(tx)
  }
}
/**
 * Core transaction pipeline. Splits incoming txes into apply-if / model /
 * storable / processable buckets, verifies TxApplyIf conditions, applies
 * model txes to the in-memory model, persists and routes the rest, computes
 * derived transactions via triggers, feeds everything to the full-text
 * indexer, and finally runs deferred trigger effects.
 *
 * @returns per-transaction results together with all derived transactions.
 */
async processTxes (ctx: MeasureContext, txes: Tx[]): Promise<[TxResult[], Tx[]]> {
  // store tx
  // findAll variant used by server-side processing (separate metrics prefix).
  const _findAll: ServerStorage['findAll'] = async <T extends Doc>(
    ctx: MeasureContext,
    clazz: Ref<Class<T>>,
    query: DocumentQuery<T>,
    options?: FindOptions<T>
  ): Promise<FindResult<T>> => {
    return await this.findAll(ctx, clazz, query, { ...options, prefix: 'server' })
  }
  const txToStore: Tx[] = []
  const modelTx: Tx[] = []
  const applyTxes: Tx[] = []
  const txToProcess: Tx[] = []
  const triggerFx = new Effects()
  const removedMap = new Map<Ref<Doc>, Doc>()
  const onEnds: (() => void)[] = []
  const result: TxResult[] = []
  let derived: Tx[] = []
  try {
    this.fillTxes(txes, txToStore, modelTx, txToProcess, applyTxes)
    for (const tx of applyTxes) {
      const applyIf = tx as TxApplyIf
      // Wait for scope promise if found
      const passed = await this.verifyApplyIf(ctx, applyIf, _findAll)
      // onEnd must run even on failure to release the scope (see finally).
      onEnds.push(passed.onEnd)
      if (passed.passed) {
        result.push({
          derived: [],
          success: true
        })
        // Conditions hold: fold the nested txes into the normal pipeline.
        this.fillTxes(applyIf.txes, txToStore, modelTx, txToProcess, applyTxes)
        derived = [...applyIf.txes]
      } else {
        result.push({
          derived: [],
          success: false
        })
      }
    }
    for (const tx of modelTx) {
      this.addModelTx(tx)
      // maintain hierarchy and triggers
      this.hierarchy.tx(tx)
      await this.triggers.tx(tx)
      await this.modelDb.tx(tx)
    }
    // Persist raw transactions first, then route them to domain adapters.
    await ctx.with('domain-tx', {}, async (ctx) => await this.getAdapter(DOMAIN_TX).tx(ctx, ...txToStore))
    result.push(...(await ctx.with('apply', {}, (ctx) => this.routeTx(ctx, removedMap, ...txToProcess))))
    // invoke triggers and store derived objects
    derived = derived.concat(await this.processDerived(ctx, txToProcess, triggerFx, _findAll, removedMap))
    // index object
    await ctx.with('fulltext-tx', {}, async (ctx) => {
      await this.fulltext.tx(ctx, [...txToProcess, ...derived])
    })
    // Deferred side effects queued by triggers run only after everything above succeeds.
    for (const fx of triggerFx.effects) {
      await fx()
    }
  } catch (err: any) {
    console.log(err)
    throw err
  } finally {
    // Always release TxApplyIf scopes, even on error.
    onEnds.forEach((p) => {
      p()
    })
  }
  return [result, derived]
}
/**
 * Process a single transaction inside a 'client-tx' measurement span.
 * @returns the primary TxResult together with all derived transactions.
 */
async tx (ctx: MeasureContext, tx: Tx): Promise<[TxResult, Tx[]]> {
  return await ctx.with('client-tx', { _class: tx._class }, async (ctx) => {
    const [results, derived] = await this.processTxes(ctx, [tx])
    return [results[0], derived]
  })
}
/** Low-level iterator over all documents of a domain, delegated to its adapter. */
find (domain: Domain): StorageIterator {
  return this.getAdapter(domain).find(domain)
}
/** Load raw documents by id straight from the domain adapter. */
async load (domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
  return await this.getAdapter(domain).load(domain, docs)
}
/** Bulk-write raw documents into a domain (adapter-defined semantics), bypassing the tx pipeline. */
async upload (domain: Domain, docs: Doc[]): Promise<void> {
  await this.getAdapter(domain).upload(domain, docs)
}
/** Remove raw documents by id from a domain, bypassing the tx pipeline. */
async clean (domain: Domain, docs: Ref<Doc>[]): Promise<void> {
  await this.getAdapter(domain).clean(domain, docs)
}
}
/** An asynchronous side effect deferred until after transaction processing. */
type Effect = () => Promise<void>

/** Collects side-effect callbacks to be executed once processing succeeds. */
class Effects {
  // Registered effects, kept in registration order.
  private readonly registered: Effect[] = []

  /** Queue an effect for later execution. */
  public fx = (f: Effect): void => {
    this.registered.push(f)
  }

  /** A defensive copy of the queued effects. */
  get effects (): Effect[] {
    return this.registered.slice()
  }
}

File diff suppressed because it is too large Load Diff

View File

@ -15,22 +15,22 @@
//
import core, {
AttachedDoc,
Class,
Doc,
DocumentQuery,
Hierarchy,
MeasureContext,
Obj,
Ref,
Tx,
TxCollectionCUD,
TxCreateDoc,
type AttachedDoc,
type Class,
type Doc,
type DocumentQuery,
type Hierarchy,
type MeasureContext,
type Obj,
type Ref,
type Tx,
type TxCollectionCUD,
type TxCreateDoc,
TxFactory,
matchQuery
} from '@hcengineering/core'
import { Resource, getResource } from '@hcengineering/platform'
import { type Resource, getResource } from '@hcengineering/platform'
import type { Trigger, TriggerControl, TriggerFunc } from './types'
import serverCore from './plugin'

View File

@ -14,35 +14,35 @@
//
import {
Account,
Class,
Doc,
DocumentQuery,
FindOptions,
FindResult,
Hierarchy,
LowLevelStorage,
MeasureContext,
type Account,
type Class,
type Doc,
type DocumentQuery,
type FindOptions,
type FindResult,
type Hierarchy,
type LowLevelStorage,
type MeasureContext,
MeasureMetricsContext,
ModelDb,
Obj,
Ref,
SearchOptions,
SearchQuery,
SearchResult,
ServerStorage,
Space,
Storage,
Timestamp,
Tx,
TxFactory,
TxResult,
WorkspaceId,
WorkspaceIdWithUrl
type ModelDb,
type Obj,
type Ref,
type SearchOptions,
type SearchQuery,
type SearchResult,
type ServerStorage,
type Space,
type Storage,
type Timestamp,
type Tx,
type TxFactory,
type TxResult,
type WorkspaceId,
type WorkspaceIdWithUrl
} from '@hcengineering/core'
import { MinioService } from '@hcengineering/minio'
import type { Asset, Resource } from '@hcengineering/platform'
import { Readable } from 'stream'
import { type StorageAdapter } from './storage'
import { type Readable } from 'stream'
/**
* @public
@ -202,11 +202,6 @@ export interface SearchStringResult {
total?: number
}
/**
* @public
*/
export type StorageAdapter = MinioService
/**
* @public
*/
@ -303,15 +298,6 @@ export type FullTextAdapterFactory = (
context: MeasureContext
) => Promise<FullTextAdapter>
/**
* @public
*/
export interface ContentTextAdapterConfiguration {
factory: ContentTextAdapterFactory
contentType: string
url: string
}
/**
* @public
*/
@ -412,3 +398,16 @@ export interface SearchPresenter extends Class<Doc> {
getSearchShortTitle?: SearchPresenterFunc
getSearchTitle?: SearchPresenterFunc
}
/**
* @public
*/
export interface ServerStorageOptions {
// If defined, will skip update of attached documents on document update.
skipUpdateAttached?: boolean
// Indexing is not required to be started for upgrade mode.
upgrade: boolean
broadcast?: BroadcastFunc
}

View File

@ -1,5 +1,5 @@
{
"extends": "./node_modules/@hcengineering/platform-rig/profiles/default/tsconfig.json",
"extends": "./node_modules/@hcengineering/platform-rig/profiles/node/tsconfig.json",
"compilerOptions": {
"rootDir": "./src",

View File

@ -1,5 +1,5 @@
module.exports = {
extends: ['./node_modules/@hcengineering/platform-rig/profiles/default/eslint.config.json'],
extends: ['./node_modules/@hcengineering/platform-rig/profiles/node/eslint.config.json'],
parserOptions: {
tsconfigRootDir: __dirname,
project: './tsconfig.json'

View File

@ -1,4 +1,5 @@
{
"$schema": "https://developer.microsoft.com/json-schemas/rig-package/rig.schema.json",
"rigPackageName": "@hcengineering/platform-rig"
"rigPackageName": "@hcengineering/platform-rig",
"rigProfile": "node"
}

View File

@ -5,14 +5,15 @@
"svelte": "src/index.ts",
"types": "types/index.d.ts",
"author": "Anticrm Platform Contributors",
"template": "@hcengineering/node-package",
"license": "EPL-2.0",
"scripts": {
"build": "compile",
"build:watch": "compile",
"test": "jest --passWithNoTests --silent",
"test": "jest --passWithNoTests --silent --forceExit",
"format": "format src",
"_phase:build": "compile transpile src",
"_phase:test": "jest --passWithNoTests --silent",
"_phase:test": "jest --passWithNoTests --silent --forceExit",
"_phase:format": "format src",
"_phase:validate": "compile validate"
},
@ -35,6 +36,7 @@
"dependencies": {
"@types/minio": "~7.0.11",
"@hcengineering/core": "^0.6.28",
"@hcengineering/server-core": "^0.6.1",
"minio": "^7.0.26"
}
}

View File

@ -13,16 +13,12 @@
// limitations under the License.
//
import { BucketItem, BucketItemStat, Client, ItemBucketMetadata, UploadedObjectInfo } from 'minio'
import { Client, type BucketItemStat, type ItemBucketMetadata, type UploadedObjectInfo } from 'minio'
import { Readable as ReadableStream } from 'stream'
import { toWorkspaceString, type WorkspaceId } from '@hcengineering/core'
import { toWorkspaceString, WorkspaceId } from '@hcengineering/core'
/**
* @public
*/
export type MinioWorkspaceItem = Required<BucketItem> & { metaData: ItemBucketMetadata }
import { type StorageAdapter, type WorkspaceItem } from '@hcengineering/server-core'
import { type Readable } from 'stream'
/**
* @public
@ -34,7 +30,7 @@ export function getBucketId (workspaceId: WorkspaceId): string {
/**
* @public
*/
export class MinioService {
export class MinioService implements StorageAdapter {
client: Client
constructor (opt: { endPoint: string, port: number, accessKey: string, secretKey: string, useSSL: boolean }) {
this.client = new Client(opt)
@ -56,9 +52,9 @@ export class MinioService {
await this.client.removeBucket(getBucketId(workspaceId))
}
async list (workspaceId: WorkspaceId, prefix?: string): Promise<MinioWorkspaceItem[]> {
async list (workspaceId: WorkspaceId, prefix?: string): Promise<WorkspaceItem[]> {
try {
const items = new Map<string, MinioWorkspaceItem>()
const items = new Map<string, WorkspaceItem>()
const list = this.client.listObjects(getBucketId(workspaceId), prefix, true)
await new Promise((resolve) => {
list.on('data', (data) => {
@ -84,14 +80,14 @@ export class MinioService {
return await this.client.statObject(getBucketId(workspaceId), objectName)
}
async get (workspaceId: WorkspaceId, objectName: string): Promise<ReadableStream> {
async get (workspaceId: WorkspaceId, objectName: string): Promise<Readable> {
return await this.client.getObject(getBucketId(workspaceId), objectName)
}
async put (
workspaceId: WorkspaceId,
objectName: string,
stream: ReadableStream | Buffer | string,
stream: Readable | Buffer | string,
size?: number,
metaData?: ItemBucketMetadata
): Promise<UploadedObjectInfo> {
@ -119,12 +115,7 @@ export class MinioService {
return chunks
}
async partial (
workspaceId: WorkspaceId,
objectName: string,
offset: number,
length?: number
): Promise<ReadableStream> {
async partial (workspaceId: WorkspaceId, objectName: string, offset: number, length?: number): Promise<Readable> {
return await this.client.getPartialObject(getBucketId(workspaceId), objectName, offset, length)
}
}

View File

@ -1,5 +1,5 @@
{
"extends": "./node_modules/@hcengineering/platform-rig/profiles/default/tsconfig.json",
"extends": "./node_modules/@hcengineering/platform-rig/profiles/node/tsconfig.json",
"compilerOptions": {
"rootDir": "./src",

View File

@ -45,7 +45,6 @@
"elastic-apm-node": "~3.26.0",
"@hcengineering/server-token": "^0.6.7",
"@hcengineering/middleware": "^0.6.0",
"@hcengineering/minio": "^0.6.0",
"@hcengineering/text": "^0.6.1",
"got": "^11.8.3"
}

View File

@ -33,13 +33,12 @@ import core, {
TxResult,
WorkspaceId
} from '@hcengineering/core'
import { MinioService, MinioWorkspaceItem } from '@hcengineering/minio'
import { DbAdapter } from '@hcengineering/server-core'
import { DbAdapter, StorageAdapter, WorkspaceItem } from '@hcengineering/server-core'
class MinioBlobAdapter implements DbAdapter {
class StorageBlobAdapter implements DbAdapter {
constructor (
readonly workspaceId: WorkspaceId,
readonly client: MinioService
readonly client: StorageAdapter
) {}
async findAll<T extends Doc>(
@ -63,14 +62,14 @@ class MinioBlobAdapter implements DbAdapter {
async close (): Promise<void> {}
find (domain: Domain): StorageIterator {
let listRecieved = false
let items: MinioWorkspaceItem[] = []
let listReceived = false
let items: WorkspaceItem[] = []
let pos = 0
return {
next: async () => {
if (!listRecieved) {
if (!listReceived) {
items = await this.client.list(this.workspaceId)
listRecieved = true
listReceived = true
}
if (pos < items?.length) {
const item = items[pos]
@ -143,15 +142,15 @@ class MinioBlobAdapter implements DbAdapter {
/**
* @public
*/
export async function createMinioDataAdapter (
export async function createStorageDataAdapter (
hierarchy: Hierarchy,
url: string,
workspaceId: WorkspaceId,
modelDb: ModelDb,
storage?: MinioService
storage?: StorageAdapter
): Promise<DbAdapter> {
if (storage === undefined) {
throw new Error('minio storage adapter require minio')
}
return new MinioBlobAdapter(workspaceId, storage)
return new StorageBlobAdapter(workspaceId, storage)
}

View File

@ -42,6 +42,7 @@
"ws": "^8.10.0",
"@hcengineering/model": "^0.6.7",
"@hcengineering/server-token": "^0.6.7",
"@hcengineering/server-core": "^0.6.1",
"@hcengineering/mongo": "^0.6.1",
"@hcengineering/minio": "^0.6.0"
}

View File

@ -32,6 +32,7 @@ import core, {
import { MinioService } from '@hcengineering/minio'
import { consoleModelLogger, MigrateOperation, ModelLogger } from '@hcengineering/model'
import { getWorkspaceDB } from '@hcengineering/mongo'
import { StorageAdapter } from '@hcengineering/server-core'
import { Db, Document, MongoClient } from 'mongodb'
import { connect } from './connect'
import toolPlugin from './plugin'
@ -64,7 +65,7 @@ export class FileModelLogger implements ModelLogger {
/**
* @public
*/
export function prepareTools (rawTxes: Tx[]): { mongodbUri: string, minio: MinioService, txes: Tx[] } {
export function prepareTools (rawTxes: Tx[]): { mongodbUri: string, storageAdapter: StorageAdapter, txes: Tx[] } {
let minioEndpoint = process.env.MINIO_ENDPOINT
if (minioEndpoint === undefined) {
console.error('please provide minio endpoint')
@ -104,7 +105,7 @@ export function prepareTools (rawTxes: Tx[]): { mongodbUri: string, minio: Minio
secretKey: minioSecretKey
})
return { mongodbUri, minio, txes: JSON.parse(JSON.stringify(rawTxes)) as Tx[] }
return { mongodbUri, storageAdapter: minio, txes: JSON.parse(JSON.stringify(rawTxes)) as Tx[] }
}
/**
@ -117,7 +118,7 @@ export async function initModel (
migrateOperations: [string, MigrateOperation][],
logger: ModelLogger = consoleModelLogger
): Promise<CoreClient> {
const { mongodbUri, minio, txes } = prepareTools(rawTxes)
const { mongodbUri, storageAdapter: minio, txes } = prepareTools(rawTxes)
if (txes.some((tx) => tx.objectSpace !== core.space.Model)) {
throw Error('Model txes must target only core.space.Model')
}