commit bf1de1f436 (parent 74c27d6dd7)
Author: Denis Bykhov <bykhov.denis@gmail.com> (committed via GitHub, 2024-09-11 13:31:26 +05:00)
Signed-off-by: Denis Bykhov <bykhov.denis@gmail.com>
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
62 changed files with 4187 additions and 632 deletions


@@ -190,6 +190,7 @@ jobs:
       - name: Testing...
         run: node common/scripts/install-run-rush.js test
         env:
+          DB_URL: 'postgresql://postgres:example@localhost:5433'
           ELASTIC_URL: 'http://localhost:9201'
           MONGO_URL: 'mongodb://localhost:27018'
   uitest:
@@ -309,6 +310,71 @@ jobs:
       #   with:
       #     name: db-snapshot
       #     path: ./tests/db_dump
+  uitest-pg:
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          filter: tree:0
+      - uses: actions/setup-node@v4
+        with:
+          node-version-file: '.nvmrc'
+      - name: Cache node modules
+        uses: actions/cache@v4
+        env:
+          cache-name: cache-node-platform
+        with:
+          path: |
+            common/temp
+          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/pnpm-lock.yaml') }}
+          restore-keys: |
+            ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/pnpm-lock.yaml') }}
+      - name: Checking for mis-matching dependencies...
+        run: node common/scripts/install-run-rush.js check
+      - name: Installing...
+        run: node common/scripts/install-run-rush.js install
+      - name: Docker Build
+        run: node common/scripts/install-run-rush.js docker:build -p 20
+        env:
+          DOCKER_CLI_HINTS: false
+      - name: Prepare server
+        run: |
+          cd ./tests
+          export DO_CLEAN="true"
+          ./prepare-pg.sh
+      - name: Install Playwright
+        run: |
+          cd ./tests/sanity
+          node ../../common/scripts/install-run-rushx.js ci
+      - name: Run UI tests
+        run: |
+          cd ./tests/sanity
+          node ../../common/scripts/install-run-rushx.js uitest
+      - name: 'Store docker logs'
+        if: always()
+        run: |
+          cd ./tests/sanity
+          mkdir logs
+          docker logs $(docker ps | grep transactor | cut -f 1 -d ' ') > logs/transactor.log
+          docker logs $(docker ps | grep account | cut -f 1 -d ' ') > logs/account.log
+          docker logs $(docker ps | grep front | cut -f 1 -d ' ') > logs/front.log
+      - name: Upload test results
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: playwright-results-pg
+          path: ./tests/sanity/playwright-report/
+      - name: Upload Logs
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: docker-logs-pg
+          path: ./tests/sanity/logs
   uitest-qms:
     runs-on: ubuntu-latest
     timeout-minutes: 60

.vscode/launch.json (vendored)

@@ -59,15 +59,15 @@
     "args": ["src/__start.ts"],
     "env": {
       "ELASTIC_URL": "http://localhost:9200",
-      "MONGO_URL": "mongodb://localhost:27017",
+      "MONGO_URL": "postgresql://postgres:example@localhost:5432;mongodb://localhost:27017",
       "APM_SERVER_URL2": "http://localhost:8200",
       "METRICS_CONSOLE": "false",
       "METRICS_FILE": "${workspaceRoot}/metrics.txt", // Show metrics in console every 30 seconds.
-      "STORAGE_CONFIG": "minio|localhost?accessKey=minioadmin&secretKey=minioadmin",
+      "STORAGE_CONFIG": "minio|localhost:9000?accessKey=minioadmin&secretKey=minioadmin",
       "SERVER_SECRET": "secret",
       "ENABLE_CONSOLE": "true",
       "COLLABORATOR_URL": "ws://localhost:3078",
-      "REKONI_URL": "http://localhost:4004",
+      "REKONI_URL": "http://localhost:4000",
       "FRONT_URL": "http://localhost:8080",
       "ACCOUNTS_URL": "http://localhost:3000",
       // "SERVER_PROVIDER":"uweb"
@@ -240,7 +240,29 @@
       "name": "Debug tool upgrade",
       "type": "node",
       "request": "launch",
-      "args": ["src/__start.ts", "stress", "ws://localhost:3333", "wrong"],
+      "args": ["src/__start.ts", "create-workspace", "sanity-ws", "-w sanity-ws"],
+      "env": {
+        "SERVER_SECRET": "secret",
+        "MINIO_ACCESS_KEY": "minioadmin",
+        "MINIO_SECRET_KEY": "minioadmin",
+        "MINIO_ENDPOINT": "localhost:9000",
+        "TRANSACTOR_URL": "ws://localhost:3333",
+        "MONGO_URL": "mongodb://localhost:27017",
+        "ACCOUNTS_URL": "http://localhost:3000",
+        "TELEGRAM_DATABASE": "telegram-service",
+        "ELASTIC_URL": "http://localhost:9200",
+        "REKONI_URL": "http://localhost:4000"
+      },
+      "runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
+      "sourceMaps": true,
+      "outputCapture": "std",
+      "cwd": "${workspaceRoot}/dev/tool"
+    },
+    {
+      "name": "Debug tool move",
+      "type": "node",
+      "request": "launch",
+      "args": ["src/__start.ts", "move-to-pg"],
       "env": {
         "SERVER_SECRET": "secret",
         "MINIO_ACCESS_KEY": "minioadmin",
@@ -248,6 +270,7 @@
         "MINIO_ENDPOINT": "localhost",
         "TRANSACTOR_URL": "ws://localhost:3333",
         "MONGO_URL": "mongodb://localhost:27017",
+        "DB_URL": "postgresql://postgres:example@localhost:5432",
         "ACCOUNTS_URL": "http://localhost:3000",
         "TELEGRAM_DATABASE": "telegram-service",
         "ELASTIC_URL": "http://localhost:9200",


@@ -629,6 +629,9 @@ dependencies:
   '@rush-temp/pod-workspace':
     specifier: file:./projects/pod-workspace.tgz
     version: file:projects/pod-workspace.tgz
+  '@rush-temp/postgres':
+    specifier: file:./projects/postgres.tgz
+    version: file:projects/postgres.tgz(esbuild@0.20.1)(ts-node@10.9.2)
   '@rush-temp/preference':
     specifier: file:./projects/preference.tgz
     version: file:projects/preference.tgz(@types/node@20.11.19)(esbuild@0.20.1)(ts-node@10.9.2)
@@ -1298,6 +1301,9 @@
   '@types/pdfjs-dist':
     specifier: 2.10.378
     version: 2.10.378
+  '@types/pg':
+    specifier: ^8.11.6
+    version: 8.11.6
   '@types/png-chunks-extract':
     specifier: ^1.0.2
     version: 1.0.2
@@ -1703,6 +1709,9 @@
   pdfjs-dist:
     specifier: 2.12.313
     version: 2.12.313
+  pg:
+    specifier: 8.12.0
+    version: 8.12.0
   png-chunks-extract:
     specifier: ^1.0.0
     version: 1.0.0
@@ -4690,7 +4699,7 @@ packages:
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
     dependencies:
       ajv: 6.12.6
-      debug: 4.3.5
+      debug: 4.3.4
       espree: 9.6.1
       globals: 13.24.0
       ignore: 5.3.1
@@ -4845,7 +4854,7 @@
     engines: {node: '>=10.10.0'}
     dependencies:
       '@humanwhocodes/object-schema': 2.0.2
-      debug: 4.3.5
+      debug: 4.3.4
       minimatch: 3.1.2
     transitivePeerDependencies:
       - supports-color
@@ -9364,6 +9373,14 @@
       - worker-loader
     dev: false

+  /@types/pg@8.11.6:
+    resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==}
+    dependencies:
+      '@types/node': 20.11.19
+      pg-protocol: 1.6.1
+      pg-types: 4.0.2
+    dev: false
+
   /@types/plist@3.0.5:
     resolution: {integrity: sha512-E6OCaRmAe4WDmWNsL/9RMqdkkzDCY1etutkflWk4c+AcjDU07Pcz1fQwTX0TQz+Pxqn9i4L1TU3UFpjnrcDgxA==}
     requiresBuild: true
@@ -9728,7 +9745,7 @@
     dependencies:
       '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.3.3)
       '@typescript-eslint/utils': 6.21.0(eslint@8.56.0)(typescript@5.3.3)
-      debug: 4.3.5
+      debug: 4.3.4
       eslint: 8.56.0
       ts-api-utils: 1.2.1(typescript@5.3.3)
       typescript: 5.3.3
@@ -9778,7 +9795,7 @@
     dependencies:
       '@typescript-eslint/types': 6.21.0
       '@typescript-eslint/visitor-keys': 6.21.0
-      debug: 4.3.5
+      debug: 4.3.4
       globby: 11.1.0
       is-glob: 4.0.3
       minimatch: 9.0.3
@@ -10566,7 +10583,7 @@
       builder-util: 24.13.1
       builder-util-runtime: 9.2.4
       chromium-pickle-js: 0.2.0
-      debug: 4.3.5
+      debug: 4.3.4
       dmg-builder: 24.13.3
       ejs: 3.1.9
       electron-publish: 24.13.1
@@ -11403,7 +11420,7 @@
     resolution: {integrity: sha512-upp+biKpN/XZMLim7aguUyW8s0FUpDvOtK6sbanMFDAMBzpHDqdhgVYm6zc9HJ6nWo7u2Lxk60i2M6Jd3aiNrA==}
     engines: {node: '>=12.0.0'}
     dependencies:
-      debug: 4.3.5
+      debug: 4.3.4
       sax: 1.3.0
     transitivePeerDependencies:
       - supports-color
@@ -11419,7 +11436,7 @@
       builder-util-runtime: 9.2.4
       chalk: 4.1.2
       cross-spawn: 7.0.3
-      debug: 4.3.5
+      debug: 4.3.4
       fs-extra: 10.1.0
       http-proxy-agent: 5.0.0
       https-proxy-agent: 5.0.1
@@ -13385,7 +13402,7 @@
       has-property-descriptors: 1.0.2
       has-proto: 1.0.3
       has-symbols: 1.0.3
-      hasown: 2.0.2
+      hasown: 2.0.1
       internal-slot: 1.0.7
       is-array-buffer: 3.0.4
       is-callable: 1.2.7
@@ -13531,13 +13548,13 @@
     dependencies:
       get-intrinsic: 1.2.4
       has-tostringtag: 1.0.2
-      hasown: 2.0.2
+      hasown: 2.0.1
     dev: false

   /es-shim-unscopables@1.0.2:
     resolution: {integrity: sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==}
     dependencies:
-      hasown: 2.0.2
+      hasown: 2.0.1
     dev: false

   /es-to-primitive@1.2.1:
@@ -14139,7 +14156,7 @@
       optionator: 0.9.3
       progress: 2.0.3
       regexpp: 3.2.0
-      semver: 7.6.0
+      semver: 7.6.3
       strip-ansi: 6.0.1
       strip-json-comments: 3.1.1
       text-table: 0.2.0
@@ -14850,7 +14867,7 @@
       minimatch: 3.1.2
       node-abort-controller: 3.1.1
       schema-utils: 3.3.0
-      semver: 7.6.0
+      semver: 7.6.3
       tapable: 2.2.1
       typescript: 5.3.3
       webpack: 5.90.3(@swc/core@1.4.2)(esbuild@0.20.1)(webpack-cli@5.1.4)
@@ -15105,7 +15122,7 @@
       function-bind: 1.1.2
       has-proto: 1.0.3
       has-symbols: 1.0.3
-      hasown: 2.0.2
+      hasown: 2.0.1
     dev: false

   /get-nonce@1.0.1:
@@ -15868,7 +15885,7 @@
     engines: {node: '>= 6'}
     dependencies:
       agent-base: 6.0.2
-      debug: 4.3.5
+      debug: 4.3.4
     transitivePeerDependencies:
       - supports-color
     dev: false
@@ -16037,7 +16054,7 @@
     engines: {node: '>= 0.4'}
     dependencies:
       es-errors: 1.3.0
-      hasown: 2.0.2
+      hasown: 2.0.1
       side-channel: 1.0.6
     dev: false
@@ -16979,7 +16996,7 @@
       jest-util: 29.7.0
       natural-compare: 1.4.0
       pretty-format: 29.7.0
-      semver: 7.6.0
+      semver: 7.6.3
     transitivePeerDependencies:
       - supports-color
     dev: false
@@ -19490,6 +19507,86 @@
       is-reference: 3.0.2
     dev: false

+  /pg-cloudflare@1.1.1:
+    resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==}
+    requiresBuild: true
+    dev: false
+    optional: true
+
+  /pg-connection-string@2.6.4:
+    resolution: {integrity: sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==}
+    dev: false
+
+  /pg-int8@1.0.1:
+    resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==}
+    engines: {node: '>=4.0.0'}
+    dev: false
+
+  /pg-numeric@1.0.2:
+    resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==}
+    engines: {node: '>=4'}
+    dev: false
+
+  /pg-pool@3.6.2(pg@8.12.0):
+    resolution: {integrity: sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==}
+    peerDependencies:
+      pg: '>=8.0'
+    dependencies:
+      pg: 8.12.0
+    dev: false
+
+  /pg-protocol@1.6.1:
+    resolution: {integrity: sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==}
+    dev: false
+
+  /pg-types@2.2.0:
+    resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==}
+    engines: {node: '>=4'}
+    dependencies:
+      pg-int8: 1.0.1
+      postgres-array: 2.0.0
+      postgres-bytea: 1.0.0
+      postgres-date: 1.0.7
+      postgres-interval: 1.2.0
+    dev: false
+
+  /pg-types@4.0.2:
+    resolution: {integrity: sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng==}
+    engines: {node: '>=10'}
+    dependencies:
+      pg-int8: 1.0.1
+      pg-numeric: 1.0.2
+      postgres-array: 3.0.2
+      postgres-bytea: 3.0.0
+      postgres-date: 2.1.0
+      postgres-interval: 3.0.0
+      postgres-range: 1.1.4
+    dev: false
+
+  /pg@8.12.0:
+    resolution: {integrity: sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ==}
+    engines: {node: '>= 8.0.0'}
+    peerDependencies:
+      pg-native: '>=3.0.1'
+    peerDependenciesMeta:
+      pg-native:
+        optional: true
+    dependencies:
+      pg-connection-string: 2.6.4
+      pg-pool: 3.6.2(pg@8.12.0)
+      pg-protocol: 1.6.1
+      pg-types: 2.2.0
+      pgpass: 1.0.5
+    optionalDependencies:
+      pg-cloudflare: 1.1.1
+    dev: false
+
+  /pgpass@1.0.5:
+    resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==}
+    dependencies:
+      split2: 4.2.0
+    dev: false
+
   /phin@2.9.3:
     resolution: {integrity: sha512-CzFr90qM24ju5f88quFC/6qohjC144rehe5n6DH900lgXmUe86+xCKc10ev56gRKC4/BkHUoG4uSiQgBiIXwDA==}
     deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.
@@ -19782,6 +19879,54 @@
       source-map-js: 1.0.2
     dev: false

+  /postgres-array@2.0.0:
+    resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==}
+    engines: {node: '>=4'}
+    dev: false
+
+  /postgres-array@3.0.2:
+    resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==}
+    engines: {node: '>=12'}
+    dev: false
+
+  /postgres-bytea@1.0.0:
+    resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==}
+    engines: {node: '>=0.10.0'}
+    dev: false
+
+  /postgres-bytea@3.0.0:
+    resolution: {integrity: sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==}
+    engines: {node: '>= 6'}
+    dependencies:
+      obuf: 1.1.2
+    dev: false
+
+  /postgres-date@1.0.7:
+    resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==}
+    engines: {node: '>=0.10.0'}
+    dev: false
+
+  /postgres-date@2.1.0:
+    resolution: {integrity: sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA==}
+    engines: {node: '>=12'}
+    dev: false
+
+  /postgres-interval@1.2.0:
+    resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==}
+    engines: {node: '>=0.10.0'}
+    dependencies:
+      xtend: 4.0.2
+    dev: false
+
+  /postgres-interval@3.0.0:
+    resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==}
+    engines: {node: '>=12'}
+    dev: false
+
+  /postgres-range@1.1.4:
+    resolution: {integrity: sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==}
+    dev: false
+
   /posthog-js@1.122.0:
     resolution: {integrity: sha512-+8R2/nLaWyI5Jp2Ly7L52qcgDFU3xryyoNG52DPJ8dlGnagphxIc0mLNGurgyKeeTGycsOsuOIP4dtofv3ZoBA==}
     deprecated: This version of posthog-js is deprecated, please update posthog-js, and do not use this version! Check out our JS docs at https://posthog.com/docs/libraries/js
@@ -20646,7 +20791,7 @@
     resolution: {integrity: sha512-efCx3b+0Z69/LGJmm9Yvi4cqEdxnoGnxYxGxBghkkTTFeXRtTCmmhO0AnAfHz59k957uTSuy8WaHqOs8wbYUWg==}
     engines: {node: '>=6'}
     dependencies:
-      debug: 4.3.5
+      debug: 4.3.4
       module-details-from-path: 1.0.3
       resolve: 1.22.8
     transitivePeerDependencies:
@@ -21479,6 +21624,11 @@
     engines: {node: '>=6'}
     dev: false

+  /split2@4.2.0:
+    resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==}
+    engines: {node: '>= 10.x'}
+    dev: false
+
   /sprintf-js@1.0.3:
     resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==}
     dev: false
@@ -30219,6 +30369,39 @@
       - supports-color
     dev: false

+  file:projects/postgres.tgz(esbuild@0.20.1)(ts-node@10.9.2):
+    resolution: {integrity: sha512-qZVG4Pk9RAvQfkKRB1iQPPOWsKFvFuFyNBtD/ksT/eW0/ByyGABvYMeQPNenrh3ZH2n/hZ5lH5DON042MXebPg==, tarball: file:projects/postgres.tgz}
+    id: file:projects/postgres.tgz
+    name: '@rush-temp/postgres'
+    version: 0.0.0
+    dependencies:
+      '@types/jest': 29.5.12
+      '@types/node': 20.11.19
+      '@types/pg': 8.11.6
+      '@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.56.0)(typescript@5.3.3)
+      '@typescript-eslint/parser': 6.21.0(eslint@8.56.0)(typescript@5.3.3)
+      eslint: 8.56.0
+      eslint-config-standard-with-typescript: 40.0.0(@typescript-eslint/eslint-plugin@6.21.0)(eslint-plugin-import@2.29.1)(eslint-plugin-n@15.7.0)(eslint-plugin-promise@6.1.1)(eslint@8.56.0)(typescript@5.3.3)
+      eslint-plugin-import: 2.29.1(eslint@8.56.0)
+      eslint-plugin-n: 15.7.0(eslint@8.56.0)
+      eslint-plugin-promise: 6.1.1(eslint@8.56.0)
+      jest: 29.7.0(@types/node@20.11.19)(ts-node@10.9.2)
+      pg: 8.12.0
+      prettier: 3.2.5
+      ts-jest: 29.1.2(esbuild@0.20.1)(jest@29.7.0)(typescript@5.3.3)
+      typescript: 5.3.3
+    transitivePeerDependencies:
+      - '@babel/core'
+      - '@jest/types'
+      - babel-jest
+      - babel-plugin-macros
+      - esbuild
+      - node-notifier
+      - pg-native
+      - supports-color
+      - ts-node
+    dev: false
+
   file:projects/preference-assets.tgz(esbuild@0.20.1)(ts-node@10.9.2):
     resolution: {integrity: sha512-VlBSKBg3XmuMLtxNAS703aS+dhhb5a7H5Ns2nzhhv7w3KlAqtwp6cQ5VLxceNuRaPbTtI+2K+YkjFb2S1ld5VQ==, tarball: file:projects/preference-assets.tgz}
     id: file:projects/preference-assets.tgz
@@ -32536,7 +32719,7 @@
     dev: false

   file:projects/server-pipeline.tgz:
-    resolution: {integrity: sha512-LU5dxarK3XG4FXu2W/Wmu1IZh93faWFIdjKmurdgNZ9AV2Mv7B6fTALyVZoA0O7gysXFNDlxvH/qq7PyfAHivQ==, tarball: file:projects/server-pipeline.tgz}
+    resolution: {integrity: sha512-W5khmA6sUi5kU1UiRR7YTOYH40t96jeBa9ww0g1EUtraXSlO12g2NBpMXswt8/HkMVe6j/56Jj2vq8QVnXSY5w==, tarball: file:projects/server-pipeline.tgz}
     name: '@rush-temp/server-pipeline'
     version: 0.0.0
     dependencies:
@@ -33412,7 +33595,7 @@
     dev: false

   file:projects/server.tgz(esbuild@0.20.1):
-    resolution: {integrity: sha512-fTgDuks26uZ0IhbPq9N/65R1I/hQG/gRRygfBsTmwAQqcJfo25EMz9gfLsuxu6YgZIWgI+UU6wbphtNeWLW/2A==, tarball: file:projects/server.tgz}
+    resolution: {integrity: sha512-93PHAZJSkt0uDRAhNNcAGlJUHFM7/RLUAmzu2CZxcknO0hRM6QTDIkRg6T+GfE3fqyMuRsIJA0CDJdlziht/sA==, tarball: file:projects/server.tgz}
     id: file:projects/server.tgz
     name: '@rush-temp/server'
     version: 0.0.0
@@ -34612,7 +34795,7 @@
     dev: false

   file:projects/tool.tgz(bufferutil@4.0.8)(utf-8-validate@6.0.4):
-    resolution: {integrity: sha512-gjmLNkjPV0dXJNjkowQTvuR1ELM+S3aT3t/y9QJScPMxDIUPZRjkXIoM//4Qk3/RqUOFVhcRQTVoZVcTtK6rCg==, tarball: file:projects/tool.tgz}
+    resolution: {integrity: sha512-LwQbmBaSOZ5IKwCHz2mULcIuEr9rZ2b/7tqUGICHCawUzexUlQVxv2Yt0oFf2aZu83Sittt7dZwnN3sXHX9t9g==, tarball: file:projects/tool.tgz}
     id: file:projects/tool.tgz
     name: '@rush-temp/tool'
     version: 0.0.0
@@ -35337,7 +35520,6 @@
       koa: 2.15.3
       koa-bodyparser: 4.4.1
       koa-router: 12.0.1
-      mongodb: 6.8.0
       prettier: 3.2.5
       ts-jest: 29.1.2(esbuild@0.20.1)(jest@29.7.0)(typescript@5.3.3)
       ts-node: 10.9.2(@types/node@20.11.19)(typescript@5.3.3)


@@ -19,6 +19,16 @@ services:
     ports:
       - 27017:27017
     restart: unless-stopped
+  postgres:
+    image: postgres
+    container_name: postgres
+    environment:
+      - POSTGRES_PASSWORD=example
+    volumes:
+      - db:/data/db
+    ports:
+      - 5432:5432
+    restart: unless-stopped
   minio:
     image: 'minio/minio'
     command: server /data --address ":9000" --console-address ":9001"
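
With this service up, the database is reachable with the compose credentials. A minimal connectivity check using the pg 8.12.0 driver this commit adds (the script itself is illustrative and not part of the repository):

  import { Pool } from 'pg'

  // Credentials match the compose file above.
  const pool = new Pool({ connectionString: 'postgresql://postgres:example@localhost:5432' })
  const res = await pool.query('SELECT 1 AS ok')
  console.log(res.rows[0].ok) // prints 1
  await pool.end()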


@@ -79,10 +79,12 @@
     "@hcengineering/model-task": "^0.6.0",
     "@hcengineering/model-activity": "^0.6.0",
     "@hcengineering/model-lead": "^0.6.0",
+    "@hcengineering/postgres": "^0.6.0",
     "@hcengineering/mongo": "^0.6.1",
     "@hcengineering/platform": "^0.6.11",
     "@hcengineering/recruit": "^0.6.29",
     "@hcengineering/rekoni": "^0.6.0",
+    "@hcengineering/server-pipeline": "^0.6.0",
     "@hcengineering/server-attachment": "^0.6.1",
     "@hcengineering/server-attachment-resources": "^0.6.0",
     "@hcengineering/server-collaboration": "^0.6.0",


@@ -73,6 +73,7 @@ addLocation(serverAiBotId, () => import('@hcengineering/server-ai-bot-resources'
 function prepareTools (): {
   mongodbUri: string
+  dbUrl: string | undefined
   txes: Tx[]
   version: Data<Version>
   migrateOperations: [string, MigrateOperation][]


@@ -14,11 +14,14 @@
 //
 import core, {
+  AccountRole,
   MeasureMetricsContext,
   RateLimiter,
   TxOperations,
   concatLink,
   generateId,
+  getWorkspaceId,
+  makeCollaborativeDoc,
   metricsToString,
   newMetrics,
   systemAccountEmail,
@@ -40,6 +43,10 @@ import os from 'os'
 import { Worker, isMainThread, parentPort } from 'worker_threads'
 import { CSVWriter } from './csv'
+import { AvatarType, type PersonAccount } from '@hcengineering/contact'
+import contact from '@hcengineering/model-contact'
+import recruit from '@hcengineering/model-recruit'
+import { type Vacancy } from '@hcengineering/recruit'
 import { WebSocket } from 'ws'

 interface StartMessage {
@@ -503,3 +510,117 @@ export async function stressBenchmark (transactor: string, mode: StressBenchmark
     }
   }
 }
+
+export async function testFindAll (endpoint: string, workspace: string, email: string): Promise<void> {
+  const connection = await connect(endpoint, getWorkspaceId(workspace), email)
+  try {
+    const client = new TxOperations(connection, core.account.System)
+    const start = Date.now()
+    const res = await client.findAll(
+      recruit.class.Applicant,
+      {},
+      {
+        lookup: {
+          attachedTo: recruit.mixin.Candidate,
+          space: recruit.class.Vacancy
+        }
+      }
+    )
+    console.log('Find all', res.length, 'time', Date.now() - start)
+  } finally {
+    await connection.close()
+  }
+}
+
+export async function generateWorkspaceData (
+  endpoint: string,
+  workspace: string,
+  parallel: boolean,
+  user: string
+): Promise<void> {
+  const connection = await connect(endpoint, getWorkspaceId(workspace))
+  const client = new TxOperations(connection, core.account.System)
+  try {
+    const acc = await client.findOne(contact.class.PersonAccount, { email: user })
+    if (acc == null) {
+      throw new Error('User not found')
+    }
+    const employees: Ref<PersonAccount>[] = [acc._id]
+    const start = Date.now()
+    for (let i = 0; i < 100; i++) {
+      const acc = await generateEmployee(client)
+      employees.push(acc)
+    }
+    if (parallel) {
+      const promises: Promise<void>[] = []
+      for (let i = 0; i < 10; i++) {
+        promises.push(generateVacancy(client, employees))
+      }
+      await Promise.all(promises)
+    } else {
+      for (let i = 0; i < 10; i++) {
+        await generateVacancy(client, employees)
+      }
+    }
+    console.log('Generate', Date.now() - start)
+  } finally {
+    await connection.close()
+  }
+}
+
+export async function generateEmployee (client: TxOperations): Promise<Ref<PersonAccount>> {
+  const personId = await client.createDoc(contact.class.Person, contact.space.Contacts, {
+    name: generateId().toString(),
+    city: '',
+    avatarType: AvatarType.COLOR
+  })
+
+  await client.createMixin(personId, contact.class.Person, contact.space.Contacts, contact.mixin.Employee, {
+    active: true
+  })
+
+  const acc = await client.createDoc(contact.class.PersonAccount, core.space.Model, {
+    person: personId,
+    role: AccountRole.User,
+    email: personId
+  })
+  return acc
+}
+
+async function generateVacancy (client: TxOperations, members: Ref<PersonAccount>[]): Promise<void> {
+  // generate vacancies
+  const _id = generateId<Vacancy>()
+  await client.createDoc(
+    recruit.class.Vacancy,
+    core.space.Space,
+    {
+      name: generateId().toString(),
+      number: 0,
+      fullDescription: makeCollaborativeDoc(_id, 'fullDescription'),
+      type: recruit.template.DefaultVacancy,
+      description: '',
+      private: false,
+      members,
+      archived: false
+    },
+    _id
+  )
+  for (let i = 0; i < 100; i++) {
+    // generate candidate
+    const personId = await client.createDoc(contact.class.Person, contact.space.Contacts, {
+      name: generateId().toString(),
+      city: '',
+      avatarType: AvatarType.COLOR
+    })
+    await client.createMixin(personId, contact.class.Person, contact.space.Contacts, recruit.mixin.Candidate, {})
+    // generate applicants
+    await client.addCollection(recruit.class.Applicant, _id, personId, recruit.mixin.Candidate, 'applications', {
+      status: recruit.taskTypeStatus.Backlog,
+      number: i + 1,
+      identifier: `APP-${i + 1}`,
+      assignee: null,
+      rank: '',
+      startDate: null,
+      dueDate: null,
+      kind: recruit.taskTypes.Applicant
+    })
+  }
+}
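
For orientation, these helpers are driven by the new perfomance tool command added further down; a trimmed sketch of that flow (token, wsid, ws and email stand in for the values the command generates):

  const token = generateToken(systemAccountEmail, wsid)
  const endpoint = await getTransactorEndpoint(token, 'external')
  await generateWorkspaceData(endpoint, ws, false, email) // sequential; pass true to create the 10 vacancies in parallel
  await testFindAll(endpoint, ws, email) // times a findAll of Applicants with Candidate/Vacancy lookups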

dev/tool/src/db.ts (new file)

@@ -0,0 +1,68 @@
+import { type Doc, type WorkspaceId } from '@hcengineering/core'
+import { getMongoClient, getWorkspaceDB } from '@hcengineering/mongo'
+import { convertDoc, createTable, getDBClient, retryTxn, translateDomain } from '@hcengineering/postgres'
+
+export async function moveFromMongoToPG (
+  mongoUrl: string,
+  dbUrl: string | undefined,
+  workspaces: WorkspaceId[]
+): Promise<void> {
+  if (dbUrl === undefined) {
+    throw new Error('dbUrl is required')
+  }
+  const client = getMongoClient(mongoUrl)
+  const mongo = await client.getClient()
+  const pg = getDBClient(dbUrl)
+  const pgClient = await pg.getClient()
+
+  for (let index = 0; index < workspaces.length; index++) {
+    const ws = workspaces[index]
+    try {
+      const mongoDB = getWorkspaceDB(mongo, ws)
+      const collections = await mongoDB.collections()
+      await createTable(
+        pgClient,
+        collections.map((c) => c.collectionName)
+      )
+      for (const collection of collections) {
+        const cursor = collection.find()
+        const domain = translateDomain(collection.collectionName)
+        while (true) {
+          const doc = (await cursor.next()) as Doc | null
+          if (doc === null) break
+          try {
+            const converted = convertDoc(doc, ws.name)
+            await retryTxn(pgClient, async (client) => {
+              await client.query(
+                `INSERT INTO ${domain} (_id, "workspaceId", _class, "createdBy", "modifiedBy", "modifiedOn", "createdOn", space, "attachedTo", data) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+                [
+                  converted._id,
+                  converted.workspaceId,
+                  converted._class,
+                  converted.createdBy,
+                  converted.modifiedBy,
+                  converted.modifiedOn,
+                  converted.createdOn,
+                  converted.space,
+                  converted.attachedTo,
+                  converted.data
+                ]
+              )
+            })
+          } catch (err) {
+            console.log('error when move doc', doc._id, doc._class, err)
+            continue
+          }
+        }
+      }
+      if (index % 100 === 0) {
+        console.log('Move workspace', index, workspaces.length)
+      }
+    } catch (err) {
+      console.log('Error when move workspace', ws.name, err)
+      throw err
+    }
+  }
+  pg.close()
+  client.close()
+}
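
The INSERT above implies the Postgres layout: a few promoted columns plus a data column for the rest of the document. A rough sketch of the shape convertDoc presumably returns; the exact field split is an assumption inferred from the column list, not a documented contract of @hcengineering/postgres:

  // Hypothetical shape, inferred from the INSERT statement above.
  interface ConvertedDoc {
    _id: string
    workspaceId: string // ws.name, passed as the second argument
    _class: string
    createdBy?: string
    modifiedBy: string
    modifiedOn: number
    createdOn?: number
    space: string
    attachedTo?: string
    data: object // all remaining document fields, stored as JSON
  }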


@@ -19,6 +19,7 @@ import accountPlugin, {
   assignWorkspace,
   confirmEmail,
   createAcc,
+  createWorkspace as createWorkspaceRecord,
   dropAccount,
   dropWorkspace,
   dropWorkspaceFull,
@@ -32,10 +33,8 @@
   setAccountAdmin,
   setRole,
   updateWorkspace,
-  createWorkspace as createWorkspaceRecord,
   type Workspace
 } from '@hcengineering/account'
-import { createWorkspace, upgradeWorkspace } from '@hcengineering/workspace-service'
 import { setMetadata } from '@hcengineering/platform'
 import {
   backup,
@@ -54,8 +53,10 @@ import serverClientPlugin, {
   login,
   selectWorkspace
 } from '@hcengineering/server-client'
+import { getServerPipeline } from '@hcengineering/server-pipeline'
 import serverToken, { decodeToken, generateToken } from '@hcengineering/server-token'
 import toolPlugin, { connect, FileModelLogger } from '@hcengineering/server-tool'
+import { createWorkspace, upgradeWorkspace } from '@hcengineering/workspace-service'
 import path from 'path'

 import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
@@ -66,6 +67,8 @@ import { diffWorkspace, recreateElastic, updateField } from './workspace'
 import core, {
   AccountRole,
+  concatLink,
+  generateId,
   getWorkspaceId,
   MeasureMetricsContext,
   metricsToString,
@@ -79,7 +82,7 @@
   type Tx,
   type Version,
   type WorkspaceId,
-  concatLink
+  type WorkspaceIdWithUrl
 } from '@hcengineering/core'
 import { consoleModelLogger, type MigrateOperation } from '@hcengineering/model'
 import contact from '@hcengineering/model-contact'
@@ -87,7 +90,14 @@ import { getMongoClient, getWorkspaceDB } from '@hcengineering/mongo'
 import type { StorageAdapter, StorageAdapterEx } from '@hcengineering/server-core'
 import { deepEqual } from 'fast-equals'
 import { createWriteStream, readFileSync } from 'fs'
-import { benchmark, benchmarkWorker, stressBenchmark, type StressBenchmarkMode } from './benchmark'
+import {
+  benchmark,
+  benchmarkWorker,
+  generateWorkspaceData,
+  stressBenchmark,
+  testFindAll,
+  type StressBenchmarkMode
+} from './benchmark'
 import {
   cleanArchivedSpaces,
   cleanRemovedTransactions,
@@ -101,11 +111,12 @@
   restoreRecruitingTaskTypes
 } from './clean'
 import { changeConfiguration } from './configuration'
+import { moveFromMongoToPG } from './db'
 import { fixJsonMarkup, migrateMarkup } from './markup'
 import { fixMixinForeignAttributes, showMixinForeignAttributes } from './mixin'
+import { importNotion } from './notion'
 import { fixAccountEmails, renameAccount } from './renameAccount'
 import { moveFiles, syncFiles } from './storage'
-import { importNotion } from './notion'

 const colorConstants = {
   colorRed: '\u001b[31m',
@@ -125,6 +136,7 @@
 export function devTool (
   prepareTools: () => {
     mongodbUri: string
+    dbUrl: string | undefined
     txes: Tx[]
     version: Data<Version>
     migrateOperations: [string, MigrateOperation][]
@@ -1470,7 +1482,7 @@
     .option('-w, --workspace <workspace>', 'Selected workspace only', '')
     .option('-c, --concurrency <concurrency>', 'Number of documents being processed concurrently', '10')
     .action(async (cmd: { workspace: string, concurrency: string }) => {
-      const { mongodbUri } = prepareTools()
+      const { mongodbUri, dbUrl } = prepareTools()
       await withDatabase(mongodbUri, async (db, client) => {
         await withStorage(mongodbUri, async (adapter) => {
           const workspaces = await listWorkspacesPure(db)
@@ -1482,8 +1494,15 @@
             const wsId = getWorkspaceId(workspace.workspace)
             console.log('processing workspace', workspace.workspace, index, workspaces.length)

-            await migrateMarkup(toolCtx, adapter, wsId, client, mongodbUri, parseInt(cmd.concurrency))
+            const wsUrl: WorkspaceIdWithUrl = {
+              name: workspace.workspace,
+              workspaceName: workspace.workspaceName ?? '',
+              workspaceUrl: workspace.workspaceUrl ?? ''
+            }
+            const { pipeline } = await getServerPipeline(toolCtx, mongodbUri, dbUrl, wsUrl)
+            await migrateMarkup(toolCtx, adapter, wsId, client, pipeline, parseInt(cmd.concurrency))

             console.log('...done', workspace.workspace)
             index++
@@ -1502,6 +1521,57 @@
       })
     })

+  program.command('move-to-pg').action(async () => {
+    const { mongodbUri, dbUrl } = prepareTools()
+    await withDatabase(mongodbUri, async (db) => {
+      const workspaces = await listWorkspacesRaw(db)
+      await moveFromMongoToPG(
+        mongodbUri,
+        dbUrl,
+        workspaces.map((it) => getWorkspaceId(it.workspace))
+      )
+    })
+  })
+
+  program
+    .command('perfomance')
+    .option('-p, --parallel', '', false)
+    .action(async (cmd: { parallel: boolean }) => {
+      const { mongodbUri, txes, version, migrateOperations } = prepareTools()
+      await withDatabase(mongodbUri, async (db) => {
+        const email = generateId()
+        const ws = generateId()
+        const wsid = getWorkspaceId(ws)
+        const start = new Date()
+        const measureCtx = new MeasureMetricsContext('create-workspace', {})
+        const wsInfo = await createWorkspaceRecord(measureCtx, db, null, email, ws, ws)
+
+        // update the record so it's not taken by one of the workers for the next 60 seconds
+        await updateWorkspace(db, wsInfo, {
+          mode: 'creating',
+          progress: 0,
+          lastProcessingTime: Date.now() + 1000 * 60
+        })
+
+        await createWorkspace(measureCtx, version, null, wsInfo, txes, migrateOperations)
+
+        await updateWorkspace(db, wsInfo, {
+          mode: 'active',
+          progress: 100,
+          disabled: false,
+          version
+        })
+        await createAcc(toolCtx, db, null, email, '1234', '', '', true)
+        await assignWorkspace(toolCtx, db, null, email, ws, AccountRole.User)
+        console.log('Workspace created in', new Date().getTime() - start.getTime(), 'ms')
+        const token = generateToken(systemAccountEmail, wsid)
+        const endpoint = await getTransactorEndpoint(token, 'external')
+        await generateWorkspaceData(endpoint, ws, cmd.parallel, email)
+        await testFindAll(endpoint, ws, email)
+        await dropWorkspace(toolCtx, db, null, ws)
+      })
+    })
+
   extendProgram?.(program)

   program.parse(process.argv)


@@ -14,8 +14,8 @@ import core, {
   makeCollaborativeDoc
 } from '@hcengineering/core'
 import { getMongoClient, getWorkspaceDB } from '@hcengineering/mongo'
-import { type StorageAdapter } from '@hcengineering/server-core'
-import { connect, fetchModelFromMongo } from '@hcengineering/server-tool'
+import { type Pipeline, type StorageAdapter } from '@hcengineering/server-core'
+import { connect, fetchModel } from '@hcengineering/server-tool'
 import { jsonToText, markupToYDoc } from '@hcengineering/text'
 import { type Db, type FindCursor, type MongoClient } from 'mongodb'

@@ -120,10 +120,10 @@ export async function migrateMarkup (
   storageAdapter: StorageAdapter,
   workspaceId: WorkspaceId,
   client: MongoClient,
-  mongodbUri: string,
+  pipeline: Pipeline,
   concurrency: number
 ): Promise<void> {
-  const { hierarchy } = await fetchModelFromMongo(ctx, mongodbUri, workspaceId)
+  const { hierarchy } = await fetchModel(ctx, pipeline)
   const workspaceDb = client.db(workspaceId.name)


@@ -69,37 +69,41 @@ async function migrateAvatars (client: MigrationClient): Promise<void> {
     _class: { $in: classes },
     avatar: { $regex: 'color|gravatar://.*' }
   })
-  while (true) {
-    const docs = await i.next(50)
-    if (docs === null || docs?.length === 0) {
-      break
-    }
-    const updates: { filter: MigrationDocumentQuery<Contact>, update: MigrateUpdate<Contact> }[] = []
-    for (const d of docs) {
-      if (d.avatar?.startsWith(colorPrefix) ?? false) {
-        d.avatarProps = { color: d.avatar?.slice(colorPrefix.length) ?? '' }
-        updates.push({
-          filter: { _id: d._id },
-          update: {
-            avatarType: AvatarType.COLOR,
-            avatar: null,
-            avatarProps: { color: d.avatar?.slice(colorPrefix.length) ?? '' }
-          }
-        })
-      } else if (d.avatar?.startsWith(gravatarPrefix) ?? false) {
-        updates.push({
-          filter: { _id: d._id },
-          update: {
-            avatarType: AvatarType.GRAVATAR,
-            avatar: null,
-            avatarProps: { url: d.avatar?.slice(gravatarPrefix.length) ?? '' }
-          }
-        })
-      }
-    }
-    if (updates.length > 0) {
-      await client.bulk(DOMAIN_CONTACT, updates)
-    }
-  }
+  try {
+    while (true) {
+      const docs = await i.next(50)
+      if (docs === null || docs?.length === 0) {
+        break
+      }
+      const updates: { filter: MigrationDocumentQuery<Contact>, update: MigrateUpdate<Contact> }[] = []
+      for (const d of docs) {
+        if (d.avatar?.startsWith(colorPrefix) ?? false) {
+          d.avatarProps = { color: d.avatar?.slice(colorPrefix.length) ?? '' }
+          updates.push({
+            filter: { _id: d._id },
+            update: {
+              avatarType: AvatarType.COLOR,
+              avatar: null,
+              avatarProps: { color: d.avatar?.slice(colorPrefix.length) ?? '' }
+            }
+          })
+        } else if (d.avatar?.startsWith(gravatarPrefix) ?? false) {
+          updates.push({
+            filter: { _id: d._id },
+            update: {
+              avatarType: AvatarType.GRAVATAR,
+              avatar: null,
+              avatarProps: { url: d.avatar?.slice(gravatarPrefix.length) ?? '' }
+            }
+          })
+        }
+      }
+      if (updates.length > 0) {
+        await client.bulk(DOMAIN_CONTACT, updates)
+      }
+    }
+  } finally {
+    await i.close()
+  }

   await client.update(


@@ -20,6 +20,7 @@ import {
   DOMAIN_CONFIGURATION,
   DOMAIN_DOC_INDEX_STATE,
   DOMAIN_MIGRATION,
+  DOMAIN_SPACE,
   DOMAIN_STATUS,
   DOMAIN_TRANSIENT,
   DOMAIN_TX,
@@ -76,7 +77,6 @@
 } from './core'
 import { definePermissions } from './permissions'
 import {
-  DOMAIN_SPACE,
   TAccount,
   TPermission,
   TRole,
@@ -101,7 +101,7 @@
   TTxWorkspaceEvent
 } from './tx'

-export { coreId } from '@hcengineering/core'
+export { coreId, DOMAIN_SPACE } from '@hcengineering/core'
 export * from './core'
 export { coreOperation } from './migration'
 export * from './security'


@@ -17,6 +17,7 @@ import { saveCollaborativeDoc } from '@hcengineering/collaboration'
 import core, {
   DOMAIN_BLOB,
   DOMAIN_DOC_INDEX_STATE,
+  DOMAIN_SPACE,
   DOMAIN_STATUS,
   DOMAIN_TX,
   MeasureMetricsContext,
@@ -49,7 +50,6 @@
 } from '@hcengineering/model'
 import { type StorageAdapter, type StorageAdapterEx } from '@hcengineering/storage'
 import { markupToYDoc } from '@hcengineering/text'
-import { DOMAIN_SPACE } from './security'

 async function migrateStatusesToModel (client: MigrationClient): Promise<void> {
   // Move statuses to model:
@@ -326,6 +326,16 @@ export const coreOperation: MigrateOperation = {
         core.class.TypedSpace
       )
     }
+    },
+    {
+      state: 'default-space',
+      func: async (client) => {
+        await createDefaultSpace(client, core.space.Tx, { name: 'Space for all txes' })
+        await createDefaultSpace(client, core.space.DerivedTx, { name: 'Space for derived txes' })
+        await createDefaultSpace(client, core.space.Model, { name: 'Space for model' })
+        await createDefaultSpace(client, core.space.Configuration, { name: 'Space for config' })
+        await createDefaultSpace(client, core.space.Workspace, { name: 'Space for common things' })
+      }
     }
   ])
 }


@@ -15,13 +15,13 @@
 import {
   DOMAIN_MODEL,
+  DOMAIN_SPACE,
   IndexKind,
   type Account,
   type AccountRole,
   type Arr,
   type Class,
   type CollectionSize,
-  type Domain,
   type Permission,
   type Ref,
   type Role,
@@ -48,8 +48,6 @@ import { getEmbeddedLabel, type Asset, type IntlString } from '@hcengineering/platform'
 import core from './component'
 import { TAttachedDoc, TDoc } from './core'

-export const DOMAIN_SPACE = 'space' as Domain
-
 // S P A C E

 @Model(core.class.Space, core.class.Doc, DOMAIN_SPACE)


@@ -318,6 +318,11 @@ export interface TypeAny<AnyComponent = any> extends Type<any> {
  */
 export const DOMAIN_MODEL = 'model' as Domain

+/**
+ * @public
+ */
+export const DOMAIN_SPACE = 'space' as Domain
+
 /**
  * @public
  */


@@ -165,6 +165,17 @@ function $unset (document: Doc, keyval: Record<string, PropertyType>): void {
   }
 }

+function $rename (document: Doc, keyval: Record<string, string>): void {
+  const doc = document as any
+  for (const key in keyval) {
+    if (doc[key] !== undefined) {
+      doc[keyval[key]] = doc[key]
+      // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
+      delete doc[key]
+    }
+  }
+}
+
 const operators: Record<string, _OperatorFunc> = {
   $push,
   $pull,
@@ -172,7 +183,8 @@
   $move,
   $pushMixin,
   $inc,
-  $unset
+  $unset,
+  $rename
 }

 /**
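
A quick illustration of what the new operator does to a plain object ($rename is internal to this module; the values are illustrative):

  const doc: any = { _id: 'person1', title: 'Hello' }
  $rename(doc, { title: 'name' })
  // doc is now { _id: 'person1', name: 'Hello' }; the old key is deleted, its value kept under the new key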


@@ -15,7 +15,8 @@
 import type { Account, Doc, Domain, Ref } from './classes'
 import { MeasureContext } from './measurements'
-import type { Tx } from './tx'
+import { DocumentQuery, FindOptions } from './storage'
+import type { DocumentUpdate, Tx } from './tx'
 import type { WorkspaceIdWithUrl } from './utils'

 /**
@@ -48,6 +49,8 @@ export interface SessionData {
   sessionId: string
   admin?: boolean

+  isTriggerCtx?: boolean
+
   account: Account
   getAccount: (account: Ref<Account>) => Account | undefined
@@ -76,6 +79,23 @@ export interface LowLevelStorage {
   // Low level direct group API
   groupBy: <T>(ctx: MeasureContext, domain: Domain, field: string) => Promise<Set<T>>
+
+  // migrations
+  rawFindAll: <T extends Doc>(domain: Domain, query: DocumentQuery<T>, options?: FindOptions<T>) => Promise<T[]>
+
+  rawUpdate: <T extends Doc>(domain: Domain, query: DocumentQuery<T>, operations: DocumentUpdate<T>) => Promise<void>
+
+  // Traverse documents
+  traverse: <T extends Doc>(
+    domain: Domain,
+    query: DocumentQuery<T>,
+    options?: Pick<FindOptions<T>, 'sort' | 'limit' | 'projection'>
+  ) => Promise<Iterator<T>>
+}
+
+export interface Iterator<T extends Doc> {
+  next: (count: number) => Promise<T[] | null>
+  close: () => Promise<void>
 }

 export interface Branding {
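
The Iterator contract mirrors how the contact migration above consumes it: pull batches with next(count) until null or an empty array comes back, and always close it. A hedged usage sketch (lowLevel and DOMAIN_TX stand in for a concrete LowLevelStorage instance and a real domain):

  const iter = await lowLevel.traverse(DOMAIN_TX, {}, { limit: 1000 })
  try {
    while (true) {
      const docs = await iter.next(50)
      if (docs === null || docs.length === 0) break
      // process the batch of up to 50 documents
    }
  } finally {
    await iter.close() // release the underlying cursor
  }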


@@ -80,7 +80,7 @@ export interface MigrationClient {
   traverse: <T extends Doc>(
     domain: Domain,
     query: MigrationDocumentQuery<T>,
-    options?: Omit<FindOptions<T>, 'lookup'>
+    options?: Pick<FindOptions<T>, 'sort' | 'limit' | 'projection'>
   ) => Promise<MigrationIterator<T>>

   // Allow to raw update documents inside domain.
@@ -88,15 +88,15 @@
     domain: Domain,
     query: MigrationDocumentQuery<T>,
     operations: MigrateUpdate<T>
-  ) => Promise<MigrationResult>
+  ) => Promise<void>

   bulk: <T extends Doc>(
     domain: Domain,
     operations: { filter: MigrationDocumentQuery<T>, update: MigrateUpdate<T> }[]
-  ) => Promise<MigrationResult>
+  ) => Promise<void>

   // Move documents per domain
-  move: <T extends Doc>(sourceDomain: Domain, query: DocumentQuery<T>, targetDomain: Domain) => Promise<MigrationResult>
+  move: <T extends Doc>(sourceDomain: Domain, query: DocumentQuery<T>, targetDomain: Domain) => Promise<void>

   create: <T extends Doc>(domain: Domain, doc: T | T[]) => Promise<void>
   delete: <T extends Doc>(domain: Domain, _id: Ref<T>) => Promise<void>


@@ -13,7 +13,15 @@
 // limitations under the License.
 -->
 <script lang="ts">
-  import { AvatarType, Channel, combineName, ContactEvents, Employee, PersonAccount } from '@hcengineering/contact'
+  import {
+    AvatarType,
+    Channel,
+    combineName,
+    ContactEvents,
+    Employee,
+    Person,
+    PersonAccount
+  } from '@hcengineering/contact'
   import core, { AccountRole, AttachedData, Data, generateId, Ref } from '@hcengineering/core'
   import login from '@hcengineering/login'
   import { getResource } from '@hcengineering/platform'
@@ -42,10 +50,9 @@
   let saving: boolean = false

-  const person: Data<Employee> = {
+  const person: Data<Person> = {
     name: '',
     city: '',
-    active: true,
     avatarType: AvatarType.COLOR
   }


@@ -1,5 +1,6 @@
 // Copyright © 2022 Hardcore Engineering Inc.

+import { Analytics } from '@hcengineering/analytics'
 import core, {
   type Class,
   type Data,
@@ -10,19 +11,12 @@
 } from '@hcengineering/core'
 import { type Asset } from '@hcengineering/platform'
 import { getClient } from '@hcengineering/presentation'
-import {
-  type InitialKnowledge,
-  type TagCategory,
-  type TagElement,
-  type TagReference,
-  TagsEvents
-} from '@hcengineering/tags'
+import { type TagCategory, type TagElement, type TagReference, TagsEvents } from '@hcengineering/tags'
 import { type ColorDefinition, getColorNumberByText } from '@hcengineering/ui'
 import { type Filter } from '@hcengineering/view'
 import { FilterQuery } from '@hcengineering/view-resources'
 import { writable } from 'svelte/store'
 import tags from './plugin'
-import { Analytics } from '@hcengineering/analytics'

 export function getTagStyle (color: ColorDefinition, selected = false): string {
   return `
@@ -37,11 +31,10 @@ export async function getRefs (filter: Filter, onUpdate: () => void): Promise<Array<Ref<Doc>>> {
   const promise = new Promise<Array<Ref<Doc>>>((resolve, reject) => {
     const level = filter.props?.level ?? 0
     const q: DocumentQuery<TagReference> = {
-      tag: { $in: filter.value },
-      weight:
-        level === 0
-          ? { $in: [null as unknown as InitialKnowledge, 0, 1, 2, 3, 4, 5, 6, 7, 8] }
-          : { $gte: level as TagReference['weight'] }
+      tag: { $in: filter.value }
+    }
+    if (level > 0) {
+      q.weight = { $gte: level as TagReference['weight'] }
     }
     const refresh = lq.query(tags.class.TagReference, q, (refs: FindResult<TagReference>) => {
       const result = Array.from(new Set(refs.map((p) => p.attachedTo)))


@@ -15,7 +15,7 @@
     "_phase:bundle": "rushx bundle",
     "_phase:docker-build": "rushx docker:build",
     "_phase:docker-staging": "rushx docker:staging",
-    "bundle": "mkdir -p bundle && esbuild src/__start.ts --sourcemap=inline --bundle --keep-names --platform=node --external:*.node --external:bufferutil --external:snappy --external:utf-8-validate --external:msgpackr-extract --define:process.env.MODEL_VERSION=$(node ../../common/scripts/show_version.js) --define:process.env.GIT_REVISION=$(../../common/scripts/git_version.sh) --outfile=bundle/bundle.js --log-level=error --sourcemap=external",
+    "bundle": "mkdir -p bundle && esbuild src/__start.ts --sourcemap=inline --bundle --keep-names --platform=node --external:*.node --external:bufferutil --external:snappy --external:utf-8-validate --external:msgpackr-extract --define:process.env.GIT_REVISION=$(../../common/scripts/git_version.sh) --outfile=bundle/bundle.js --log-level=error --sourcemap=external",
     "docker:build": "../../common/scripts/docker_build.sh hardcoreeng/transactor",
     "docker:tbuild": "docker build -t hardcoreeng/transactor . --platform=linux/amd64 && ../../common/scripts/docker_tag_push.sh hardcoreeng/transactor",
     "docker:abuild": "docker build -t hardcoreeng/transactor . --platform=linux/arm64 && ../../common/scripts/docker_tag_push.sh hardcoreeng/transactor",


@@ -31,7 +31,7 @@ registerStringLoaders()
  * @public
  */
 export function start (
-  dbUrl: string,
+  dbUrls: string,
   opt: {
     fullTextUrl: string
     storageConfig: StorageConfiguration
@@ -57,18 +57,20 @@
   registerServerPlugins()

-  const externalStorage = buildStorageFromConfig(opt.storageConfig, dbUrl)
+  const [mainDbUrl, rawDbUrl] = dbUrls.split(';')
+
+  const externalStorage = buildStorageFromConfig(opt.storageConfig, rawDbUrl ?? mainDbUrl)

   const pipelineFactory = createServerPipeline(
     metrics,
-    dbUrl,
-    { ...opt, externalStorage },
+    dbUrls,
+    { ...opt, externalStorage, adapterSecurity: rawDbUrl !== undefined },
     {
       serviceAdapters: {
         [serverAiBotId]: {
           factory: createAIBotAdapter,
           db: '%ai-bot',
-          url: dbUrl
+          url: rawDbUrl ?? mainDbUrl
         }
       }
     }


@@ -836,6 +836,11 @@
       "projectFolder": "server/mongo",
       "shouldPublish": false
     },
+    {
+      "packageName": "@hcengineering/postgres",
+      "projectFolder": "server/postgres",
+      "shouldPublish": false
+    },
     {
       "packageName": "@hcengineering/elastic",
       "projectFolder": "server/elastic",


@@ -35,11 +35,12 @@ export function serveAccount (measureCtx: MeasureContext, brandings: BrandingMap
   console.log('Starting account service with brandings: ', brandings)
   const methods = getMethods()
   const ACCOUNT_PORT = parseInt(process.env.ACCOUNT_PORT ?? '3000')
-  const dbUri = process.env.MONGO_URL
-  if (dbUri === undefined) {
-    console.log('Please provide mongodb url')
+  const dbUrls = process.env.MONGO_URL
+  if (dbUrls === undefined) {
+    console.log('Please provide db url')
     process.exit(1)
   }
+  const [dbUrl, mongoUrl] = dbUrls.split(';')

   const transactorUri = process.env.TRANSACTOR_URL
   if (transactorUri === undefined) {
@@ -89,7 +90,7 @@
   }
   setMetadata(serverClientPlugin.metadata.UserAgent, 'AccountService')

-  const client: MongoClientReference = getMongoClient(dbUri)
+  const client: MongoClientReference = getMongoClient(mongoUrl ?? dbUrl)
   let _client: MongoClient | Promise<MongoClient> = client.getClient()
   const app = new Koa()

View File

@ -33,6 +33,7 @@ import core, {
generateId, generateId,
getWorkspaceId, getWorkspaceId,
groupByArray, groupByArray,
isWorkspaceCreating,
MeasureContext, MeasureContext,
RateLimiter, RateLimiter,
Ref, Ref,
@ -42,13 +43,11 @@ import core, {
TxOperations, TxOperations,
Version, Version,
versionToString, versionToString,
isWorkspaceCreating,
WorkspaceId, WorkspaceId,
type Branding, type Branding,
type WorkspaceMode type WorkspaceMode
} from '@hcengineering/core' } from '@hcengineering/core'
import platform, { getMetadata, PlatformError, Severity, Status, translate } from '@hcengineering/platform' import platform, { getMetadata, PlatformError, Severity, Status, translate } from '@hcengineering/platform'
import { type StorageAdapter } from '@hcengineering/server-core' import { type StorageAdapter } from '@hcengineering/server-core'
import { decodeToken as decodeTokenRaw, generateToken, type Token } from '@hcengineering/server-token' import { decodeToken as decodeTokenRaw, generateToken, type Token } from '@hcengineering/server-token'
import toolPlugin, { connect } from '@hcengineering/server-tool' import toolPlugin, { connect } from '@hcengineering/server-tool'

View File

@ -14,6 +14,7 @@
// //
import { import {
type LowLevelStorage,
type Class, type Class,
type Doc, type Doc,
type DocumentQuery, type DocumentQuery,
@ -26,7 +27,6 @@ import {
type MeasureContext, type MeasureContext,
type ModelDb, type ModelDb,
type Ref, type Ref,
type StorageIterator,
type Tx, type Tx,
type TxResult, type TxResult,
type WorkspaceId type WorkspaceId
@ -36,7 +36,7 @@ import type { ServerFindOptions } from './types'
export interface DomainHelperOperations { export interface DomainHelperOperations {
create: (domain: Domain) => Promise<void> create: (domain: Domain) => Promise<void>
exists: (domain: Domain) => boolean exists: (domain: Domain) => Promise<boolean>
listDomains: () => Promise<Set<Domain>> listDomains: () => Promise<Set<Domain>>
createIndex: (domain: Domain, value: string | FieldIndexConfig<Doc>, options?: { name: string }) => Promise<void> createIndex: (domain: Domain, value: string | FieldIndexConfig<Doc>, options?: { name: string }) => Promise<void>
@ -99,8 +99,8 @@ export type DbAdapterHandler = (
/** /**
* @public * @public
*/ */
export interface DbAdapter { export interface DbAdapter extends LowLevelStorage {
init?: () => Promise<void> init?: (domains?: string[], excludeDomains?: string[]) => Promise<void>
helper: () => DomainHelperOperations helper: () => DomainHelperOperations
@ -114,14 +114,6 @@ export interface DbAdapter {
tx: (ctx: MeasureContext, ...tx: Tx[]) => Promise<TxResult[]> tx: (ctx: MeasureContext, ...tx: Tx[]) => Promise<TxResult[]>
find: (ctx: MeasureContext, domain: Domain, recheck?: boolean) => StorageIterator
load: (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]) => Promise<Doc[]>
upload: (ctx: MeasureContext, domain: Domain, docs: Doc[]) => Promise<void>
clean: (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]) => Promise<void>
groupBy: <T>(ctx: MeasureContext, domain: Domain, field: string) => Promise<Set<T>>
// Bulk update operations // Bulk update operations
update: (ctx: MeasureContext, domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>) => Promise<void> update: (ctx: MeasureContext, domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>) => Promise<void>
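
Since DomainHelperOperations.exists is now asynchronous, callers have to await it; a minimal sketch under that contract (helper name is illustrative):

import { type Domain } from '@hcengineering/core'
import { type DomainHelperOperations } from '@hcengineering/server-core'

// Sketch: await the async exists() check before creating a domain.
async function ensureDomain (ops: DomainHelperOperations, domain: Domain): Promise<void> {
  if (!(await ops.exists(domain))) {
    await ops.create(domain)
  }
}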

View File

@ -93,7 +93,7 @@ export class DomainIndexHelperImpl implements DomainHelper {
const added = new Set<string>() const added = new Set<string>()
try { try {
if (!operations.exists(domain)) { if (!(await operations.exists(domain))) {
return return
} }
const has50Documents = documents > 50 const has50Documents = documents > 50

View File

@ -23,6 +23,7 @@ import core, {
type FindResult, type FindResult,
type Hierarchy, type Hierarchy,
type IndexingConfiguration, type IndexingConfiguration,
type Iterator,
type MeasureContext, type MeasureContext,
ModelDb, ModelDb,
type Ref, type Ref,
@ -32,12 +33,25 @@ import core, {
type TxResult, type TxResult,
type WorkspaceId type WorkspaceId
} from '@hcengineering/core' } from '@hcengineering/core'
import { type DbAdapter, type DomainHelperOperations } from './adapter' import { type DbAdapterHandler, type DbAdapter, type DomainHelperOperations } from './adapter'
/** /**
* @public * @public
*/ */
export class DummyDbAdapter implements DbAdapter { export class DummyDbAdapter implements DbAdapter {
on?: ((handler: DbAdapterHandler) => void) | undefined
async traverse<T extends Doc>(
domain: Domain,
query: DocumentQuery<T>,
options?: Pick<FindOptions<T>, 'sort' | 'limit' | 'projection'>
): Promise<Iterator<T>> {
return {
next: async () => [],
close: async () => {}
}
}
async findAll<T extends Doc>( async findAll<T extends Doc>(
ctx: MeasureContext, ctx: MeasureContext,
_class: Ref<Class<T>>, _class: Ref<Class<T>>,
@ -52,7 +66,7 @@ export class DummyDbAdapter implements DbAdapter {
helper (): DomainHelperOperations { helper (): DomainHelperOperations {
return { return {
create: async () => {}, create: async () => {},
exists: () => true, exists: async () => true,
listDomains: async () => new Set(), listDomains: async () => new Set(),
createIndex: async () => {}, createIndex: async () => {},
dropIndex: async () => {}, dropIndex: async () => {},
@ -90,6 +104,16 @@ export class DummyDbAdapter implements DbAdapter {
async groupBy<T>(ctx: MeasureContext, domain: Domain, field: string): Promise<Set<T>> { async groupBy<T>(ctx: MeasureContext, domain: Domain, field: string): Promise<Set<T>> {
return new Set() return new Set()
} }
async rawFindAll<T extends Doc>(domain: Domain, query: DocumentQuery<T>, options?: FindOptions<T>): Promise<T[]> {
return []
}
async rawUpdate<T extends Doc>(
domain: Domain,
query: DocumentQuery<T>,
operations: DocumentUpdate<T>
): Promise<void> {}
} }
class InMemoryAdapter extends DummyDbAdapter implements DbAdapter { class InMemoryAdapter extends DummyDbAdapter implements DbAdapter {

View File

@ -24,6 +24,7 @@ import {
FindResult, FindResult,
Hierarchy, Hierarchy,
IndexingConfiguration, IndexingConfiguration,
Iterator,
MeasureContext, MeasureContext,
Ref, Ref,
StorageIterator, StorageIterator,
@ -33,7 +34,7 @@ import {
WorkspaceId WorkspaceId
} from '@hcengineering/core' } from '@hcengineering/core'
import { getMetadata } from '@hcengineering/platform' import { getMetadata } from '@hcengineering/platform'
import serverCore, { DbAdapter, type DomainHelperOperations } from '@hcengineering/server-core' import serverCore, { DbAdapter, DbAdapterHandler, type DomainHelperOperations } from '@hcengineering/server-core'
function getIndexName (): string { function getIndexName (): string {
return getMetadata(serverCore.metadata.ElasticIndexName) ?? 'storage_index' return getMetadata(serverCore.metadata.ElasticIndexName) ?? 'storage_index'
@ -61,10 +62,21 @@ class ElasticDataAdapter implements DbAdapter {
this.getDocId = (fulltext) => fulltext.slice(0, -1 * (this.workspaceString.length + 1)) as Ref<Doc> this.getDocId = (fulltext) => fulltext.slice(0, -1 * (this.workspaceString.length + 1)) as Ref<Doc>
} }
init?: ((domains?: string[], excludeDomains?: string[]) => Promise<void>) | undefined
on?: ((handler: DbAdapterHandler) => void) | undefined
async traverse<T extends Doc>(
domain: Domain,
query: DocumentQuery<T>,
options?: Pick<FindOptions<T>, 'sort' | 'limit' | 'projection'>
): Promise<Iterator<T>> {
throw new Error('Method not implemented.')
}
helper (): DomainHelperOperations { helper (): DomainHelperOperations {
return { return {
create: async () => {}, create: async () => {},
exists: () => true, exists: async () => true,
listDomains: async () => new Set(), listDomains: async () => new Set(),
createIndex: async () => {}, createIndex: async () => {},
dropIndex: async () => {}, dropIndex: async () => {},
@ -118,6 +130,18 @@ class ElasticDataAdapter implements DbAdapter {
throw new Error('Method not implemented.') throw new Error('Method not implemented.')
} }
async rawFindAll<T extends Doc>(domain: Domain, query: DocumentQuery<T>, options?: FindOptions<T>): Promise<T[]> {
throw new Error('Method not implemented.')
}
async rawUpdate<T extends Doc>(
domain: Domain,
query: DocumentQuery<T>,
operations: DocumentUpdate<T>
): Promise<void> {
throw new Error('Method not implemented.')
}
async clean (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<void> { async clean (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<void> {
const indexExists = await this.client.indices.exists({ const indexExists = await this.client.indices.exists({
index: this.indexName index: this.indexName

View File

@ -13,14 +13,15 @@
// limitations under the License. // limitations under the License.
// //
import { type MeasureContext } from '@hcengineering/core' import { DOMAIN_TX, type MeasureContext } from '@hcengineering/core'
import { PlatformError, unknownStatus } from '@hcengineering/platform' import { PlatformError, unknownStatus } from '@hcengineering/platform'
import type { import type {
DbAdapter, DbAdapter,
DbConfiguration, DbConfiguration,
Middleware, Middleware,
MiddlewareCreator, MiddlewareCreator,
PipelineContext PipelineContext,
TxAdapter
} from '@hcengineering/server-core' } from '@hcengineering/server-core'
import { BaseMiddleware, createServiceAdaptersManager, DbAdapterManagerImpl } from '@hcengineering/server-core' import { BaseMiddleware, createServiceAdaptersManager, DbAdapterManagerImpl } from '@hcengineering/server-core'
@ -67,11 +68,51 @@ export class DBAdapterMiddleware extends BaseMiddleware implements Middleware {
} }
}) })
const txAdapterName = this.conf.domains[DOMAIN_TX]
const txAdapter = adapters.get(txAdapterName) as TxAdapter
const txAdapterDomains: string[] = []
for (const key in this.conf.domains) {
if (this.conf.domains[key] === txAdapterName) {
txAdapterDomains.push(key)
}
}
await txAdapter.init?.(txAdapterDomains)
const model = await txAdapter.getModel(ctx)
for (const tx of model) {
try {
this.context.hierarchy.tx(tx)
} catch (err: any) {
ctx.warn('failed to apply model transaction, skipping', { tx: JSON.stringify(tx), err })
}
}
await ctx.with('init-adapters', {}, async (ctx) => { await ctx.with('init-adapters', {}, async (ctx) => {
for (const adapter of adapters.values()) { for (const [key, adapter] of adapters) {
await adapter.init?.() // already initialized
if (key !== this.conf.domains[DOMAIN_TX] && adapter.init !== undefined) {
let excludeDomains: string[] | undefined
let domains: string[] | undefined
if (this.conf.defaultAdapter === key) {
excludeDomains = []
for (const domain in this.conf.domains) {
if (this.conf.domains[domain] !== key) {
excludeDomains.push(domain)
}
}
} else {
domains = []
for (const domain in this.conf.domains) {
if (this.conf.domains[domain] === key) {
domains.push(domain)
}
}
}
await adapter.init(domains, excludeDomains)
}
} }
}) })
const metrics = this.conf.metrics.newChild('📔 server-storage', {}) const metrics = this.conf.metrics.newChild('📔 server-storage', {})
const defaultAdapter = adapters.get(this.conf.defaultAdapter) const defaultAdapter = adapters.get(this.conf.defaultAdapter)
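
A sketch (illustrative helper, not from the commit) of how the loop above derives init() arguments once the Tx adapter has already been initialized with its own domain list: the default adapter gets the complement of all explicitly mapped domains, every other adapter only its own.

function initArgs (
  domains: Record<string, string>,
  adapterKey: string,
  defaultAdapter: string
): { domains?: string[], excludeDomains?: string[] } {
  const keys = Object.keys(domains)
  if (adapterKey === defaultAdapter) {
    // Default adapter: handle everything except domains mapped elsewhere.
    return { excludeDomains: keys.filter((d) => domains[d] !== adapterKey) }
  }
  // Dedicated adapter: handle only its explicitly mapped domains.
  return { domains: keys.filter((d) => domains[d] === adapterKey) }
}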

View File

@ -13,7 +13,17 @@
// limitations under the License. // limitations under the License.
// //
import { type Doc, type Domain, type MeasureContext, type Ref, type StorageIterator } from '@hcengineering/core' import {
DocumentQuery,
DocumentUpdate,
FindOptions,
type Doc,
type Domain,
type MeasureContext,
type Ref,
type StorageIterator,
type Iterator
} from '@hcengineering/core'
import { PlatformError, unknownStatus } from '@hcengineering/platform' import { PlatformError, unknownStatus } from '@hcengineering/platform'
import type { Middleware, PipelineContext } from '@hcengineering/server-core' import type { Middleware, PipelineContext } from '@hcengineering/server-core'
import { BaseMiddleware } from '@hcengineering/server-core' import { BaseMiddleware } from '@hcengineering/server-core'
@ -48,8 +58,25 @@ export class LowLevelMiddleware extends BaseMiddleware implements Middleware {
async clean (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<void> { async clean (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<void> {
await adapterManager.getAdapter(domain, true).clean(ctx, domain, docs) await adapterManager.getAdapter(domain, true).clean(ctx, domain, docs)
}, },
async groupBy (ctx, domain, field) { async groupBy<T>(ctx: MeasureContext, domain: Domain, field: string): Promise<Set<T>> {
return await adapterManager.getAdapter(domain, false).groupBy(ctx, domain, field) return await adapterManager.getAdapter(domain, false).groupBy(ctx, domain, field)
},
async rawFindAll<T extends Doc>(domain: Domain, query: DocumentQuery<T>, options?: FindOptions<T>): Promise<T[]> {
return await adapterManager.getAdapter(domain, false).rawFindAll(domain, query, options)
},
async rawUpdate<T extends Doc>(
domain: Domain,
query: DocumentQuery<T>,
operations: DocumentUpdate<T>
): Promise<void> {
await adapterManager.getAdapter(domain, true).rawUpdate(domain, query, operations)
},
async traverse<T extends Doc>(
domain: Domain,
query: DocumentQuery<T>,
options?: Pick<FindOptions<T>, 'sort' | 'limit' | 'projection'>
): Promise<Iterator<T>> {
return await adapterManager.getAdapter(domain, false).traverse(domain, query, options)
} }
} }
return undefined return undefined
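
A sketch of consuming the traverse() iterator exposed here, assuming the core Iterator contract implemented by the Mongo adapter below (batch size and handler are illustrative):

import { type Doc, type Iterator } from '@hcengineering/core'

async function forEachBatch<T extends Doc> (it: Iterator<T>, handle: (docs: T[]) => Promise<void>): Promise<void> {
  try {
    while (true) {
      const batch = await it.next(100) // pull up to 100 docs per call
      if (batch === null || batch.length === 0) break // null signals a cursor error
      await handle(batch)
    }
  } finally {
    await it.close()
  }
}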

View File

@ -73,12 +73,21 @@ export class SpaceSecurityMiddleware extends BaseMiddleware implements Middlewar
core.space.Tx core.space.Tx
] ]
private constructor (
private readonly skipFindCheck: boolean,
context: PipelineContext,
next?: Middleware
) {
super(context, next)
}
static async create ( static async create (
skipFindCheck: boolean,
ctx: MeasureContext, ctx: MeasureContext,
context: PipelineContext, context: PipelineContext,
next: Middleware | undefined next: Middleware | undefined
): Promise<SpaceSecurityMiddleware> { ): Promise<SpaceSecurityMiddleware> {
return new SpaceSecurityMiddleware(context, next) return new SpaceSecurityMiddleware(skipFindCheck, context, next)
} }
private resyncDomains (): void { private resyncDomains (): void {
@ -496,7 +505,7 @@ export class SpaceSecurityMiddleware extends BaseMiddleware implements Middlewar
let clientFilterSpaces: Set<Ref<Space>> | undefined let clientFilterSpaces: Set<Ref<Space>> | undefined
if (!isSystem(account) && account.role !== AccountRole.DocGuest && domain !== DOMAIN_MODEL) { if (!this.skipFindCheck && !isSystem(account) && account.role !== AccountRole.DocGuest && domain !== DOMAIN_MODEL) {
if (!isOwner(account, ctx) || !isSpace) { if (!isOwner(account, ctx) || !isSpace) {
if (query[field] !== undefined) { if (query[field] !== undefined) {
const res = await this.mergeQuery(ctx, account, query[field], domain, isSpace) const res = await this.mergeQuery(ctx, account, query[field], domain, isSpace)

View File

@ -87,6 +87,9 @@ export class TriggersMiddleware extends BaseMiddleware implements Middleware {
const findAll: SessionFindAll = async (ctx, _class, query, options) => { const findAll: SessionFindAll = async (ctx, _class, query, options) => {
const _ctx: MeasureContext = (options as ServerFindOptions<Doc>)?.ctx ?? ctx const _ctx: MeasureContext = (options as ServerFindOptions<Doc>)?.ctx ?? ctx
delete (options as ServerFindOptions<Doc>)?.ctx delete (options as ServerFindOptions<Doc>)?.ctx
if (_ctx.contextData !== undefined) {
_ctx.contextData.isTriggerCtx = true
}
const results = await this.findAll(_ctx, _class, query, options) const results = await this.findAll(_ctx, _class, query, options)
return toFindResult( return toFindResult(

View File

@ -16,6 +16,7 @@
import core, { import core, {
DOMAIN_MODEL, DOMAIN_MODEL,
DOMAIN_TX, DOMAIN_TX,
type Iterator,
SortingOrder, SortingOrder,
TxProcessor, TxProcessor,
addOperation, addOperation,
@ -162,6 +163,103 @@ abstract class MongoAdapterBase implements DbAdapter {
this._db = new DBCollectionHelper(db) this._db = new DBCollectionHelper(db)
} }
async traverse<T extends Doc>(
domain: Domain,
query: DocumentQuery<T>,
options?: Pick<FindOptions<T>, 'sort' | 'limit' | 'projection'>
): Promise<Iterator<T>> {
let cursor = this.db.collection(domain).find<T>(this.translateRawQuery(query))
if (options?.limit !== undefined) {
cursor = cursor.limit(options.limit)
}
if (options !== null && options !== undefined) {
if (options.sort !== undefined) {
const sort: Sort = {}
for (const key in options.sort) {
const order = options.sort[key] === SortingOrder.Ascending ? 1 : -1
sort[key] = order
}
cursor = cursor.sort(sort)
}
}
return {
next: async (size: number) => {
const docs: T[] = []
while (docs.length < size && (await cursor.hasNext())) {
try {
const d = await cursor.next()
if (d !== null) {
docs.push(d)
} else {
break
}
} catch (err) {
console.error(err)
return null
}
}
return docs
},
close: async () => {
await cursor.close()
}
}
}
private translateRawQuery<T extends Doc>(query: DocumentQuery<T>): Filter<Document> {
const translated: any = {}
for (const key in query) {
const value = (query as any)[key]
if (value !== null && typeof value === 'object') {
const keys = Object.keys(value)
if (keys[0] === '$like') {
const pattern = value.$like as string
translated[key] = {
$regex: `^${pattern.split('%').join('.*')}$`,
$options: 'i'
}
continue
}
}
translated[key] = value
}
return translated
}
async rawFindAll<T extends Doc>(domain: Domain, query: DocumentQuery<T>, options?: FindOptions<T>): Promise<T[]> {
let cursor = this.db.collection(domain).find<T>(this.translateRawQuery(query))
if (options?.limit !== undefined) {
cursor = cursor.limit(options.limit)
}
if (options !== null && options !== undefined) {
if (options.sort !== undefined) {
const sort: Sort = {}
for (const key in options.sort) {
const order = options.sort[key] === SortingOrder.Ascending ? 1 : -1
sort[key] = order
}
cursor = cursor.sort(sort)
}
}
return await cursor.toArray()
}
async rawUpdate<T extends Doc>(
domain: Domain,
query: DocumentQuery<T>,
operations: DocumentUpdate<T>
): Promise<void> {
if (isOperator(operations)) {
await this.db
.collection(domain)
.updateMany(this.translateRawQuery(query), { ...operations } as unknown as UpdateFilter<Document>)
} else {
await this.db
.collection(domain)
.updateMany(this.translateRawQuery(query), { $set: { ...operations, '%hash%': null } })
}
}
abstract init (): Promise<void> abstract init (): Promise<void>
collection<TSchema extends Document = Document>(domain: Domain): Collection<TSchema> { collection<TSchema extends Document = Document>(domain: Domain): Collection<TSchema> {
@ -255,15 +353,15 @@ abstract class MongoAdapterBase implements DbAdapter {
return { base: translatedBase, lookup: translatedLookup } return { base: translatedBase, lookup: translatedLookup }
} }
private async getLookupValue<T extends Doc>( private getLookupValue<T extends Doc>(
clazz: Ref<Class<T>>, clazz: Ref<Class<T>>,
lookup: Lookup<T>, lookup: Lookup<T>,
result: LookupStep[], result: LookupStep[],
parent?: string parent?: string
): Promise<void> { ): void {
for (const key in lookup) { for (const key in lookup) {
if (key === '_id') { if (key === '_id') {
await this.getReverseLookupValue(lookup, result, parent) this.getReverseLookupValue(lookup, result, parent)
continue continue
} }
const value = (lookup as any)[key] const value = (lookup as any)[key]
@ -280,7 +378,7 @@ abstract class MongoAdapterBase implements DbAdapter {
as: fullKey.split('.').join('') + '_lookup' as: fullKey.split('.').join('') + '_lookup'
}) })
} }
await this.getLookupValue(_class, nested, result, fullKey + '_lookup') this.getLookupValue(_class, nested, result, fullKey + '_lookup')
} else { } else {
const _class = value as Ref<Class<Doc>> const _class = value as Ref<Class<Doc>>
const tkey = this.checkMixinKey(key, clazz) const tkey = this.checkMixinKey(key, clazz)
@ -298,11 +396,7 @@ abstract class MongoAdapterBase implements DbAdapter {
} }
} }
private async getReverseLookupValue ( private getReverseLookupValue (lookup: ReverseLookups, result: LookupStep[], parent?: string): void {
lookup: ReverseLookups,
result: LookupStep[],
parent?: string
): Promise<any | undefined> {
const fullKey = parent !== undefined ? parent + '.' + '_id' : '_id' const fullKey = parent !== undefined ? parent + '.' + '_id' : '_id'
const lid = lookup?._id ?? {} const lid = lookup?._id ?? {}
for (const key in lid) { for (const key in lid) {
@ -319,7 +413,9 @@ abstract class MongoAdapterBase implements DbAdapter {
_class = value _class = value
} }
const domain = this.hierarchy.getDomain(_class) const domain = this.hierarchy.getDomain(_class)
const desc = this.hierarchy.getDescendants(_class) const desc = this.hierarchy
.getDescendants(this.hierarchy.getBaseClass(_class))
.filter((it) => !this.hierarchy.isMixin(it))
if (domain !== DOMAIN_MODEL) { if (domain !== DOMAIN_MODEL) {
const asVal = as.split('.').join('') + '_lookup' const asVal = as.split('.').join('') + '_lookup'
const step: LookupStep = { const step: LookupStep = {
@ -340,14 +436,14 @@ abstract class MongoAdapterBase implements DbAdapter {
} }
} }
private async getLookups<T extends Doc>( private getLookups<T extends Doc>(
_class: Ref<Class<T>>, _class: Ref<Class<T>>,
lookup: Lookup<T> | undefined, lookup: Lookup<T> | undefined,
parent?: string parent?: string
): Promise<LookupStep[]> { ): LookupStep[] {
if (lookup === undefined) return [] if (lookup === undefined) return []
const result: [] = [] const result: [] = []
await this.getLookupValue(_class, lookup, result, parent) this.getLookupValue(_class, lookup, result, parent)
return result return result
} }
@ -482,8 +578,7 @@ abstract class MongoAdapterBase implements DbAdapter {
const tquery = this.translateQuery(clazz, query, options) const tquery = this.translateQuery(clazz, query, options)
const slowPipeline = isLookupQuery(query) || isLookupSort(options?.sort) const slowPipeline = isLookupQuery(query) || isLookupSort(options?.sort)
const steps = await ctx.with('get-lookups', {}, async () => await this.getLookups(clazz, options?.lookup)) const steps = this.getLookups(clazz, options?.lookup)
if (slowPipeline) { if (slowPipeline) {
if (Object.keys(tquery.base).length > 0) { if (Object.keys(tquery.base).length > 0) {
pipeline.push({ $match: tquery.base }) pipeline.push({ $match: tquery.base })
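
translateRawQuery above rewrites $like patterns into case-insensitive regexes, e.g. (illustrative):

const rawQuery = { name: { $like: 'my-task-%' } }
// translateRawQuery(rawQuery) yields:
const mongoFilter = { name: { $regex: '^my-task-.*$', $options: 'i' } }
// so rawFindAll(domain, rawQuery) matches 'my-task-0', 'MY-TASK-42', ...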

View File

@ -81,7 +81,8 @@ class MongoClientReferenceImpl {
} }
this.onclose() this.onclose()
void (async () => { void (async () => {
await (await this.client).close() const cl = await this.client
await cl.close()
})() })()
} }
} }
@ -214,7 +215,7 @@ export class DBCollectionHelper implements DomainHelperOperations {
} }
} }
exists (domain: Domain): boolean { async exists (domain: Domain): Promise<boolean> {
return this.collections.has(domain) return this.collections.has(domain)
} }
@ -242,7 +243,7 @@ export class DBCollectionHelper implements DomainHelperOperations {
} }
async estimatedCount (domain: Domain): Promise<number> { async estimatedCount (domain: Domain): Promise<number> {
if (this.exists(domain)) { if (await this.exists(domain)) {
const c = this.collection(domain) const c = this.collection(domain)
return await c.estimatedDocumentCount() return await c.estimatedDocumentCount()
} }

View File

@ -0,0 +1,7 @@
module.exports = {
extends: ['./node_modules/@hcengineering/platform-rig/profiles/node/eslint.config.json'],
parserOptions: {
tsconfigRootDir: __dirname,
project: './tsconfig.json'
}
}

View File

@ -0,0 +1,4 @@
*
!/lib/**
!CHANGELOG.md
/lib/**/__tests__/

View File

@ -0,0 +1,5 @@
{
"$schema": "https://developer.microsoft.com/json-schemas/rig-package/rig.schema.json",
"rigPackageName": "@hcengineering/platform-rig",
"rigProfile": "node"
}

View File

@ -0,0 +1,7 @@
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
testMatch: ['**/?(*.)+(spec|test).[jt]s?(x)'],
roots: ["./src"],
coverageReporters: ["text-summary", "html"]
}

View File

@ -0,0 +1,43 @@
{
"name": "@hcengineering/postgres",
"version": "0.6.0",
"main": "lib/index.js",
"svelte": "src/index.ts",
"types": "types/index.d.ts",
"author": "Copyright © Hardcore Engineering Inc.",
"template": "@hcengineering/node-package",
"license": "EPL-2.0",
"scripts": {
"build": "compile",
"build:watch": "compile",
"test": "jest --passWithNoTests --silent --forceExit",
"format": "format src",
"_phase:build": "compile transpile src",
"_phase:test": "jest --passWithNoTests --silent --forceExit",
"_phase:format": "format src",
"_phase:validate": "compile validate"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",
"@typescript-eslint/eslint-plugin": "^6.11.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-promise": "^6.1.1",
"eslint-plugin-n": "^15.4.0",
"eslint": "^8.54.0",
"@typescript-eslint/parser": "^6.11.0",
"eslint-config-standard-with-typescript": "^40.0.0",
"prettier": "^3.1.0",
"typescript": "^5.3.3",
"jest": "^29.7.0",
"ts-jest": "^29.1.1",
"@types/jest": "^29.5.5",
"@types/node": "~20.11.16",
"@types/pg": "^8.11.6"
},
"dependencies": {
"pg": "8.12.0",
"@hcengineering/core": "^0.6.32",
"@hcengineering/platform": "^0.6.11",
"@hcengineering/server-core": "^0.6.1"
}
}

View File

@ -0,0 +1,233 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import core, {
type Account,
type Arr,
type AttachedDoc,
type Class,
ClassifierKind,
type Data,
type Doc,
DOMAIN_DOC_INDEX_STATE,
DOMAIN_MODEL,
DOMAIN_TX,
type Mixin,
type Obj,
type Ref,
type TxCreateDoc,
type TxCUD,
TxFactory,
AccountRole
} from '@hcengineering/core'
import type { IntlString, Plugin } from '@hcengineering/platform'
import { plugin } from '@hcengineering/platform'
export const txFactory = new TxFactory(core.account.System)
export function createClass (_class: Ref<Class<Obj>>, attributes: Data<Class<Obj>>): TxCreateDoc<Doc> {
return txFactory.createTxCreateDoc(core.class.Class, core.space.Model, attributes, _class)
}
/**
* @public
*/
export function createDoc<T extends Doc> (
_class: Ref<Class<T>>,
attributes: Data<T>,
id?: Ref<T>,
modifiedBy?: Ref<Account>
): TxCreateDoc<Doc> {
const result = txFactory.createTxCreateDoc(_class, core.space.Model, attributes, id)
if (modifiedBy !== undefined) {
result.modifiedBy = modifiedBy
}
return result
}
/**
* @public
*/
export interface TestMixin extends Doc {
arr: Arr<string>
}
/**
* @public
*/
export interface AttachedComment extends AttachedDoc {
message: string
}
/**
* @public
*/
export const test = plugin('test' as Plugin, {
mixin: {
TestMixin: '' as Ref<Mixin<TestMixin>>
},
class: {
TestComment: '' as Ref<Class<AttachedComment>>
}
})
/**
* @public
 * Generate a minimal model for testing purposes.
 * @returns the minimal set of model transactions
*/
export function genMinModel (): TxCUD<Doc>[] {
const txes = []
// Fill the txes array with basic model classes.
txes.push(createClass(core.class.Obj, { label: 'Obj' as IntlString, kind: ClassifierKind.CLASS }))
txes.push(
createClass(core.class.Doc, { label: 'Doc' as IntlString, extends: core.class.Obj, kind: ClassifierKind.CLASS })
)
txes.push(
createClass(core.class.AttachedDoc, {
label: 'AttachedDoc' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.MIXIN
})
)
txes.push(
createClass(core.class.Class, {
label: 'Class' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_MODEL
})
)
txes.push(
createClass(core.class.Space, {
label: 'Space' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_MODEL
})
)
txes.push(
createClass(core.class.DocIndexState, {
label: 'DocIndexState' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_DOC_INDEX_STATE
})
)
txes.push(
createClass(core.class.Account, {
label: 'Account' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_MODEL
})
)
txes.push(
createClass(core.class.Tx, {
label: 'Tx' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_TX
})
)
txes.push(
createClass(core.class.TxCUD, {
label: 'TxCUD' as IntlString,
extends: core.class.Tx,
kind: ClassifierKind.CLASS,
domain: DOMAIN_TX
})
)
txes.push(
createClass(core.class.TxCreateDoc, {
label: 'TxCreateDoc' as IntlString,
extends: core.class.TxCUD,
kind: ClassifierKind.CLASS
})
)
txes.push(
createClass(core.class.TxUpdateDoc, {
label: 'TxUpdateDoc' as IntlString,
extends: core.class.TxCUD,
kind: ClassifierKind.CLASS
})
)
txes.push(
createClass(core.class.TxRemoveDoc, {
label: 'TxRemoveDoc' as IntlString,
extends: core.class.TxCUD,
kind: ClassifierKind.CLASS
})
)
txes.push(
createClass(core.class.TxCollectionCUD, {
label: 'TxCollectionCUD' as IntlString,
extends: core.class.TxCUD,
kind: ClassifierKind.CLASS
})
)
txes.push(
createClass(test.mixin.TestMixin, {
label: 'TestMixin' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.MIXIN
})
)
txes.push(
createClass(test.class.TestComment, {
label: 'TestComment' as IntlString,
extends: core.class.AttachedDoc,
kind: ClassifierKind.CLASS
})
)
const u1 = 'User1' as Ref<Account>
const u2 = 'User2' as Ref<Account>
txes.push(
createDoc(core.class.Account, { email: 'user1@site.com', role: AccountRole.User }, u1),
createDoc(core.class.Account, { email: 'user2@site.com', role: AccountRole.User }, u2),
createDoc(core.class.Space, {
name: 'Sp1',
description: '',
private: false,
archived: false,
members: [u1, u2]
})
)
txes.push(
createDoc(core.class.Space, {
name: 'Sp2',
description: '',
private: false,
archived: false,
members: [u1]
})
)
txes.push(
createClass(core.class.DomainIndexConfiguration, {
label: 'DomainIndexConfiguration' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_MODEL
})
)
return txes
}

View File

@ -0,0 +1,328 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import core, {
type Client,
type ClientConnection,
createClient,
type Doc,
type DocChunk,
type Domain,
generateId,
getWorkspaceId,
Hierarchy,
MeasureMetricsContext,
ModelDb,
type Ref,
SortingOrder,
type Space,
TxOperations
} from '@hcengineering/core'
import { type DbAdapter } from '@hcengineering/server-core'
import { createPostgresAdapter, createPostgresTxAdapter } from '..'
import { getDBClient, type PostgresClientReference, shutdown } from '../utils'
import { genMinModel } from './minmodel'
import { createTaskModel, type Task, type TaskComment, taskPlugin } from './tasks'
const txes = genMinModel()
createTaskModel(txes)
describe('postgres operations', () => {
const baseDbUri: string = process.env.DB_URL ?? 'postgresql://postgres:example@localhost:5433'
let dbId: string = 'pg_testdb_' + generateId()
let dbUri: string = baseDbUri + '/' + dbId
const clientRef: PostgresClientReference = getDBClient(baseDbUri)
let hierarchy: Hierarchy
let model: ModelDb
let client: Client
let operations: TxOperations
let serverStorage: DbAdapter
afterAll(async () => {
clientRef.close()
await shutdown()
})
beforeEach(async () => {
try {
dbId = 'pg_testdb_' + generateId()
dbUri = baseDbUri + '/' + dbId
const client = await clientRef.getClient()
await client.query(`CREATE DATABASE ${dbId}`)
} catch (err) {
console.error(err)
}
})
afterEach(async () => {
try {
// await client.close()
// await (await clientRef.getClient()).query(`DROP DATABASE ${dbId}`)
} catch (err) {
console.log(err)
}
await serverStorage?.close()
})
async function initDb (): Promise<void> {
// Build the hierarchy and model from the base transactions.
hierarchy = new Hierarchy()
model = new ModelDb(hierarchy)
for (const t of txes) {
hierarchy.tx(t)
}
for (const t of txes) {
await model.tx(t)
}
const mctx = new MeasureMetricsContext('', {})
const txStorage = await createPostgresTxAdapter(mctx, hierarchy, dbUri, getWorkspaceId(dbId), model)
// Store all model transactions in the Tx domain
for (const t of txes) {
await txStorage.tx(mctx, t)
}
await txStorage.close()
const ctx = new MeasureMetricsContext('client', {})
// Assign to the suite-level variable (no const) so afterEach can close it.
serverStorage = await createPostgresAdapter(ctx, hierarchy, dbUri, getWorkspaceId(dbId), model)
await serverStorage.init?.()
client = await createClient(async (handler) => {
const st: ClientConnection = {
isConnected: () => true,
findAll: async (_class, query, options) => await serverStorage.findAll(ctx, _class, query, options),
tx: async (tx) => await serverStorage.tx(ctx, tx),
searchFulltext: async () => ({ docs: [] }),
close: async () => {},
loadChunk: async (domain): Promise<DocChunk> => await Promise.reject(new Error('unsupported')),
closeChunk: async (idx) => {},
loadDocs: async (domain: Domain, docs: Ref<Doc>[]) => [],
upload: async (domain: Domain, docs: Doc[]) => {},
clean: async (domain: Domain, docs: Ref<Doc>[]) => {},
loadModel: async () => txes,
getAccount: async () => ({}) as any,
sendForceClose: async () => {}
}
return st
})
operations = new TxOperations(client, core.account.System)
}
beforeEach(async () => {
jest.setTimeout(30000)
await initDb()
})
it('check add', async () => {
const times: number[] = []
for (let i = 0; i < 50; i++) {
const t = Date.now()
await operations.createDoc(taskPlugin.class.Task, '' as Ref<Space>, {
name: `my-task-${i}`,
description: `${i * i}`,
rate: 20 + i
})
times.push(Date.now() - t)
}
console.log('createDoc times', times)
const r = await client.findAll<Task>(taskPlugin.class.Task, {})
expect(r.length).toEqual(50)
})
it('check find by criteria', async () => {
jest.setTimeout(20000)
for (let i = 0; i < 50; i++) {
await operations.createDoc(taskPlugin.class.Task, '' as Ref<Space>, {
name: `my-task-${i}`,
description: `${i * i}`,
rate: 20 + i
})
}
const r = await client.findAll<Task>(taskPlugin.class.Task, {})
expect(r.length).toEqual(50)
const first = await client.findAll<Task>(taskPlugin.class.Task, { name: 'my-task-0' })
expect(first.length).toEqual(1)
const second = await client.findAll<Task>(taskPlugin.class.Task, { name: { $like: '%0' } })
expect(second.length).toEqual(5)
const third = await client.findAll<Task>(taskPlugin.class.Task, { rate: { $in: [25, 26, 27, 28] } })
expect(third.length).toEqual(4)
})
it('check update', async () => {
await operations.createDoc(taskPlugin.class.Task, '' as Ref<Space>, {
name: 'my-task',
description: 'some data ',
rate: 20,
arr: []
})
const doc = (await client.findAll<Task>(taskPlugin.class.Task, {}))[0]
await operations.updateDoc(doc._class, doc.space, doc._id, { rate: 30 })
let tasks = await client.findAll<Task>(taskPlugin.class.Task, {})
expect(tasks.length).toEqual(1)
expect(tasks[0].rate).toEqual(30)
await operations.updateDoc(doc._class, doc.space, doc._id, { $inc: { rate: 1 } })
tasks = await client.findAll<Task>(taskPlugin.class.Task, {})
expect(tasks.length).toEqual(1)
expect(tasks[0].rate).toEqual(31)
await operations.updateDoc(doc._class, doc.space, doc._id, { $inc: { rate: -1 } })
tasks = await client.findAll<Task>(taskPlugin.class.Task, {})
expect(tasks.length).toEqual(1)
expect(tasks[0].rate).toEqual(30)
await operations.updateDoc(doc._class, doc.space, doc._id, { $push: { arr: 1 } })
tasks = await client.findAll<Task>(taskPlugin.class.Task, {})
expect(tasks.length).toEqual(1)
expect(tasks[0].arr?.length).toEqual(1)
expect(tasks[0].arr?.[0]).toEqual(1)
await operations.updateDoc(doc._class, doc.space, doc._id, { $push: { arr: 3 } })
tasks = await client.findAll<Task>(taskPlugin.class.Task, {})
expect(tasks.length).toEqual(1)
expect(tasks[0].arr?.length).toEqual(2)
expect(tasks[0].arr?.[0]).toEqual(1)
expect(tasks[0].arr?.[1]).toEqual(3)
})
it('check remove', async () => {
for (let i = 0; i < 10; i++) {
await operations.createDoc(taskPlugin.class.Task, '' as Ref<Space>, {
name: `my-task-${i}`,
description: `${i * i}`,
rate: 20 + i
})
}
let r = await client.findAll<Task>(taskPlugin.class.Task, {})
expect(r.length).toEqual(10)
await operations.removeDoc<Task>(taskPlugin.class.Task, '' as Ref<Space>, r[0]._id)
r = await client.findAll<Task>(taskPlugin.class.Task, {})
expect(r.length).toEqual(9)
})
it('limit and sorting', async () => {
for (let i = 0; i < 5; i++) {
await operations.createDoc(taskPlugin.class.Task, '' as Ref<Space>, {
name: `my-task-${i}`,
description: `${i * i}`,
rate: 20 + i
})
}
const without = await client.findAll(taskPlugin.class.Task, {})
expect(without).toHaveLength(5)
const limit = await client.findAll(taskPlugin.class.Task, {}, { limit: 1 })
expect(limit).toHaveLength(1)
const sortAsc = await client.findAll(taskPlugin.class.Task, {}, { sort: { name: SortingOrder.Ascending } })
expect(sortAsc[0].name).toMatch('my-task-0')
const sortDesc = await client.findAll(taskPlugin.class.Task, {}, { sort: { name: SortingOrder.Descending } })
expect(sortDesc[0].name).toMatch('my-task-4')
})
it('check attached', async () => {
const docId = await operations.createDoc(taskPlugin.class.Task, '' as Ref<Space>, {
name: 'my-task',
description: 'Descr',
rate: 20
})
const commentId = await operations.addCollection(
taskPlugin.class.TaskComment,
'' as Ref<Space>,
docId,
taskPlugin.class.Task,
'tasks',
{
message: 'my-msg',
date: new Date()
}
)
await operations.addCollection(
taskPlugin.class.TaskComment,
'' as Ref<Space>,
docId,
taskPlugin.class.Task,
'tasks',
{
message: 'my-msg2',
date: new Date()
}
)
const r2 = await client.findAll<TaskComment>(
taskPlugin.class.TaskComment,
{},
{
lookup: {
attachedTo: taskPlugin.class.Task
}
}
)
expect(r2.length).toEqual(2)
expect((r2[0].$lookup?.attachedTo as Task)?._id).toEqual(docId)
const r3 = await client.findAll<Task>(
taskPlugin.class.Task,
{},
{
lookup: {
_id: { comment: taskPlugin.class.TaskComment }
}
}
)
expect(r3).toHaveLength(1)
expect((r3[0].$lookup as any).comment).toHaveLength(2)
const comment2Id = await operations.addCollection(
taskPlugin.class.TaskComment,
'' as Ref<Space>,
commentId,
taskPlugin.class.TaskComment,
'comments',
{
message: 'my-msg3',
date: new Date()
}
)
const r4 = await client.findAll<TaskComment>(
taskPlugin.class.TaskComment,
{
_id: comment2Id
},
{
lookup: { attachedTo: [taskPlugin.class.TaskComment, { attachedTo: taskPlugin.class.Task } as any] }
}
)
expect((r4[0].$lookup?.attachedTo as TaskComment)?._id).toEqual(commentId)
expect(((r4[0].$lookup?.attachedTo as any)?.$lookup.attachedTo as Task)?._id).toEqual(docId)
})
})

View File

@ -0,0 +1,112 @@
import {
type Account,
type AttachedDoc,
type Class,
ClassifierKind,
type Data,
type Doc,
type Domain,
type Ref,
type Space,
type Tx
} from '@hcengineering/core'
import { type IntlString, plugin, type Plugin } from '@hcengineering/platform'
import { createClass } from './minmodel'
export interface TaskComment extends AttachedDoc {
message: string
date: Date
}
export enum TaskStatus {
Open,
Close,
Resolved = 100,
InProgress
}
export enum TaskReproduce {
Always = 'always',
Rare = 'rare',
Sometimes = 'sometimes'
}
export interface Task extends Doc {
name: string
description: string
rate?: number
status?: TaskStatus
reproduce?: TaskReproduce
eta?: TaskEstimate | null
arr?: number[]
}
/**
 * Defines ROM (rough order of magnitude) and ETA (estimated time of arrival), both in hours.
*/
export interface TaskEstimate extends AttachedDoc {
rom: number // in hours
eta: number // in hours
}
export interface TaskMixin extends Task {
textValue?: string
}
export interface TaskWithSecond extends Task {
secondTask: string | null
}
const taskIds = 'taskIds' as Plugin
export const taskPlugin = plugin(taskIds, {
class: {
Task: '' as Ref<Class<Task>>,
TaskEstimate: '' as Ref<Class<TaskEstimate>>,
TaskComment: '' as Ref<Class<TaskComment>>
}
})
/**
 * Create task data with the given name, rate and description.
 * @param name - the task name
*/
export function createTask (name: string, rate: number, description: string): Data<Task> {
return {
name,
description,
rate
}
}
export const doc1: Task = {
_id: 'd1' as Ref<Task>,
_class: taskPlugin.class.Task,
name: 'my-space',
description: 'some-value',
rate: 20,
modifiedBy: 'user' as Ref<Account>,
modifiedOn: 10,
// createdOn: 10,
space: '' as Ref<Space>
}
export function createTaskModel (txes: Tx[]): void {
txes.push(
createClass(taskPlugin.class.Task, {
kind: ClassifierKind.CLASS,
label: 'Task' as IntlString,
domain: 'test-task' as Domain
}),
createClass(taskPlugin.class.TaskEstimate, {
kind: ClassifierKind.CLASS,
label: 'Estimate' as IntlString,
domain: 'test-task' as Domain
}),
createClass(taskPlugin.class.TaskComment, {
kind: ClassifierKind.CLASS,
label: 'Comment' as IntlString,
domain: 'test-task' as Domain
})
)
}

View File

@ -0,0 +1,17 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
export * from './storage'
export { getDBClient, convertDoc, createTable, retryTxn, translateDomain } from './utils'

File diff suppressed because it is too large

View File

@ -0,0 +1,391 @@
//
// Copyright © 2024 Hardcore Engineering Inc.
//
// Licensed under the Eclipse Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. You may
// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
import core, {
type Account,
AccountRole,
type Class,
type Doc,
type Domain,
type FieldIndexConfig,
generateId,
type Projection,
type Ref,
type WorkspaceId
} from '@hcengineering/core'
import { PlatformError, unknownStatus } from '@hcengineering/platform'
import { type DomainHelperOperations } from '@hcengineering/server-core'
import { Pool, type PoolClient } from 'pg'
const connections = new Map<string, PostgresClientReferenceImpl>()
// Register close on process exit.
process.on('exit', () => {
shutdown().catch((err) => {
console.error(err)
})
})
const clientRefs = new Map<string, ClientRef>()
export async function retryTxn (pool: Pool, operation: (client: PoolClient) => Promise<any>): Promise<any> {
const backoffInterval = 100 // millis
const maxTries = 5
let tries = 0
const client = await pool.connect()
try {
while (true) {
await client.query('BEGIN;')
tries++
try {
const result = await operation(client)
await client.query('COMMIT;')
return result
} catch (err: any) {
await client.query('ROLLBACK;')
if (err.code !== '40001' || tries === maxTries) {
throw err
} else {
console.log('Transaction failed. Retrying.')
console.log(err.message)
await new Promise((resolve) => setTimeout(resolve, tries * backoffInterval))
}
}
}
} finally {
client.release()
}
}
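
A minimal usage sketch for retryTxn, which retries serialization failures (Postgres error code 40001) up to five times with linear backoff; the table and workspace values below are illustrative:

async function countDocs (pool: Pool, workspaceId: string): Promise<number> {
  return await retryTxn(pool, async (client) => {
    const res = await client.query('SELECT COUNT(_id) FROM task WHERE "workspaceId" = $1', [workspaceId])
    return Number(res.rows[0].count)
  })
}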
export async function createTable (client: Pool, domains: string[]): Promise<void> {
if (domains.length === 0) {
return
}
const mapped = domains.map((p) => translateDomain(p))
const inArr = mapped.map((it) => `'${it}'`).join(', ')
const exists = await client.query(`
SELECT table_name
FROM information_schema.tables
WHERE table_name IN (${inArr})
`)
const existingTables = new Set(exists.rows.map((r) => r.table_name))
const toCreate = mapped.filter((it) => !existingTables.has(it))
await retryTxn(client, async (client) => {
for (const domain of toCreate) {
await client.query(
`CREATE TABLE ${domain} (
"workspaceId" VARCHAR(255) NOT NULL,
_id VARCHAR(255) NOT NULL,
_class VARCHAR(255) NOT NULL,
"createdBy" VARCHAR(255),
"modifiedBy" VARCHAR(255) NOT NULL,
"modifiedOn" bigint NOT NULL,
"createdOn" bigint,
space VARCHAR(255) NOT NULL,
"attachedTo" VARCHAR(255),
data JSONB NOT NULL,
PRIMARY KEY("workspaceId", _id)
)`
)
await client.query(`
CREATE INDEX ${domain}_attachedTo ON ${domain} ("attachedTo")
`)
await client.query(`
CREATE INDEX ${domain}_class ON ${domain} (_class)
`)
await client.query(`
CREATE INDEX ${domain}_space ON ${domain} (space)
`)
await client.query(`
CREATE INDEX ${domain}_idxgin ON ${domain} USING GIN (data)
`)
}
})
}
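
Every table created above shares one shape: fixed document columns plus a JSONB data column. A sketch of a matching insert (values illustrative; translateDomain is defined later in this file, and node-postgres serializes the object parameter to JSON for the JSONB column):

async function insertRaw (pool: Pool, domain: string): Promise<void> {
  await pool.query(
    `INSERT INTO ${translateDomain(domain)}
       ("workspaceId", _id, _class, "createdBy", "modifiedBy", "modifiedOn", "createdOn", space, "attachedTo", data)
     VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
    ['ws1', 'doc1', 'task:class:Task', null, 'user1', Date.now(), Date.now(), 'space1', null, { name: 'my-task' }]
  )
}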
/**
* @public
*/
export async function shutdown (): Promise<void> {
for (const c of connections.values()) {
c.close(true)
}
connections.clear()
}
export interface PostgresClientReference {
getClient: () => Promise<Pool>
close: () => void
}
class PostgresClientReferenceImpl {
count: number
client: Pool | Promise<Pool>
constructor (
client: Pool | Promise<Pool>,
readonly onclose: () => void
) {
this.count = 0
this.client = client
}
async getClient (): Promise<Pool> {
if (this.client instanceof Promise) {
this.client = await this.client
}
return this.client
}
close (force: boolean = false): void {
this.count--
if (this.count === 0 || force) {
if (force) {
this.count = 0
}
void (async () => {
this.onclose()
const cl = await this.client
await cl.end()
console.log('Closed postgres connection')
})()
}
}
addRef (): void {
this.count++
}
}
export class ClientRef implements PostgresClientReference {
id = generateId()
constructor (readonly client: PostgresClientReferenceImpl) {
clientRefs.set(this.id, this)
}
closed = false
async getClient (): Promise<Pool> {
if (!this.closed) {
return await this.client.getClient()
} else {
throw new PlatformError(unknownStatus('DB client is already closed'))
}
}
close (): void {
// Do not allow double close of the postgres connection client
if (!this.closed) {
clientRefs.delete(this.id)
this.closed = true
this.client.close()
}
}
}
/**
 * Initialize a workspace connection to the database
* @public
*/
export function getDBClient (connectionString: string, database?: string): PostgresClientReference {
const key = `${connectionString}${process.env.postgres_OPTIONS ?? '{}'}`
let existing = connections.get(key)
if (existing === undefined) {
const pool = new Pool({
connectionString,
application_name: 'transactor',
database
})
existing = new PostgresClientReferenceImpl(pool, () => {
connections.delete(key)
})
connections.set(key, existing)
}
// Add a reference and return an individually closable wrapper
existing.addRef()
return new ClientRef(existing)
}
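
A lifecycle sketch for the reference-counted client returned by getDBClient (URL illustrative):

const ref = getDBClient('postgresql://postgres:example@localhost:5433')

async function ping (): Promise<void> {
  const pool = await ref.getClient()
  await pool.query('SELECT 1')
}

// ref.close() decrements the shared refcount; the pool ends once the last reference closes.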
export function convertDoc<T extends Doc> (doc: T, workspaceId: string): DBDoc {
const { _id, _class, createdBy, modifiedBy, modifiedOn, createdOn, space, attachedTo, ...data } = doc as any
return {
_id,
_class,
createdBy,
modifiedBy,
modifiedOn,
createdOn,
space,
attachedTo,
workspaceId,
data
}
}
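
// Note: despite its name, escapeBackticks below escapes single quotes for SQL string literals.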
export function escapeBackticks (str: string): string {
return str.replaceAll("'", "''")
}
export function isOwner (account: Account): boolean {
return account.role === AccountRole.Owner || account._id === core.account.System
}
export class DBCollectionHelper implements DomainHelperOperations {
constructor (
protected readonly client: Pool,
protected readonly workspaceId: WorkspaceId
) {}
domains = new Set<Domain>()
async create (domain: Domain): Promise<void> {}
async exists (domain: Domain): Promise<boolean> {
const exists = await this.client.query(`
SELECT table_name
FROM information_schema.tables
WHERE table_name = '${translateDomain(domain)}'
`)
return exists.rows.length > 0
}
async listDomains (): Promise<Set<Domain>> {
return this.domains
}
async createIndex (domain: Domain, value: string | FieldIndexConfig<Doc>, options?: { name: string }): Promise<void> {}
async dropIndex (domain: Domain, name: string): Promise<void> {}
async listIndexes (domain: Domain): Promise<{ name: string }[]> {
return []
}
async estimatedCount (domain: Domain): Promise<number> {
const res = await this.client.query(`SELECT COUNT(_id) FROM ${translateDomain(domain)} WHERE "workspaceId" = $1`, [
this.workspaceId.name
])
return res.rows[0].count
}
}
export function translateDomain (domain: string): string {
return domain.replaceAll('-', '_')
}
export function parseDocWithProjection<T extends Doc> (doc: DBDoc, projection: Projection<T> | undefined): T {
const { workspaceId, data, ...rest } = doc
for (const key in rest) {
if ((rest as any)[key] === 'NULL') {
if (key === 'attachedTo') {
// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
delete rest[key]
} else {
;(rest as any)[key] = null
}
}
if (key === 'modifiedOn' || key === 'createdOn') {
;(rest as any)[key] = Number.parseInt((rest as any)[key])
}
}
if (projection !== undefined) {
for (const key in data) {
if (!Object.prototype.hasOwnProperty.call(projection, key) || (projection as any)[key] === 0) {
// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
delete data[key]
}
}
}
const res = {
...data,
...rest
} as any as T
return res
}
export function parseDoc<T extends Doc> (doc: DBDoc): T {
const { workspaceId, data, ...rest } = doc
for (const key in rest) {
if ((rest as any)[key] === 'NULL') {
if (key === 'attachedTo') {
// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
delete rest[key]
} else {
;(rest as any)[key] = null
}
}
if (key === 'modifiedOn' || key === 'createdOn') {
;(rest as any)[key] = Number.parseInt((rest as any)[key])
}
}
const res = {
...data,
...rest
} as any as T
return res
}
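
A round-trip sketch using the helpers above: convertDoc lifts the fixed columns out of data on write, and parseDoc merges them back on read (values illustrative):

const doc: any = {
  _id: 'doc1',
  _class: 'task:class:Task',
  space: 'space1',
  modifiedBy: 'user1',
  modifiedOn: Date.now(),
  name: 'my-task'
}
const row = convertDoc(doc, 'ws1')
// row.data -> { name: 'my-task' }; row.workspaceId -> 'ws1'
const restored = parseDoc<any>(row)
// restored carries the same document fields; workspaceId stays behind in the row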
export interface DBDoc extends Doc {
workspaceId: string
attachedTo?: Ref<Doc>
data: Record<string, any>
}
export function isDataField (field: string): boolean {
return !docFields.includes(field)
}
export const docFields: string[] = [
'_id',
'_class',
'createdBy',
'modifiedBy',
'modifiedOn',
'createdOn',
'space',
'attachedTo'
] as const
export function getUpdateValue (value: any): string {
if (typeof value === 'string') {
return '"' + escapeDoubleQuotes(value) + '"'
}
if (typeof value === 'object') {
return JSON.stringify(value)
}
return value
}
function escapeDoubleQuotes (jsonString: string): string {
const unescapedQuotes = /(?<!\\)"/g
return jsonString.replace(unescapedQuotes, '\\"')
}
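
Examples of getUpdateValue output for common value shapes (illustrative):

getUpdateValue('my "quoted" task') // '"my \"quoted\" task"' (quoted, inner quotes escaped)
getUpdateValue({ rate: 30 })       // '{"rate":30}'
getUpdateValue(42)                 // 42 (non-string primitives pass through unchanged)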
export interface JoinProps {
table: string // table to join
path: string // _id.roles, attachedTo.attachedTo, space...
fromAlias: string
fromField: string
toAlias: string // alias for the table
toField: string // field to join on
isReverse: boolean
toClass: Ref<Class<Doc>>
classes?: Ref<Class<Doc>>[] // filter by classes
}

View File

@ -0,0 +1,10 @@
{
"extends": "./node_modules/@hcengineering/platform-rig/profiles/node/tsconfig.json",
"compilerOptions": {
"rootDir": "./src",
"outDir": "./lib",
"declarationDir": "./types",
"tsBuildInfoFile": ".build/build.tsbuildinfo"
}
}

View File

@ -48,6 +48,7 @@
"@hcengineering/server-collaboration-resources": "^0.6.0", "@hcengineering/server-collaboration-resources": "^0.6.0",
"@hcengineering/server": "^0.6.4", "@hcengineering/server": "^0.6.4",
"@hcengineering/server-storage": "^0.6.0", "@hcengineering/server-storage": "^0.6.0",
"@hcengineering/postgres": "^0.6.0",
"@hcengineering/mongo": "^0.6.1", "@hcengineering/mongo": "^0.6.1",
"@hcengineering/elastic": "^0.6.0", "@hcengineering/elastic": "^0.6.0",
"elastic-apm-node": "~3.26.0", "elastic-apm-node": "~3.26.0",

View File

@ -1,11 +1,13 @@
/* eslint-disable @typescript-eslint/unbound-method */ /* eslint-disable @typescript-eslint/unbound-method */
import { import {
type Branding,
DOMAIN_BENCHMARK, DOMAIN_BENCHMARK,
DOMAIN_BLOB, DOMAIN_BLOB,
DOMAIN_FULLTEXT_BLOB, DOMAIN_FULLTEXT_BLOB,
DOMAIN_MODEL, DOMAIN_MODEL,
DOMAIN_TRANSIENT, DOMAIN_TRANSIENT,
DOMAIN_TX, DOMAIN_TX,
type WorkspaceIdWithUrl,
Hierarchy, Hierarchy,
ModelDb, ModelDb,
type MeasureContext type MeasureContext
@ -33,23 +35,32 @@ import {
TriggersMiddleware, TriggersMiddleware,
TxMiddleware TxMiddleware
} from '@hcengineering/middleware' } from '@hcengineering/middleware'
import { createPostgresAdapter, createPostgresTxAdapter } from '@hcengineering/postgres'
import { createMongoAdapter, createMongoTxAdapter } from '@hcengineering/mongo' import { createMongoAdapter, createMongoTxAdapter } from '@hcengineering/mongo'
import { import {
buildStorageFromConfig,
createNullAdapter, createNullAdapter,
createRekoniAdapter, createRekoniAdapter,
createStorageDataAdapter, createStorageDataAdapter,
createYDocAdapter createYDocAdapter,
storageConfigFromEnv
} from '@hcengineering/server' } from '@hcengineering/server'
import { import {
createBenchmarkAdapter, createBenchmarkAdapter,
createInMemoryAdapter, createInMemoryAdapter,
createPipeline, createPipeline,
type Middleware,
type DbAdapterFactory,
FullTextMiddleware, FullTextMiddleware,
type DbConfiguration, type DbConfiguration,
type MiddlewareCreator, type MiddlewareCreator,
type PipelineContext, type PipelineContext,
type PipelineFactory, type PipelineFactory,
type StorageAdapter type StorageAdapter,
type Pipeline,
type StorageConfiguration,
DummyFullTextAdapter,
type AggregatorStorageAdapter
} from '@hcengineering/server-core' } from '@hcengineering/server-core'
import { createIndexStages } from './indexing' import { createIndexStages } from './indexing'
@ -57,9 +68,11 @@ import { createIndexStages } from './indexing'
* @public * @public
*/ */
export function createServerPipeline ( export function getTxAdapterFactory (
metrics: MeasureContext, metrics: MeasureContext,
dbUrl: string, dbUrls: string,
workspace: WorkspaceIdWithUrl,
branding: Branding | null,
opt: { opt: {
fullTextUrl: string fullTextUrl: string
rekoniUrl: string rekoniUrl: string
@ -71,91 +84,44 @@ export function createServerPipeline (
externalStorage: StorageAdapter externalStorage: StorageAdapter
}, },
extensions?: Partial<DbConfiguration> extensions?: Partial<DbConfiguration>
): DbAdapterFactory {
const conf = getConfig(metrics, dbUrls, workspace, branding, metrics, opt, extensions)
const adapterName = conf.domains[DOMAIN_TX] ?? conf.defaultAdapter
const adapter = conf.adapters[adapterName]
return adapter.factory
}
/**
* @public
*/
export function createServerPipeline (
metrics: MeasureContext,
dbUrls: string,
opt: {
fullTextUrl: string
rekoniUrl: string
indexProcessing: number // 1000
indexParallel: number // 2
disableTriggers?: boolean
usePassedCtx?: boolean
adapterSecurity?: boolean
externalStorage: StorageAdapter
},
extensions?: Partial<DbConfiguration>
): PipelineFactory { ): PipelineFactory {
return (ctx, workspace, upgrade, broadcast, branding) => { return (ctx, workspace, upgrade, broadcast, branding) => {
const metricsCtx = opt.usePassedCtx === true ? ctx : metrics const metricsCtx = opt.usePassedCtx === true ? ctx : metrics
const wsMetrics = metricsCtx.newChild('🧲 session', {}) const wsMetrics = metricsCtx.newChild('🧲 session', {})
const conf: DbConfiguration = { const conf = getConfig(metrics, dbUrls, workspace, branding, wsMetrics, opt, extensions)
domains: {
[DOMAIN_TX]: 'MongoTx',
[DOMAIN_TRANSIENT]: 'InMemory',
[DOMAIN_BLOB]: 'StorageData',
[DOMAIN_FULLTEXT_BLOB]: 'FullTextBlob',
[DOMAIN_MODEL]: 'Null',
[DOMAIN_BENCHMARK]: 'Benchmark',
...extensions?.domains
},
metrics: wsMetrics,
defaultAdapter: extensions?.defaultAdapter ?? 'Mongo',
adapters: {
MongoTx: {
factory: createMongoTxAdapter,
url: dbUrl
},
Mongo: {
factory: createMongoAdapter,
url: dbUrl
},
Null: {
factory: createNullAdapter,
url: ''
},
InMemory: {
factory: createInMemoryAdapter,
url: ''
},
StorageData: {
factory: createStorageDataAdapter,
url: dbUrl
},
FullTextBlob: {
factory: createElasticBackupDataAdapter,
url: opt.fullTextUrl
},
Benchmark: {
factory: createBenchmarkAdapter,
url: ''
},
...extensions?.adapters
},
fulltextAdapter: extensions?.fulltextAdapter ?? {
factory: createElasticAdapter,
url: opt.fullTextUrl,
stages: (adapter, storage, storageAdapter, contentAdapter) =>
createIndexStages(
wsMetrics.newChild('stages', {}),
workspace,
branding,
adapter,
storage,
storageAdapter,
contentAdapter,
opt.indexParallel,
opt.indexProcessing
)
},
serviceAdapters: extensions?.serviceAdapters ?? {},
contentAdapters: {
Rekoni: {
factory: createRekoniAdapter,
contentType: '*',
url: opt.rekoniUrl
},
YDoc: {
factory: createYDocAdapter,
contentType: 'application/ydoc',
url: ''
},
...extensions?.contentAdapters
},
defaultContentAdapter: extensions?.defaultContentAdapter ?? 'Rekoni'
-    }
+    const conf = getConfig(metrics, dbUrls, workspace, branding, wsMetrics, opt, extensions)
     const middlewares: MiddlewareCreator[] = [
       LookupMiddleware.create,
       ModifiedMiddleware.create,
       PrivateMiddleware.create,
-      SpaceSecurityMiddleware.create,
+      (ctx: MeasureContext, context: PipelineContext, next?: Middleware) =>
+        SpaceSecurityMiddleware.create(opt.adapterSecurity ?? false, ctx, context, next),
       SpacePermissionsMiddleware.create,
       ConfigurationMiddleware.create,
       LowLevelMiddleware.create,
@@ -187,3 +153,154 @@ export function createServerPipeline (
     return createPipeline(ctx, middlewares, context)
   }
 }
export async function getServerPipeline (
ctx: MeasureContext,
mongodbUri: string,
dbUrl: string | undefined,
wsUrl: WorkspaceIdWithUrl
): Promise<{
pipeline: Pipeline
storageAdapter: AggregatorStorageAdapter
}> {
const dbUrls = dbUrl !== undefined ? `${dbUrl};${mongodbUri}` : mongodbUri
const storageConfig: StorageConfiguration = storageConfigFromEnv()
const storageAdapter = buildStorageFromConfig(storageConfig, mongodbUri)
const pipelineFactory = createServerPipeline(
ctx,
dbUrls,
{
externalStorage: storageAdapter,
fullTextUrl: 'http://localhost:9200',
indexParallel: 0,
indexProcessing: 0,
rekoniUrl: '',
usePassedCtx: true,
disableTriggers: true
},
{
fulltextAdapter: {
factory: async () => new DummyFullTextAdapter(),
url: '',
stages: (adapter, storage, storageAdapter, contentAdapter) =>
createIndexStages(
ctx.newChild('stages', {}),
wsUrl,
null,
adapter,
storage,
storageAdapter,
contentAdapter,
0,
0
)
}
}
)
return {
pipeline: await pipelineFactory(ctx, wsUrl, true, () => {}, null),
storageAdapter
}
}
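getServerPipeline hands back both resources, and callers later in this diff (see upgradeWorkspace) close the pipeline before the storage adapter in a `finally` block. The same discipline as a small helper (a sketch; `Closable` is a stand-in for the real interfaces):

// Sketch: run a job against resources and always close them, last-opened first.
interface Closable { close: () => Promise<void> }

async function withClosables<T> (resources: Closable[], job: () => Promise<T>): Promise<T> {
  try {
    return await job()
  } finally {
    for (const r of [...resources].reverse()) {
      await r.close()
    }
  }
}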
function getConfig (
metrics: MeasureContext,
dbUrls: string,
workspace: WorkspaceIdWithUrl,
branding: Branding | null,
ctx: MeasureContext,
opt: {
fullTextUrl: string
rekoniUrl: string
indexProcessing: number // 1000
indexParallel: number // 2
disableTriggers?: boolean
usePassedCtx?: boolean
externalStorage: StorageAdapter
},
extensions?: Partial<DbConfiguration>
): DbConfiguration {
const metricsCtx = opt.usePassedCtx === true ? ctx : metrics
const wsMetrics = metricsCtx.newChild('🧲 session', {})
const [dbUrl, mongoUrl] = dbUrls.split(';')
const conf: DbConfiguration = {
domains: {
[DOMAIN_TX]: 'Tx',
[DOMAIN_TRANSIENT]: 'InMemory',
[DOMAIN_BLOB]: 'StorageData',
[DOMAIN_FULLTEXT_BLOB]: 'FullTextBlob',
[DOMAIN_MODEL]: 'Null',
[DOMAIN_BENCHMARK]: 'Benchmark',
...extensions?.domains
},
metrics: wsMetrics,
defaultAdapter: extensions?.defaultAdapter ?? 'Main',
adapters: {
Tx: {
factory: mongoUrl !== undefined ? createPostgresTxAdapter : createMongoTxAdapter,
url: dbUrl
},
Main: {
factory: mongoUrl !== undefined ? createPostgresAdapter : createMongoAdapter,
url: dbUrl
},
Null: {
factory: createNullAdapter,
url: ''
},
InMemory: {
factory: createInMemoryAdapter,
url: ''
},
StorageData: {
factory: createStorageDataAdapter,
url: mongoUrl ?? dbUrl
},
FullTextBlob: {
factory: createElasticBackupDataAdapter,
url: opt.fullTextUrl
},
Benchmark: {
factory: createBenchmarkAdapter,
url: ''
},
...extensions?.adapters
},
fulltextAdapter: extensions?.fulltextAdapter ?? {
factory: createElasticAdapter,
url: opt.fullTextUrl,
stages: (adapter, storage, storageAdapter, contentAdapter) =>
createIndexStages(
wsMetrics.newChild('stages', {}),
workspace,
branding,
adapter,
storage,
storageAdapter,
contentAdapter,
opt.indexParallel,
opt.indexProcessing
)
},
serviceAdapters: extensions?.serviceAdapters ?? {},
contentAdapters: {
Rekoni: {
factory: createRekoniAdapter,
contentType: '*',
url: opt.rekoniUrl
},
YDoc: {
factory: createYDocAdapter,
contentType: 'application/ydoc',
url: ''
},
...extensions?.contentAdapters
},
defaultContentAdapter: extensions?.defaultContentAdapter ?? 'Rekoni'
}
return conf
}
@@ -23,6 +23,7 @@ import core, {
   FindResult,
   Hierarchy,
   IndexingConfiguration,
+  Iterator,
   MeasureContext,
   ModelDb,
   Ref,
@@ -36,6 +37,7 @@ import { createMongoAdapter } from '@hcengineering/mongo'
 import { PlatformError, unknownError } from '@hcengineering/platform'
 import {
   DbAdapter,
+  DbAdapterHandler,
   StorageAdapter,
   type DomainHelperOperations,
   type StorageAdapterEx
@@ -49,6 +51,29 @@ class StorageBlobAdapter implements DbAdapter {
     readonly blobAdapter: DbAdapter // A real blob adapter for Blob documents.
   ) {}

+  async traverse<T extends Doc>(
+    domain: Domain,
+    query: DocumentQuery<T>,
+    options?: Pick<FindOptions<T>, 'sort' | 'limit' | 'projection'>
+  ): Promise<Iterator<T>> {
+    return await this.blobAdapter.traverse(domain, query, options)
+  }
+
+  init?: ((domains?: string[], excludeDomains?: string[]) => Promise<void>) | undefined
+  on?: ((handler: DbAdapterHandler) => void) | undefined
+
+  async rawFindAll<T extends Doc>(domain: Domain, query: DocumentQuery<T>, options?: FindOptions<T>): Promise<T[]> {
+    return await this.blobAdapter.rawFindAll(domain, query, options)
+  }
+
+  async rawUpdate<T extends Doc>(
+    domain: Domain,
+    query: DocumentQuery<T>,
+    operations: DocumentUpdate<T>
+  ): Promise<void> {
+    await this.blobAdapter.rawUpdate(domain, query, operations)
+  }
+
   async findAll<T extends Doc>(
     ctx: MeasureContext,
     _class: Ref<Class<T>>,
@@ -45,7 +45,6 @@
     "@hcengineering/platform": "^0.6.11",
     "@hcengineering/server-core": "^0.6.1",
     "@hcengineering/server-ws": "^0.6.11",
-    "@hcengineering/mongo": "^0.6.1",
     "@hcengineering/minio": "^0.6.0",
     "@hcengineering/elastic": "^0.6.0",
     "elastic-apm-node": "~3.26.0",
@@ -39,21 +39,19 @@ import core, {
   type TxCUD
 } from '@hcengineering/core'
 import { consoleModelLogger, MigrateOperation, ModelLogger, tryMigrate } from '@hcengineering/model'
-import { createMongoTxAdapter, DBCollectionHelper, getMongoClient, getWorkspaceDB } from '@hcengineering/mongo'
 import {
   AggregatorStorageAdapter,
+  DbAdapter,
   DomainIndexHelperImpl,
-  StorageAdapter,
-  StorageConfiguration
+  Pipeline,
+  StorageAdapter
 } from '@hcengineering/server-core'
-import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
-import { Db, Document } from 'mongodb'
 import { connect } from './connect'
 import { InitScript, WorkspaceInitializer } from './initializer'
 import toolPlugin from './plugin'
 import { MigrateClientImpl } from './upgrade'
-import { getMetadata } from '@hcengineering/platform'
+import { getMetadata, PlatformError, unknownError } from '@hcengineering/platform'
 import { generateToken } from '@hcengineering/server-token'
 import fs from 'fs'
 import * as yaml from 'js-yaml'
@@ -89,6 +87,7 @@ export class FileModelLogger implements ModelLogger {
  */
 export function prepareTools (rawTxes: Tx[]): {
   mongodbUri: string
+  dbUrl: string | undefined
   txes: Tx[]
 } {
   const mongodbUri = process.env.MONGO_URL
@@ -97,8 +96,11 @@ export function prepareTools (rawTxes: Tx[]): {
     process.exit(1)
   }

+  const dbUrl = process.env.DB_URL
+
   return {
     mongodbUri,
+    dbUrl,
     txes: JSON.parse(JSON.stringify(rawTxes)) as Tx[]
   }
 }
@@ -110,40 +112,38 @@ export async function initModel (
   ctx: MeasureContext,
   workspaceId: WorkspaceId,
   rawTxes: Tx[],
+  adapter: DbAdapter,
+  storageAdapter: AggregatorStorageAdapter,
   logger: ModelLogger = consoleModelLogger,
   progress: (value: number) => Promise<void>,
   deleteFirst: boolean = false
 ): Promise<void> {
-  const { mongodbUri, txes } = prepareTools(rawTxes)
+  const { txes } = prepareTools(rawTxes)
   if (txes.some((tx) => tx.objectSpace !== core.space.Model)) {
     throw Error('Model txes must target only core.space.Model')
   }
-  const _client = getMongoClient(mongodbUri)
-  const client = await _client.getClient()
-  const storageConfig: StorageConfiguration = storageConfigFromEnv()
-  const storageAdapter = buildStorageFromConfig(storageConfig, mongodbUri)

   try {
-    const db = getWorkspaceDB(client, workspaceId)
     if (deleteFirst) {
       logger.log('deleting model...', workspaceId)
-      const result = await ctx.with(
-        'mongo-delete',
-        {},
-        async () =>
-          await db.collection(DOMAIN_TX).deleteMany({
-            objectSpace: core.space.Model,
-            modifiedBy: core.account.System,
-            objectClass: { $nin: [contact.class.PersonAccount, 'contact:class:EmployeeAccount'] }
-          })
-      )
-      logger.log('transactions deleted.', { workspaceId: workspaceId.name, count: result.deletedCount })
+      await ctx.with('mongo-delete', {}, async () => {
+        const toRemove = await adapter.rawFindAll(DOMAIN_TX, {
+          objectSpace: core.space.Model,
+          modifiedBy: core.account.System,
+          objectClass: { $nin: [contact.class.PersonAccount, 'contact:class:EmployeeAccount'] }
+        })
+        await adapter.clean(
+          ctx,
+          DOMAIN_TX,
+          toRemove.map((p) => p._id)
+        )
+      })
+      logger.log('transactions deleted.', { workspaceId: workspaceId.name })
     }

     logger.log('creating model...', workspaceId)
-    const result = await db.collection(DOMAIN_TX).insertMany(txes as Document[])
-    logger.log('model transactions inserted.', { count: result.insertedCount })
+    await adapter.upload(ctx, DOMAIN_TX, txes)
+    logger.log('model transactions inserted.', { count: txes.length })
     await progress(30)
@@ -159,8 +159,7 @@ export async function initModel (
     ctx.error('Failed to create workspace', { error: err })
     throw err
   } finally {
-    await storageAdapter.close()
-    _client.close()
+    await adapter.close()
   }
 }
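With the Mongo-specific `deleteMany` gone, deletion is now expressed through the adapter-neutral pair `rawFindAll` + `clean`, which works against either backend. The pattern in isolation (adapter interface stubbed for the sketch):

// Sketch: delete-by-query on top of a generic adapter.
interface MiniAdapter {
  rawFindAll: (domain: string, query: object) => Promise<Array<{ _id: string }>>
  clean: (ctx: unknown, domain: string, ids: string[]) => Promise<void>
}

async function deleteByQuery (ctx: unknown, adapter: MiniAdapter, domain: string, query: object): Promise<number> {
  const toRemove = await adapter.rawFindAll(domain, query)
  await adapter.clean(ctx, domain, toRemove.map((d) => d._id))
  return toRemove.length
}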
@@ -169,6 +168,7 @@ export async function updateModel (
   workspaceId: WorkspaceId,
   migrateOperations: [string, MigrateOperation][],
   connection: TxOperations,
+  pipeline: Pipeline,
   logger: ModelLogger = consoleModelLogger,
   progress: (value: number) => Promise<void>
 ): Promise<void> {
@@ -178,14 +178,7 @@ export async function updateModel (
   const sts = Array.from(groupByArray(states, (it) => it.plugin).entries())
   const migrateState = new Map(sts.map((it) => [it[0], new Set(it[1].map((q) => q.state))]))

-  const { mongodbUri } = prepareTools([])
-
-  const _client = getMongoClient(mongodbUri)
-  const client = await _client.getClient()
   try {
-    const db = getWorkspaceDB(client, workspaceId)
     let i = 0
     for (const op of migrateOperations) {
       logger.log('Migrate', { name: op[0] })
@@ -199,8 +192,7 @@ export async function updateModel (
       ctx,
       connection.getHierarchy(),
       connection.getModel(),
-      db,
-      logger,
+      pipeline,
       async (value) => {
         await progress(30 + (Math.min(value, 100) / 100) * 70)
       },
@@ -210,8 +202,6 @@ export async function updateModel (
   } catch (e: any) {
     logger.error('error', { error: e })
     throw e
-  } finally {
-    _client.close()
   }
 }
@@ -260,218 +250,203 @@ export async function initializeWorkspace (
 export async function upgradeModel (
   ctx: MeasureContext,
   transactorUrl: string,
-  workspaceId: WorkspaceId,
-  rawTxes: Tx[],
+  workspaceId: WorkspaceIdWithUrl,
+  txes: Tx[],
+  pipeline: Pipeline,
+  storageAdapter: AggregatorStorageAdapter,
   migrateOperations: [string, MigrateOperation][],
   logger: ModelLogger = consoleModelLogger,
   skipTxUpdate: boolean = false,
   progress: (value: number) => Promise<void>,
   forceIndexes: boolean = false
 ): Promise<Tx[]> {
-  const { mongodbUri, txes } = prepareTools(rawTxes)
   if (txes.some((tx) => tx.objectSpace !== core.space.Model)) {
     throw Error('Model txes must target only core.space.Model')
   }

-  const _client = getMongoClient(mongodbUri)
-  const client = await _client.getClient()
-  const storageConfig: StorageConfiguration = storageConfigFromEnv()
-  const storageAdapter = buildStorageFromConfig(storageConfig, mongodbUri)
-
-  try {
-    const db = getWorkspaceDB(client, workspaceId)
-
-    const prevModel = await fetchModelFromMongo(ctx, mongodbUri, workspaceId)
-    const { migrateClient: preMigrateClient } = await prepareMigrationClient(
-      db,
-      prevModel.hierarchy,
-      prevModel.modelDb,
-      logger,
-      storageAdapter,
-      workspaceId
-    )
+  const prevModel = await fetchModel(ctx, pipeline)
+  const { migrateClient: preMigrateClient } = await prepareMigrationClient(
+    pipeline,
+    prevModel.hierarchy,
+    prevModel.modelDb,
+    logger,
+    storageAdapter,
+    workspaceId
+  )

   await progress(0)
   await ctx.with('pre-migrate', {}, async (ctx) => {
     let i = 0

     for (const op of migrateOperations) {
       if (op[1].preMigrate === undefined) {
         continue
       }
       const preMigrate = op[1].preMigrate

       const t = Date.now()
       try {
         await ctx.with(op[0], {}, async (ctx) => {
           await preMigrate(preMigrateClient, logger)
         })
       } catch (err: any) {
         logger.error(`error during pre-migrate: ${op[0]} ${err.message}`, err)
         throw err
       }
       logger.log('pre-migrate:', { workspaceId: workspaceId.name, operation: op[0], time: Date.now() - t })
       await progress(((100 / migrateOperations.length) * i * 10) / 100)
       i++
     }
   })

   if (!skipTxUpdate) {
+    if (pipeline.context.lowLevelStorage === undefined) {
+      throw new PlatformError(unknownError('Low level storage is not available'))
+    }
     logger.log('removing model...', { workspaceId: workspaceId.name })
     await progress(10)
-    // we're preserving accounts (created by core.account.System).
-    const result = await ctx.with(
-      'mongo-delete',
-      {},
-      async () =>
-        await db.collection(DOMAIN_TX).deleteMany({
-          objectSpace: core.space.Model,
-          modifiedBy: core.account.System,
-          objectClass: { $nin: [contact.class.PersonAccount, 'contact:class:EmployeeAccount'] }
-        })
-    )
-    logger.log('transactions deleted.', { workspaceId: workspaceId.name, count: result.deletedCount })
-    logger.log('creating model...', { workspaceId: workspaceId.name })
-    const insert = await ctx.with(
-      'mongo-insert',
-      {},
-      async () => await db.collection(DOMAIN_TX).insertMany(txes as Document[])
-    )
-    logger.log('model transactions inserted.', { workspaceId: workspaceId.name, count: insert.insertedCount })
+    // we're preserving accounts (created by core.account.System).
+    const toRemove = await pipeline.findAll(ctx, core.class.Tx, {
+      objectSpace: core.space.Model,
+      modifiedBy: core.account.System,
+      objectClass: { $nin: [contact.class.PersonAccount, 'contact:class:EmployeeAccount'] }
+    })
+    await pipeline.context.lowLevelStorage.clean(
+      ctx,
+      DOMAIN_TX,
+      toRemove.map((p) => p._id)
+    )
+    logger.log('transactions deleted.', { workspaceId: workspaceId.name, count: toRemove.length })
+    logger.log('creating model...', { workspaceId: workspaceId.name })
+    await pipeline.context.lowLevelStorage.upload(ctx, DOMAIN_TX, txes)
+    logger.log('model transactions inserted.', { workspaceId: workspaceId.name, count: txes.length })
     await progress(20)
   }

   const newModel = [
     ...txes,
     ...Array.from(
       prevModel.model.filter(
         (it) =>
           it.modifiedBy !== core.account.System ||
           (it as TxCUD<Doc>).objectClass === contact.class.Person ||
           (it as TxCUD<Doc>).objectClass === 'contact:class:PersonAccount'
       )
     )
   ]

-  const { hierarchy, modelDb, model } = await fetchModelFromMongo(ctx, mongodbUri, workspaceId, newModel)
+  const { hierarchy, modelDb, model } = await fetchModel(ctx, pipeline, newModel)
   const { migrateClient, migrateState } = await prepareMigrationClient(
-    db,
+    pipeline,
     hierarchy,
     modelDb,
     logger,
     storageAdapter,
     workspaceId
   )

   const upgradeIndexes = async (): Promise<void> => {
     ctx.info('Migrate indexes')
     // Create update indexes
     await createUpdateIndexes(
       ctx,
       hierarchy,
       modelDb,
-      db,
-      logger,
+      pipeline,
       async (value) => {
         await progress(90 + (Math.min(value, 100) / 100) * 10)
       },
       workspaceId
     )
   }
   if (forceIndexes) {
     await upgradeIndexes()
   }

   await ctx.with('migrate', {}, async (ctx) => {
     let i = 0
     for (const op of migrateOperations) {
       const t = Date.now()
       try {
         await ctx.with(op[0], {}, async () => {
           await op[1].migrate(migrateClient, logger)
         })
       } catch (err: any) {
         logger.error(`error during migrate: ${op[0]} ${err.message}`, err)
         throw err
       }
       logger.log('migrate:', { workspaceId: workspaceId.name, operation: op[0], time: Date.now() - t })
       await progress(20 + ((100 / migrateOperations.length) * i * 20) / 100)
       i++
     }

     await tryMigrate(migrateClient, coreId, [
       {
         state: 'indexes-v4',
         func: upgradeIndexes
       }
     ])
   })

   logger.log('Apply upgrade operations', { workspaceId: workspaceId.name })

   let connection: (CoreClient & BackupClient) | undefined
   const getUpgradeClient = async (): Promise<CoreClient & BackupClient> =>
     await ctx.with('connect-platform', {}, async (ctx) => {
       if (connection !== undefined) {
         return connection
       }
       connection = (await connect(
         transactorUrl,
         workspaceId,
         undefined,
         {
           mode: 'backup',
           model: 'upgrade',
           admin: 'true'
         },
         model
       )) as CoreClient & BackupClient
       return connection
     })
   try {
     await ctx.with('upgrade', {}, async (ctx) => {
       let i = 0
       for (const op of migrateOperations) {
         const t = Date.now()
         await ctx.with(op[0], {}, async () => {
           await op[1].upgrade(migrateState, getUpgradeClient, logger)
         })
         logger.log('upgrade:', { operation: op[0], time: Date.now() - t, workspaceId: workspaceId.name })
         await progress(60 + ((100 / migrateOperations.length) * i * 30) / 100)
         i++
       }
     })

     if (connection === undefined) {
       // We need to send reboot for workspace
       ctx.info('send force close', { workspace: workspaceId.name, transactorUrl })
       const serverEndpoint = transactorUrl.replaceAll('wss://', 'https://').replace('ws://', 'http://')
       const token = generateToken(systemAccountEmail, workspaceId, { admin: 'true' })
       try {
         await fetch(
           serverEndpoint + `/api/v1/manage?token=${token}&operation=force-close&wsId=${toWorkspaceString(workspaceId)}`,
           {
             method: 'PUT'
           }
         )
       } catch (err: any) {
         // Ignore error if transactor is not yet ready
       }
     }
   } finally {
     await connection?.sendForceClose()
     await connection?.close()
   }
-    return model
-  } finally {
-    await storageAdapter.close()
-    _client.close()
-  }
+  return model
 }
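The `newModel` merge above encodes the preservation rule: system-generated model txes are replaced wholesale, while user-modified txes plus person/account documents survive the upgrade. The predicate on its own (a sketch; string literals stand in for the real class references):

// Sketch of the keep-on-upgrade predicate used when rebuilding newModel.
interface MiniTx { modifiedBy: string, objectClass?: string }

const keepOnUpgrade = (tx: MiniTx, systemAccount: string): boolean =>
  tx.modifiedBy !== systemAccount ||
  tx.objectClass === 'contact:class:Person' ||
  tx.objectClass === 'contact:class:PersonAccount'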
 async function prepareMigrationClient (
-  db: Db,
+  pipeline: Pipeline,
   hierarchy: Hierarchy,
   model: ModelDb,
   logger: ModelLogger,
@@ -481,7 +456,7 @@ async function prepareMigrationClient (
   migrateClient: MigrateClientImpl
   migrateState: Map<string, Set<string>>
 }> {
-  const migrateClient = new MigrateClientImpl(db, hierarchy, model, logger, storageAdapter, workspaceId)
+  const migrateClient = new MigrateClientImpl(pipeline, hierarchy, model, logger, storageAdapter, workspaceId)
   const states = await migrateClient.find<MigrationState>(DOMAIN_MIGRATION, { _class: core.class.MigrationState })
   const sts = Array.from(groupByArray(states, (it) => it.plugin).entries())
   const migrateState = new Map(sts.map((it) => [it[0], new Set(it[1].map((q) => q.state))]))
@@ -490,52 +465,51 @@ async function prepareMigrationClient (
   return { migrateClient, migrateState }
 }

-export async function fetchModelFromMongo (
+export async function fetchModel (
   ctx: MeasureContext,
-  mongodbUri: string,
-  workspaceId: WorkspaceId,
+  pipeline: Pipeline,
   model?: Tx[]
 ): Promise<{ hierarchy: Hierarchy, modelDb: ModelDb, model: Tx[] }> {
   const hierarchy = new Hierarchy()
   const modelDb = new ModelDb(hierarchy)

-  const txAdapter = await createMongoTxAdapter(ctx, hierarchy, mongodbUri, workspaceId, modelDb)
-
-  try {
-    model = model ?? (await ctx.with('get-model', {}, async (ctx) => await txAdapter.getModel(ctx)))
-
-    await ctx.with('build local model', {}, async () => {
-      for (const tx of model ?? []) {
-        try {
-          hierarchy.tx(tx)
-        } catch (err: any) {}
-      }
-      modelDb.addTxes(ctx, model as Tx[], false)
-    })
-  } finally {
-    await txAdapter.close()
+  if (model === undefined) {
+    const res = await ctx.with('get-model', {}, async (ctx) => await pipeline.loadModel(ctx, 0))
+    model = Array.isArray(res) ? res : res.transactions
   }
-  return { hierarchy, modelDb, model }

+  await ctx.with('build local model', {}, async () => {
+    for (const tx of model ?? []) {
+      try {
+        hierarchy.tx(tx)
+      } catch (err: any) {}
+    }
+    modelDb.addTxes(ctx, model as Tx[], false)
+  })
+  return { hierarchy, modelDb, model: model ?? [] }
 }
 async function createUpdateIndexes (
   ctx: MeasureContext,
   hierarchy: Hierarchy,
   model: ModelDb,
-  db: Db,
-  logger: ModelLogger,
+  pipeline: Pipeline,
   progress: (value: number) => Promise<void>,
   workspaceId: WorkspaceId
 ): Promise<void> {
   const domainHelper = new DomainIndexHelperImpl(ctx, hierarchy, model, workspaceId)
-  const dbHelper = new DBCollectionHelper(db)
-  await dbHelper.init()
   let completed = 0
   const allDomains = hierarchy.domains()
   for (const domain of allDomains) {
     if (domain === DOMAIN_MODEL || domain === DOMAIN_TRANSIENT || domain === DOMAIN_BENCHMARK) {
       continue
     }
+    const adapter = pipeline.context.adapterManager?.getAdapter(domain, false)
+    if (adapter === undefined) {
+      throw new PlatformError(unknownError(`Adapter for domain ${domain} not found`))
+    }
+    const dbHelper = adapter.helper()
+
     await domainHelper.checkDomain(ctx, domain, await dbHelper.estimatedCount(domain), dbHelper)
     completed++
     await progress((100 / allDomains.length) * completed)
@@ -4,71 +4,42 @@ import {
   Domain,
   FindOptions,
   Hierarchy,
-  isOperator,
+  LowLevelStorage,
+  MeasureMetricsContext,
   ModelDb,
   Ref,
-  SortingOrder,
   WorkspaceId
 } from '@hcengineering/core'
-import { MigrateUpdate, MigrationClient, MigrationIterator, MigrationResult, ModelLogger } from '@hcengineering/model'
-import { StorageAdapter } from '@hcengineering/server-core'
-import { Db, Document, Filter, Sort, UpdateFilter } from 'mongodb'
+import { MigrateUpdate, MigrationClient, MigrationIterator, ModelLogger } from '@hcengineering/model'
+import { Pipeline, StorageAdapter } from '@hcengineering/server-core'

 /**
  * Upgrade client implementation.
  */
 export class MigrateClientImpl implements MigrationClient {
+  private readonly lowLevel: LowLevelStorage
   constructor (
-    readonly db: Db,
+    readonly pipeline: Pipeline,
     readonly hierarchy: Hierarchy,
     readonly model: ModelDb,
     readonly logger: ModelLogger,
     readonly storageAdapter: StorageAdapter,
     readonly workspaceId: WorkspaceId
-  ) {}
+  ) {
+    if (this.pipeline.context.lowLevelStorage === undefined) {
+      throw new Error('lowLevelStorage is not defined')
+    }
+    this.lowLevel = this.pipeline.context.lowLevelStorage
+  }

   migrateState = new Map<string, Set<string>>()

-  private translateQuery<T extends Doc>(query: DocumentQuery<T>): Filter<Document> {
-    const translated: any = {}
-    for (const key in query) {
-      const value = (query as any)[key]
-      if (value !== null && typeof value === 'object') {
-        const keys = Object.keys(value)
-        if (keys[0] === '$like') {
-          const pattern = value.$like as string
-          translated[key] = {
-            $regex: `^${pattern.split('%').join('.*')}$`,
-            $options: 'i'
-          }
-          continue
-        }
-      }
-      translated[key] = value
-    }
-    return translated
-  }
-
   async find<T extends Doc>(
     domain: Domain,
     query: DocumentQuery<T>,
     options?: FindOptions<T> | undefined
   ): Promise<T[]> {
-    let cursor = this.db.collection(domain).find<T>(this.translateQuery(query))
-    if (options?.limit !== undefined) {
-      cursor = cursor.limit(options.limit)
-    }
-    if (options !== null && options !== undefined) {
-      if (options.sort !== undefined) {
-        const sort: Sort = {}
-        for (const key in options.sort) {
-          const order = options.sort[key] === SortingOrder.Ascending ? 1 : -1
-          sort[key] = order
-        }
-        cursor = cursor.sort(sort)
-      }
-    }
-    return await cursor.toArray()
+    return await this.lowLevel.rawFindAll(domain, query, options)
   }

   async traverse<T extends Doc>(
@@ -76,68 +47,13 @@ export class MigrateClientImpl implements MigrationClient {
     query: DocumentQuery<T>,
     options?: FindOptions<T> | undefined
   ): Promise<MigrationIterator<T>> {
-    let cursor = this.db.collection(domain).find<T>(this.translateQuery(query))
-    if (options?.limit !== undefined) {
-      cursor = cursor.limit(options.limit)
-    }
-    if (options !== null && options !== undefined) {
-      if (options.sort !== undefined) {
-        const sort: Sort = {}
-        for (const key in options.sort) {
-          const order = options.sort[key] === SortingOrder.Ascending ? 1 : -1
-          sort[key] = order
-        }
-        cursor = cursor.sort(sort)
-      }
-    }
-    return {
-      next: async (size: number) => {
-        const docs: T[] = []
-        while (docs.length < size && (await cursor.hasNext())) {
-          try {
-            const d = await cursor.next()
-            if (d !== null) {
-              docs.push(d)
-            } else {
-              break
-            }
-          } catch (err) {
-            console.error(err)
-            return null
-          }
-        }
-        return docs
-      },
-      close: async () => {
-        await cursor.close()
-      }
-    }
+    return await this.lowLevel.traverse(domain, query, options)
   }

-  async update<T extends Doc>(
-    domain: Domain,
-    query: DocumentQuery<T>,
-    operations: MigrateUpdate<T>
-  ): Promise<MigrationResult> {
+  async update<T extends Doc>(domain: Domain, query: DocumentQuery<T>, operations: MigrateUpdate<T>): Promise<void> {
     const t = Date.now()
     try {
-      if (isOperator(operations)) {
-        if (operations?.$set !== undefined) {
-          operations.$set['%hash%'] = null
-        } else {
-          operations = { ...operations, $set: { '%hash%': null } }
-        }
-        const result = await this.db
-          .collection(domain)
-          .updateMany(this.translateQuery(query), { ...operations } as unknown as UpdateFilter<Document>)
-        return { matched: result.matchedCount, updated: result.modifiedCount }
-      } else {
-        const result = await this.db
-          .collection(domain)
-          .updateMany(this.translateQuery(query), { $set: { ...operations, '%hash%': null } })
-        return { matched: result.matchedCount, updated: result.modifiedCount }
-      }
+      await this.lowLevel.rawUpdate(domain, query, operations)
     } finally {
       if (Date.now() - t > 1000) {
         this.logger.log(`update${Date.now() - t > 5000 ? 'slow' : ''}`, { domain, query, time: Date.now() - t })
@@ -148,60 +64,44 @@ export class MigrateClientImpl implements MigrationClient {
   async bulk<T extends Doc>(
     domain: Domain,
     operations: { filter: DocumentQuery<T>, update: MigrateUpdate<T> }[]
-  ): Promise<MigrationResult> {
-    const result = await this.db.collection(domain).bulkWrite(
-      operations.map((it) => ({
-        updateOne: {
-          filter: this.translateQuery(it.filter),
-          update: { $set: { ...it.update, '%hash%': null } }
-        }
-      }))
-    )
-    return { matched: result.matchedCount, updated: result.modifiedCount }
+  ): Promise<void> {
+    for (const ops of operations) {
+      await this.lowLevel.rawUpdate(domain, ops.filter, ops.update)
+    }
   }

-  async move<T extends Doc>(
-    sourceDomain: Domain,
-    query: DocumentQuery<T>,
-    targetDomain: Domain
-  ): Promise<MigrationResult> {
+  async move<T extends Doc>(sourceDomain: Domain, query: DocumentQuery<T>, targetDomain: Domain): Promise<void> {
+    const ctx = new MeasureMetricsContext('move', {})
     this.logger.log('move', { sourceDomain, query })
-    const q = this.translateQuery(query)
-    const cursor = this.db.collection(sourceDomain).find<T>(q)
-    const target = this.db.collection(targetDomain)
-    const result: MigrationResult = {
-      matched: 0,
-      updated: 0
-    }
-    let doc: Document | null
-    while ((doc = await cursor.next()) != null) {
-      if ('%hash%' in doc) {
-        delete doc['%hash%']
-      }
-      await target.insertOne(doc)
-      result.matched++
-      result.updated++
-    }
-    await this.db.collection(sourceDomain).deleteMany(q)
-    return result
+    while (true) {
+      const source = await this.lowLevel.rawFindAll(sourceDomain, query, { limit: 500 })
+      if (source.length === 0) break
+      await this.lowLevel.upload(ctx, targetDomain, source)
+      await this.lowLevel.clean(
+        ctx,
+        sourceDomain,
+        source.map((p) => p._id)
+      )
+    }
   }

   async create<T extends Doc>(domain: Domain, doc: T | T[]): Promise<void> {
-    if (Array.isArray(doc)) {
-      if (doc.length > 0) {
-        await this.db.collection(domain).insertMany(doc as Document[])
-      }
-    } else {
-      await this.db.collection(domain).insertOne(doc as Document)
-    }
+    const ctx = new MeasureMetricsContext('create', {})
+    await this.lowLevel.upload(ctx, domain, Array.isArray(doc) ? doc : [doc])
   }

   async delete<T extends Doc>(domain: Domain, _id: Ref<T>): Promise<void> {
-    await this.db.collection(domain).deleteOne({ _id })
+    const ctx = new MeasureMetricsContext('delete', {})
+    await this.lowLevel.clean(ctx, domain, [_id])
   }

   async deleteMany<T extends Doc>(domain: Domain, query: DocumentQuery<T>): Promise<void> {
-    await this.db.collection<Doc>(domain).deleteMany(query as any)
+    const ctx = new MeasureMetricsContext('deleteMany', {})
+    const docs = await this.lowLevel.rawFindAll(domain, query)
+    await this.lowLevel.clean(
+      ctx,
+      domain,
+      docs.map((d) => d._id)
+    )
   }
 }
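The rewritten `move` streams documents in fixed-size batches instead of holding a Mongo cursor: fetch up to 500 matching docs, upload them to the target domain, clean them from the source, and repeat until the query returns nothing (the loop drains because `clean` removes each batch from the source). The same loop in isolation (low-level storage stubbed for the sketch):

// Sketch of the batched move loop used above.
interface MiniLowLevel {
  rawFindAll: (domain: string, query: object, options?: { limit?: number }) => Promise<Array<{ _id: string }>>
  upload: (ctx: unknown, domain: string, docs: Array<{ _id: string }>) => Promise<void>
  clean: (ctx: unknown, domain: string, ids: string[]) => Promise<void>
}

async function moveAll (ctx: unknown, ll: MiniLowLevel, source: string, target: string, query: object): Promise<void> {
  while (true) {
    const batch = await ll.rawFindAll(source, query, { limit: 500 })
    if (batch.length === 0) break
    await ll.upload(ctx, target, batch)
    await ll.clean(ctx, source, batch.map((d) => d._id))
  }
}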
@@ -1,5 +1,7 @@
 import core, {
   getWorkspaceId,
+  Hierarchy,
+  ModelDb,
   systemAccountEmail,
   TxOperations,
   versionToString,
@@ -24,6 +26,8 @@ import {
 import {
   createIndexStages,
   createServerPipeline,
+  getServerPipeline,
+  getTxAdapterFactory,
   registerServerPlugins,
   registerStringLoaders
 } from '@hcengineering/server-pipeline'
@@ -118,31 +122,46 @@ export async function createWorkspace (
   await handleWsEvent?.('create-started', version, 10)

-  await childLogger.withLog('init-workspace', {}, async (ctx) => {
-    const deleteModelFirst = workspaceInfo.mode === 'creating'
-
-    await initModel(
-      ctx,
-      wsId,
-      txes,
-      ctxModellogger,
-      async (value) => {
-        await handleWsEvent?.('progress', version, 10 + Math.round((Math.min(value, 100) / 100) * 10))
-      },
-      deleteModelFirst
-    )
-  })
-
-  const { mongodbUri } = prepareTools([])
+  const { mongodbUri, dbUrl } = prepareTools([])
+  const dbUrls = dbUrl !== undefined ? `${dbUrl};${mongodbUri}` : mongodbUri
+  const hierarchy = new Hierarchy()
+  const modelDb = new ModelDb(hierarchy)

   const storageConfig: StorageConfiguration = storageConfigFromEnv()
   const storageAdapter = buildStorageFromConfig(storageConfig, mongodbUri)

   try {
+    const txFactory = getTxAdapterFactory(ctx, dbUrls, wsUrl, null, {
+      externalStorage: storageAdapter,
+      fullTextUrl: 'http://localhost:9200',
+      indexParallel: 0,
+      indexProcessing: 0,
+      rekoniUrl: '',
+      usePassedCtx: true
+    })
+    const txAdapter = await txFactory(ctx, hierarchy, dbUrl ?? mongodbUri, wsId, modelDb, storageAdapter)
+
+    await childLogger.withLog('init-workspace', {}, async (ctx) => {
+      const deleteModelFirst = workspaceInfo.mode === 'creating'
+
+      await initModel(
+        ctx,
+        wsId,
+        txes,
+        txAdapter,
+        storageAdapter,
+        ctxModellogger,
+        async (value) => {
+          await handleWsEvent?.('progress', version, 10 + Math.round((Math.min(value, 100) / 100) * 10))
+        },
+        deleteModelFirst
+      )
+    })
+
     registerServerPlugins()
     registerStringLoaders()
     const factory: PipelineFactory = createServerPipeline(
       ctx,
-      mongodbUri,
+      dbUrls,
       {
         externalStorage: storageAdapter,
         fullTextUrl: 'http://localhost:9200',
@@ -174,7 +193,7 @@ export async function createWorkspace (
     const pipeline = await factory(ctx, wsUrl, true, () => {}, null)
     const client = new TxOperations(wrapPipeline(ctx, pipeline, wsUrl), core.account.System)

-    await updateModel(ctx, wsId, migrationOperation, client, ctxModellogger, async (value) => {
+    await updateModel(ctx, wsId, migrationOperation, client, pipeline, ctxModellogger, async (value) => {
       await handleWsEvent?.('progress', version, 20 + Math.round((Math.min(value, 100) / 100) * 10))
     })

@@ -221,13 +240,19 @@ export async function upgradeWorkspace (
   if (ws?.version !== undefined && !forceUpdate && versionStr === versionToString(ws.version)) {
     return
   }

   ctx.info('upgrading', {
     force: forceUpdate,
     currentVersion: ws?.version !== undefined ? versionToString(ws.version) : '',
     toVersion: versionStr,
     workspace: ws.workspace
   })
-  const wsId = getWorkspaceId(ws.workspace)
+  const wsId: WorkspaceIdWithUrl = {
+    name: ws.workspace,
+    workspaceName: ws.workspaceName ?? '',
+    workspaceUrl: ws.workspaceUrl ?? ''
+  }

   const token = generateToken(systemAccountEmail, wsId, { service: 'workspace' })
   let progress = 0
@@ -235,14 +260,38 @@ export async function upgradeWorkspace (
     void handleWsEvent?.('progress', version, progress)
   }, 5000)

+  const { mongodbUri, dbUrl } = prepareTools([])
+
+  const wsUrl: WorkspaceIdWithUrl = {
+    name: ws.workspace,
+    workspaceName: ws.workspaceName ?? '',
+    workspaceUrl: ws.workspaceUrl ?? ''
+  }
+  const { pipeline, storageAdapter } = await getServerPipeline(ctx, mongodbUri, dbUrl, wsUrl)
+
+  const contextData = new SessionDataImpl(
+    systemAccountEmail,
+    'backup',
+    true,
+    { targets: {}, txes: [] },
+    wsUrl,
+    null,
+    false,
+    new Map(),
+    new Map(),
+    pipeline.context.modelDb
+  )
+  ctx.contextData = contextData
   try {
     await handleWsEvent?.('upgrade-started', version, 0)

     await upgradeModel(
       ctx,
       await getTransactorEndpoint(token, external ? 'external' : 'internal'),
-      getWorkspaceId(ws.workspace),
+      wsId,
       txes,
+      pipeline,
+      storageAdapter,
       migrationOperation,
       logger,
       false,
@@ -254,8 +303,11 @@ export async function upgradeWorkspace (
     await handleWsEvent?.('upgrade-done', version, 100, '')
   } catch (err: any) {
+    ctx.error('upgrade-failed', { message: err.message })
     await handleWsEvent?.('ping', version, 0, `Upgrade failed: ${err.message}`)
   } finally {
+    await pipeline.close()
+    await storageAdapter.close()
     clearInterval(updateProgressHandle)
   }
 }
@@ -0,0 +1,10 @@
services:
account:
environment:
- DB_URL=postgresql://postgres:example@postgres:5432
transactor:
environment:
- MONGO_URL=postgresql://postgres:example@postgres:5432;mongodb://mongodb:27018
workspace:
environment:
- DB_URL=postgresql://postgres:example@postgres:5432
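Note the transactor line: its MONGO_URL carries the `;`-joined Postgres+Mongo pair that `getConfig` splits, while the account and workspace services receive the Postgres URL separately through DB_URL. Pulling the pair apart (a sketch mirroring the split shown earlier):

// Sketch: splitting the combined URL the transactor receives above.
const pair = 'postgresql://postgres:example@postgres:5432;mongodb://mongodb:27018'
const [primary, mongo] = pair.split(';')
// primary -> Postgres (Tx/Main adapters); mongo -> Mongo (StorageData)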
@@ -8,6 +8,13 @@ services:
     ports:
       - 27018:27018
     restart: unless-stopped
+  postgres:
+    image: postgres
+    environment:
+      - POSTGRES_PASSWORD=example
+    ports:
+      - 5433:5432
+    restart: unless-stopped
   minio:
     image: 'minio/minio'
     command: server /data --address ":9000" --console-address ":9001"
tests/prepare-pg.sh (new executable file)
@@ -0,0 +1,30 @@
#!/usr/bin/env bash
docker compose -p sanity kill
docker compose -p sanity down --volumes
docker compose -p sanity up -d --force-recreate --renew-anon-volumes
docker_exit=$?
if [ ${docker_exit} -eq 0 ]; then
echo "Container started successfully"
else
echo "Container started with errors"
exit ${docker_exit}
fi
if [ "x$DO_CLEAN" == 'xtrue' ]; then
echo 'Do docker Clean'
docker system prune -a -f
fi
./wait-elastic.sh 9201
# Create workspace record in accounts
./tool-pg.sh create-workspace sanity-ws -w SanityTest
# Create user record in accounts
./tool-pg.sh create-account user1 -f John -l Appleseed -p 1234
./tool-pg.sh create-account user2 -f Kainin -l Dirak -p 1234
# Confirm user emails
./tool-pg.sh confirm-email user1
./tool-pg.sh confirm-email user2
./restore-pg.sh
@@ -2,7 +2,7 @@
 docker compose -p sanity kill
 docker compose -p sanity down --volumes
-docker compose -p sanity up elastic mongodb -d --force-recreate --renew-anon-volumes
+docker compose -f docker-compose.yaml -p sanity up elastic mongodb postgres -d --force-recreate --renew-anon-volumes
 docker_exit=$?
 if [ ${docker_exit} -eq 0 ]; then
   echo "Container started successfully"
@@ -2,7 +2,7 @@
 docker compose -p sanity kill
 docker compose -p sanity down --volumes
-docker compose -p sanity up -d --force-recreate --renew-anon-volumes
+docker compose -p sanity -f docker-compose.yaml up -d --force-recreate --renew-anon-volumes
 docker_exit=$?
 if [ ${docker_exit} -eq 0 ]; then
   echo "Container started successfully"
tests/restore-pg.sh (new executable file)
@@ -0,0 +1,18 @@
#!/usr/bin/env bash
# Restore workspace contents into the database / elastic
./tool-pg.sh backup-restore ./sanity-ws sanity-ws
./tool-pg.sh upgrade-workspace sanity-ws
# Re-assign user to workspace.
./tool-pg.sh assign-workspace user1 sanity-ws
./tool-pg.sh assign-workspace user2 sanity-ws
./tool-pg.sh set-user-role user1 sanity-ws OWNER
./tool-pg.sh set-user-role user2 sanity-ws OWNER
./tool-pg.sh configure sanity-ws --enable=*
./tool-pg.sh configure sanity-ws --list
# setup issue createdOn for yesterday
./tool-pg.sh change-field sanity-ws --objectId 65e47f1f1b875b51e3b4b983 --objectClass tracker:class:Issue --attribute createdOn --value $(($(date +%s)*1000 - 86400000)) --type number --domain task
@@ -84,7 +84,6 @@ test.describe('candidate/talents tests', () => {
     const sourceTalent1 = 'SourceTalent1'
     await talentDetailsPage.addSource(sourceTalent1)
     await talentDetailsPage.addSocialLinks('Phone', '123123213213')
-    await talentDetailsPage.checkSocialLinks('Phone', '123123213213')

     // talent 2
     await navigationMenuPage.clickButtonTalents()
@@ -96,7 +95,6 @@ test.describe('candidate/talents tests', () => {
     const sourceTalent2 = 'SourceTalent2'
     await talentDetailsPage.addSource(sourceTalent2)
     await talentDetailsPage.addSocialLinks('Email', 'test-merge-2@gmail.com')
-    await talentDetailsPage.checkSocialLinks('Email', 'test-merge-2@gmail.com')

     // merge
     await navigationMenuPage.clickButtonTalents()
tests/tool-pg.sh (new executable file)
@@ -0,0 +1,13 @@
#!/usr/bin/env bash
export MINIO_ACCESS_KEY=minioadmin
export MINIO_SECRET_KEY=minioadmin
export MINIO_ENDPOINT=localhost:9002
export ACCOUNTS_URL=http://localhost:3003
export TRANSACTOR_URL=ws://localhost:3334
export MONGO_URL=mongodb://localhost:27018
export ELASTIC_URL=http://localhost:9201
export SERVER_SECRET=secret
export DB_URL=postgresql://postgres:example@localhost:5433
node ../dev/tool/bundle/bundle.js "$@"
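tool-pg.sh mirrors the existing tool script but points DB_URL at the Postgres container from tests/docker-compose.yaml (host port 5433); inside the bundle these variables land in `prepareTools`. The validation in miniature (a sketch of the check shown earlier in this diff; the exact error message is not shown there and is illustrative):

// Sketch: the env checks the bundled tool performs on startup.
const mongoUrl = process.env.MONGO_URL
if (mongoUrl === undefined) {
  console.error('MONGO_URL is required') // real message omitted in the diff
  process.exit(1)
}
const pgUrl = process.env.DB_URL // optional; set by this script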