UBERF-4527: Extra logging for client (#4133)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
This commit is contained in:
Andrey Sobolev 2023-12-04 23:03:21 +07:00 committed by GitHub
parent 4eb07074d0
commit c9e1d9fd51
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
30 changed files with 410 additions and 307 deletions

View File

@ -150,7 +150,7 @@ jobs:
- name: Prepare server
run: |
cd ./tests
./prepare.sh
./prepare-tests.sh
- name: Testing...
run: node common/scripts/install-run-rush.js test --verbose
env:

View File

@ -23,8 +23,8 @@ Several teams building products on top of the Platform including [Zenflow](https
- [Node.js](https://nodejs.org/en/download/)
- [Docker](https://docs.docker.com/get-docker/)
- [Docker Compose](https://docs.docker.com/compose/install/)
- Make sure what docker and docker-compose commands are available in your terminal (e.g. `docker --version` and `docker-compose --version`).
- Make sure what docker and docker-compose commands can be executed without sudo (e.g. `docker run hello-world` and `docker-compose --version`).
- Make sure that docker and `docker compose` commands are available in your terminal (e.g. `docker --version` and `docker compose --version`).
- Make sure that docker and `docker compose` commands can be executed without sudo (e.g. `docker run hello-world` and `docker compose --version`).
## Fast start
@ -53,7 +53,7 @@ cd ./dev/
rush build # Will build all the required packages.
rush bundle # Will prepare bundles.
rush docker:build # Will build Docker containers for all applications in the local Docker environment.
docker-compose up -d --force-recreate # Will set up all the containers
docker compose up -d --force-recreate # Will set up all the containers
```
or just:

File diff suppressed because it is too large Load Diff

View File

@ -6,7 +6,7 @@
rush build
rush bundle
rush docker:build
docker-compose up -d --force-recreate
docker compose up -d --force-recreate
```
## Running ElasticVUE to check elastic instance

View File

@ -1,4 +1,4 @@
FROM node:18
FROM node:20
WORKDIR /usr/src/app

View File

@ -38,7 +38,7 @@ export const issuesOptions = (kanban: boolean): ViewOptionsModel => ({
],
orderBy: [
['status', SortingOrder.Ascending],
['priority', SortingOrder.Descending],
['priority', SortingOrder.Ascending],
['modifiedOn', SortingOrder.Descending],
['createdOn', SortingOrder.Descending],
['dueDate', SortingOrder.Ascending],

View File

@ -249,7 +249,17 @@ class Connection implements ClientConnection {
const request = this.requests.get(resp.id)
this.requests.delete(resp.id)
if (resp.error !== undefined) {
console.log('ERROR', promise, request, resp.id)
console.log(
'ERROR',
'request:',
request?.method,
'response-id:',
resp.id,
'error: ',
resp.error,
'result: ',
resp.result
)
promise.reject(new PlatformError(resp.error))
} else {
promise.resolve(resp.result)

View File

@ -1,4 +1,4 @@
FROM node:18-alpine
FROM node:20-alpine
WORKDIR /usr/src/app

View File

@ -48,7 +48,7 @@
"@hcengineering/core": "^0.6.28",
"mongodb": "^4.11.0",
"koa": "^2.13.1",
"koa-router": "^10.1.1",
"koa-router": "^12.0.1",
"koa-bodyparser": "^4.3.0",
"@koa/cors": "^3.1.0",
"@hcengineering/server-tool": "^0.6.0",

View File

@ -1,5 +1,5 @@
FROM node:18-alpine
FROM node:20-alpine
WORKDIR /usr/src/app

View File

@ -1,4 +1,4 @@
FROM node:18-alpine
FROM node:20-alpine
RUN apk add dumb-init
ENV NODE_ENV production

View File

@ -1,4 +1,4 @@
FROM node:18
FROM node:20
ENV NODE_ENV production

View File

@ -1,4 +1,4 @@
FROM node:16
FROM node:20
WORKDIR /usr/src/app

View File

@ -2,5 +2,5 @@ cd ./dev/
rush build # Will build all the required packages.
rush bundle # Will prepare bundles.
rush docker:build # Will build Docker containers for all applications in the local Docker environment.
docker-compose up -d --force-recreate # Will set up all the containers
docker compose up -d --force-recreate # Will set up all the containers

View File

@ -653,26 +653,32 @@ export async function restore (
let loaded = 0
let el = 0
let chunks = 0
while (true) {
const st = Date.now()
const it = await connection.loadChunk(c, idx)
chunks++
try {
while (true) {
const st = Date.now()
const it = await connection.loadChunk(c, idx)
chunks++
idx = it.idx
el += Date.now() - st
idx = it.idx
el += Date.now() - st
for (const [_id, hash] of Object.entries(it.docs)) {
serverChangeset.set(_id as Ref<Doc>, hash)
loaded++
for (const [_id, hash] of Object.entries(it.docs)) {
serverChangeset.set(_id as Ref<Doc>, hash)
loaded++
}
if (el > 2500) {
console.log(' loaded from server', loaded, el, chunks)
el = 0
chunks = 0
}
if (it.finished) {
break
}
}
if (el > 2500) {
console.log(' loaded from server', loaded, el, chunks)
el = 0
chunks = 0
}
if (it.finished) {
break
} finally {
if (idx !== undefined) {
await connection.closeChunk(idx)
}
}
console.log(' loaded', loaded)
@ -815,19 +821,23 @@ export async function restore (
try {
for (const c of domains) {
console.log('loading server changeset for', c)
console.log('processing domain', c)
let retry = 5
let delay = 1
while (retry > 0) {
retry--
try {
await processDomain(c)
if (delay > 1) {
console.log('retry-success')
}
break
} catch (err: any) {
if (retry === 0) {
console.log('error', err)
} else {
console.log('Wait for few seconds for elastic')
await new Promise((resolve) => setTimeout(resolve, 1000))
console.error('error', err)
if (retry !== 0) {
console.log('cool-down to retry', delay)
await new Promise((resolve) => setTimeout(resolve, delay * 1000))
delay++
}
}
}

View File

@ -49,6 +49,7 @@ import { Readable } from 'stream'
export interface SessionContext extends MeasureContext {
userEmail: string
sessionId: string
admin?: boolean
}
/**

View File

@ -138,7 +138,7 @@ class ElasticDataAdapter implements DbAdapter {
}
},
close: async () => {
if (resp !== null) {
if (resp !== null && (resp.body as SearchResponse)?._scroll_id != null) {
await this.client.clearScroll({
scroll_id: (resp.body as SearchResponse)._scroll_id
})

View File

@ -1,4 +1,4 @@
FROM node:18-alpine
FROM node:20-alpine
RUN apk add dumb-init
ENV NODE_ENV production

View File

@ -35,7 +35,7 @@ export async function getUser (storage: ServerStorage, ctx: SessionContext): Pro
}
const account = (await storage.modelDb.findAll(core.class.Account, { email: ctx.userEmail }))[0]
if (account === undefined) {
if (ctx.userEmail === systemAccountEmail) {
if (ctx.userEmail === systemAccountEmail || ctx.admin === true) {
return {
_id: core.account.System,
_class: core.class.Account,

View File

@ -110,12 +110,14 @@ export class ClientSession implements Session {
this.current.find++
const context = ctx as SessionContext
context.userEmail = this.token.email
context.admin = this.token.extra?.admin === 'true'
return await this._pipeline.findAll(context, _class, query, options)
}
async searchFulltext (ctx: MeasureContext, query: SearchQuery, options: SearchOptions): Promise<SearchResult> {
const context = ctx as SessionContext
context.userEmail = this.token.email
context.admin = this.token.extra?.admin === 'true'
return await this._pipeline.searchFulltext(context, query, options)
}
@ -124,6 +126,7 @@ export class ClientSession implements Session {
this.current.tx++
const context = ctx as SessionContext
context.userEmail = this.token.email
context.admin = this.token.extra?.admin === 'true'
const [result, derived, target] = await this._pipeline.tx(context, tx)
let shouldBroadcast = true

View File

@ -14,27 +14,27 @@
//
import core, {
TxFactory,
WorkspaceEvent,
generateId,
toWorkspaceString,
type MeasureContext,
type Ref,
type Space,
type Tx,
TxFactory,
type TxWorkspaceEvent,
WorkspaceEvent,
type WorkspaceId,
generateId,
toWorkspaceString
type WorkspaceId
} from '@hcengineering/core'
import { unknownError } from '@hcengineering/platform'
import { type HelloRequest, type HelloResponse, type Response, readRequest } from '@hcengineering/rpc'
import { readRequest, type HelloRequest, type HelloResponse, type Response } from '@hcengineering/rpc'
import type { Pipeline, SessionContext } from '@hcengineering/server-core'
import { type Token } from '@hcengineering/server-token'
// import WebSocket, { RawData } from 'ws'
import {
LOGGING_ENABLED,
type BroadcastCall,
type ConnectionSocket,
LOGGING_ENABLED,
type PipelineFactory,
type ServerFactory,
type Session,
@ -48,6 +48,16 @@ function timeoutPromise (time: number): Promise<void> {
})
}
/**
* @public
*/
export interface Timeouts {
// Timeout preferences
pingTimeout: number // Default 1 second
shutdownWarmTimeout: number // Default 1 minute
reconnectTimeout: number // Default 3 seconds
}
class TSessionManager implements SessionManager {
readonly workspaces = new Map<string, Workspace>()
checkInterval: any
@ -60,11 +70,12 @@ class TSessionManager implements SessionManager {
constructor (
readonly ctx: MeasureContext,
readonly sessionFactory: (token: Token, pipeline: Pipeline, broadcast: BroadcastCall) => Session
readonly sessionFactory: (token: Token, pipeline: Pipeline, broadcast: BroadcastCall) => Session,
readonly timeouts: Timeouts
) {
this.checkInterval = setInterval(() => {
this.handleInterval()
}, 1000)
}, timeouts.pingTimeout)
}
scheduleMaintenance (timeMinutes: number): void {
@ -225,9 +236,6 @@ class TSessionManager implements SessionManager {
workspace.sessions = new Map()
workspace.upgrade = token.extra?.model === 'upgrade'
}
if (LOGGING_ENABLED) {
console.log(token.workspace.name, 'no sessions for workspace', wsString)
}
// Re-create pipeline.
workspace.pipeline = pipelineFactory(ctx, token.workspace, true, (tx, targets) => {
this.broadcastAll(workspace, tx, targets)
@ -314,9 +322,6 @@ class TSessionManager implements SessionManager {
code: number,
reason: string
): Promise<void> {
if (this.checkInterval !== undefined) {
clearInterval(this.checkInterval)
}
// if (LOGGING_ENABLED) console.log(workspaceId.name, `closing websocket, code: ${code}, reason: ${reason}`)
const wsid = toWorkspaceString(workspaceId)
const workspace = this.workspaces.get(wsid)
@ -336,7 +341,7 @@ class TSessionManager implements SessionManager {
setTimeout(() => {
this.reconnectIds.delete(sessionRef.session.sessionId)
}, 3000)
}, this.timeouts.reconnectTimeout)
try {
sessionRef.socket.close()
} catch (err) {
@ -348,11 +353,11 @@ class TSessionManager implements SessionManager {
await this.setStatus(ctx, sessionRef.session, false)
}
if (!workspace.upgrade) {
// Wait few second's for new client to appear before closing workspace.
// Wait some time for new client to appear before closing workspace.
if (workspace.sessions.size === 0) {
setTimeout(() => {
void this.performWorkspaceCloseCheck(workspace, workspaceId, wsid)
}, 5000)
}, this.timeouts.shutdownWarmTimeout)
}
} else {
await this.performWorkspaceCloseCheck(workspace, workspaceId, wsid)
@ -409,6 +414,9 @@ class TSessionManager implements SessionManager {
}
async closeWorkspaces (ctx: MeasureContext): Promise<void> {
if (this.checkInterval !== undefined) {
clearInterval(this.checkInterval)
}
for (const w of this.workspaces) {
await this.closeAll(ctx, w[0], w[1], 1, 'shutdown')
}
@ -552,7 +560,6 @@ class TSessionManager implements SessionManager {
const f = (service as any)[request.method]
try {
const params = [...request.params]
const result = await ctx.with('call', {}, async (callTx) => f.apply(service, [callTx, ...params]))
const resp: Response<any> = { id: request.id, result }
@ -569,7 +576,8 @@ class TSessionManager implements SessionManager {
if (LOGGING_ENABLED) console.error(err)
const resp: Response<any> = {
id: request.id,
error: unknownError(err)
error: unknownError(err),
result: JSON.parse(JSON.stringify(err?.stack))
}
await ws.send(ctx, resp, service.binaryResponseMode, service.useCompression)
}
@ -624,9 +632,13 @@ export function start (
productId: string
serverFactory: ServerFactory
enableCompression?: boolean
}
} & Partial<Timeouts>
): () => Promise<void> {
const sessions = new TSessionManager(ctx, opt.sessionFactory)
const sessions = new TSessionManager(ctx, opt.sessionFactory, {
pingTimeout: opt.pingTimeout ?? 1000,
shutdownWarmTimeout: opt.shutdownWarmTimeout ?? 60 * 1000,
reconnectTimeout: 3000
})
return opt.serverFactory(
sessions,
(rctx, service, ws, msg, workspace) => sessions.handleRequest(rctx, service, ws, msg, workspace),

View File

@ -6,4 +6,4 @@ rush bundle
rush docker:build
# Re-assign user to workspace.
docker-compose -p sanity up $1 -d --force-recreate
docker compose -p sanity up $1 -d --force-recreate

7
tests/prepare-tests.sh Executable file
View File

@ -0,0 +1,7 @@
#!/bin/bash
# Prepare the sanity test environment: reset the docker compose project
# "sanity" and bring up only the backing services (Elasticsearch + MongoDB)
# with fresh volumes, then block until Elastic is reachable.

# Kill any containers left over from a previous run.
docker compose -p sanity kill
# Remove containers and their volumes so tests start from a clean state.
docker compose -p sanity down --volumes
# Recreate only the elastic and mongodb services; --renew-anon-volumes
# guarantees no stale data survives in anonymous volumes.
docker compose -p sanity up elastic mongodb -d --force-recreate --renew-anon-volumes
# Wait until Elasticsearch answers on port 9201.
# NOTE(review): assumes wait-elastic.sh is in the current working directory —
# verify against how CI invokes this script.
./wait-elastic.sh 9201

View File

@ -1,8 +1,8 @@
#!/bin/bash
docker-compose -p sanity kill
docker-compose -p sanity down --volumes
docker-compose -p sanity up -d --force-recreate --renew-anon-volumes
docker compose -p sanity kill
docker compose -p sanity down --volumes
docker compose -p sanity up -d --force-recreate --renew-anon-volumes
./wait-elastic.sh 9201

View File

@ -13,9 +13,9 @@
"ci": "playwright install --with-deps chromium",
"test": "",
"uitest": "playwright test --browser chromium --reporter list,html,allure-playwright -c ./tests/playwright.config.ts",
"dev-uitest": "cross-env PLATFORM_URI=http://localhost:8080 PLATFORM_TRANSACTOR=ws://localhost:3333 SETTING=storage-dev.json playwright test --browser chromium --reporter list,html,allure-playwright -c ./tests/playwright.config.ts",
"dev-uitest": "cross-env PLATFORM_URI=http://localhost:8080 PLATFORM_TRANSACTOR=ws://localhost:3333 SETTING=storage-dev.json SETTING_SECOND=storageSecond-dev.json playwright test --browser chromium --reporter list,html,allure-playwright -c ./tests/playwright.config.ts",
"debug": "playwright test --browser chromium -c ./tests/playwright.config.ts --debug --headed",
"dev-debug": "cross-env PLATFORM_URI=http://localhost:8080 PLATFORM_TRANSACTOR=ws://localhost:3333 SETTING=storage-dev.json playwright test --browser chromium -c ./tests/playwright.config.ts --debug --headed",
"dev-debug": "cross-env PLATFORM_URI=http://localhost:8080 PLATFORM_TRANSACTOR=ws://localhost:3333 SETTING=storage-dev.json SETTING_SECOND=storageSecond-dev.json playwright test --browser chromium -c ./tests/playwright.config.ts --debug --headed",
"codegen": "playwright codegen --load-storage storage.json http://localhost:8083/workbench/sanity-ws/",
"dev-codegen": "cross-env playwright codegen --load-storage storage-dev.json http://localhost:8080/workbench/sanity-ws/",
"allure:generate": "allure generate allure-results -o allure-report --clean"
@ -34,8 +34,8 @@
"prettier": "^3.1.0",
"prettier-plugin-svelte": "^3.1.0",
"typescript": "^5.2.2",
"@playwright/test": "~1.23.0",
"allure-playwright": "2.9.2"
"@playwright/test": "^1.40.1",
"allure-playwright": "^2.9.2"
},
"dependencies": {
"dotenv": "~16.0.0",

View File

@ -0,0 +1,42 @@
{
"cookies": [],
"origins": [
{
"origin": "http://localhost:8080",
"localStorage": [
{
"name": "login:metadata:LoginEmail",
"value": "user1"
},
{
"name": "login:metadata:LoginTokens",
"value": "{\"sanity-ws\":\"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJlbWFpbCI6InVzZXIyIiwid29ya3NwYWNlIjoic2FuaXR5LXdzIiwicHJvZHVjdElkIjoiIn0.YLSe7l0JNwyohgmqdGhnf8MpJZc7l7ZfoP80so3ZZN4\"}"
},
{
"name": "login:metadata:LoginEndpoint",
"value": "ws://localhost:3333"
},
{
"name": "#platform.notification.timeout",
"value": "0"
},
{
"name": "#platform.testing.enabled",
"value": "true"
},
{
"name": "#platform.notification.logging",
"value": "false"
},
{
"name": "#platform.lazy.loading",
"value": "false"
},
{
"name": "flagOpenInDesktopApp",
"value": "true"
}
]
}
]
}

View File

@ -10,7 +10,7 @@
},
{
"name": "login:metadata:LoginTokens",
"value": "{\"sanity-ws\":\"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJjb25maXJtZWQiOnRydWUsImVtYWlsIjoidXNlcjIiLCJ3b3Jrc3BhY2UiOiJzYW5pdHktd3MiLCJwcm9kdWN0SWQiOiIifQ.810B-zY0JjmKWTeTNmhUWKPY4wkp5hGX3Ng6Y73NK7I\"}"
"value": "{\"sanity-ws\":\"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJlbWFpbCI6InVzZXIyIiwid29ya3NwYWNlIjoic2FuaXR5LXdzIiwicHJvZHVjdElkIjoiIn0.YLSe7l0JNwyohgmqdGhnf8MpJZc7l7ZfoP80so3ZZN4\"}"
},
{
"name": "login:metadata:LoginEndpoint",

View File

@ -1,21 +1,21 @@
import { test, expect, Page } from '@playwright/test'
import { Page, expect, test } from '@playwright/test'
import { allure } from 'allure-playwright'
import { IssuesPage } from '../model/tracker/issues-page'
import { PlatformSetting, expectToContainsOrdered, fillSearch, generateId } from '../utils'
import {
checkIssueFromList,
createIssue,
createComponent,
createMilestone,
DEFAULT_STATUSES,
DEFAULT_USER,
IssueProps,
navigate,
PRIORITIES,
ViewletSelectors,
checkIssueFromList,
createComponent,
createIssue,
createMilestone,
navigate,
setViewGroup,
setViewOrder,
ViewletSelectors
setViewOrder
} from './tracker.utils'
import { fillSearch, generateId, PlatformSetting } from '../utils'
import { allure } from 'allure-playwright'
import { IssuesPage } from '../model/tracker/issues-page'
test.use({
storageState: PlatformSetting
@ -129,7 +129,9 @@ test.describe('tracker layout tests', () => {
const issuesPage = new IssuesPage(page)
await issuesPage.modelSelectorAll.click()
await page.click(ViewletSelectors.Table)
await expect(locator).toContainText(groupLabels)
for (const g of groupLabels) {
await expect(locator).toContainText(g)
}
for (const issueName of issueNames) {
await checkIssueFromList(page, issueName)
@ -190,9 +192,7 @@ test.describe('tracker layout tests', () => {
await fillSearch(page, id)
await expect(locator).toContainText(orderedIssueNames, {
timeout: 5000
})
await expectToContainsOrdered(locator, orderedIssueNames)
})
}
})

View File

@ -1,4 +1,7 @@
import { expect, test } from '@playwright/test'
import { allure } from 'allure-playwright'
import { IssuesPage } from '../model/tracker/issues-page'
import { PlatformSetting, fillSearch, generateId } from '../utils'
import {
DEFAULT_STATUSES,
DEFAULT_USER,
@ -11,9 +14,6 @@ import {
openIssue,
toTime
} from './tracker.utils'
import { PlatformSetting, fillSearch, generateId } from '../utils'
import { allure } from 'allure-playwright'
import { IssuesPage } from '../model/tracker/issues-page'
test.use({
storageState: PlatformSetting
@ -48,7 +48,9 @@ test.describe('Tracker tests', () => {
await page.locator(`.antiNav-element__dropbox > a > .antiNav-element:has-text("${panel}")`).click()
await page.locator(`.ac-header .overflow-label:has-text("${mode}")`).click()
await page.click(ViewletSelectors.Table)
await expect(locator).toContainText(statuses)
for (const s of statuses) {
await expect(locator).toContainText(s)
}
if (excluded.length > 0) {
await expect(locator).not.toContainText(excluded)
}

View File

@ -1,4 +1,4 @@
import { Browser, Locator, Page } from '@playwright/test'
import { Browser, Locator, Page, expect } from '@playwright/test'
export const PlatformURI = process.env.PLATFORM_URI as string
export const PlatformTransactor = process.env.PLATFORM_TRANSACTOR as string
@ -64,3 +64,7 @@ export async function getSecondPage (browser: Browser): Promise<Page> {
const userSecondContext = await browser.newContext({ storageState: PlatformSettingSecond })
return await userSecondContext.newPage()
}
/**
 * Asserts that the locator's text contains every string in `text`,
 * appearing in the given order (arbitrary text may occur between them).
 *
 * @param val - Playwright locator whose text content is checked.
 * @param text - Ordered list of substrings expected to appear.
 * @param timeout - Optional assertion timeout in milliseconds.
 * @returns The pending Playwright assertion promise.
 */
export function expectToContainsOrdered (val: Locator, text: string[], timeout?: number): Promise<void> {
  // Escape regex metacharacters in each expected fragment so literal text
  // such as "(1)" or "a.b" is matched verbatim instead of being interpreted
  // as pattern syntax (the original joined raw strings, which silently
  // produced wrong matches for inputs containing '.', '(', '[', '+', etc.).
  const escaped = text.map((t) => t.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'))
  const origIssuesExp = new RegExp('.*' + escaped.join('.*') + '.*')
  return expect(val).toHaveText(origIssuesExp, { timeout })
}