UBERF-8592: Fix live query performance (#7189)

Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
Andrey Sobolev 2024-11-21 14:38:14 +07:00 committed by GitHub
parent 13df48d7c9
commit b5f24fbd5d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
25 changed files with 674 additions and 380 deletions


@@ -36,7 +36,8 @@ services:
       - "8089:8080"
     command: start-single-node --insecure
     volumes:
-      - cockroach_db:/cockroach/cockroach-data"
+      - cockroach_db:/cockroach/cockroach-data
+    restart: unless-stopped
   minio:
     image: 'minio/minio'
     command: server /data --address ":9000" --console-address ":9001"


@@ -19,9 +19,10 @@
     "docker:tbuild": "docker build -t hardcoreeng/tool . --platform=linux/amd64 && ../../common/scripts/docker_tag_push.sh hardcoreeng/tool",
     "docker:staging": "../../common/scripts/docker_tag.sh hardcoreeng/tool staging",
     "docker:push": "../../common/scripts/docker_tag.sh hardcoreeng/tool",
-    "run-local": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost MONGO_URL=mongodb://localhost:27017 DB_URL=mongodb://localhost:27017 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --expose-gc --max-old-space-size=18000 ./bundle/bundle.js",
-    "run-local-pg": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost MONGO_URL=mongodb://localhost:27017 DB_URL=postgresql://postgres:example@localhost:5432 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --expose-gc --max-old-space-size=18000 ./bundle/bundle.js",
-    "run-local-brk": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost MONGO_URL=mongodb://localhost:27017 DB_URL=mongodb://localhost:27017 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --inspect-brk --enable-source-maps --max-old-space-size=18000 ./bundle/bundle.js",
+    "run-local": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost ACCOUNT_DB_URL=mongodb://localhost:27017 DB_URL=mongodb://localhost:27017 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --expose-gc --max-old-space-size=18000 ./bundle/bundle.js",
+    "run-local-pg": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost ACCOUNT_DB_URL=mongodb://localhost:27017 DB_URL=postgresql://postgres:example@localhost:5432 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --expose-gc --max-old-space-size=18000 ./bundle/bundle.js",
+    "run-local-cr": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3332 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost ACCOUNT_DB_URL=mongodb://localhost:27017 DB_URL=postgresql://root@host.docker.internal:26257/defaultdb?sslmode=disable TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --expose-gc --max-old-space-size=18000 ./bundle/bundle.js",
+    "run-local-brk": "rush bundle --to @hcengineering/tool >/dev/null && cross-env SERVER_SECRET=secret ACCOUNTS_URL=http://localhost:3000 TRANSACTOR_URL=ws://localhost:3333 MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost ACCOUNT_DB_URL=mongodb://localhost:27017 DB_URL=mongodb://localhost:27017 TELEGRAM_DATABASE=telegram-service ELASTIC_URL=http://localhost:9200 REKONI_URL=http://localhost:4004 MODEL_VERSION=$(node ../../common/scripts/show_version.js) GIT_REVISION=$(git describe --all --long) node --inspect-brk --enable-source-maps --max-old-space-size=18000 ./bundle/bundle.js",
     "run": "rush bundle --to @hcengineering/tool >/dev/null && cross-env node --max-old-space-size=8000 ./bundle/bundle.js",
     "upgrade": "rushx run-local upgrade",
     "format": "format src",


@@ -92,4 +92,13 @@ export function getMongoDBUrl (): string {
   return url
 }
 
+export function getAccountDBUrl (): string {
+  const url = process.env.ACCOUNT_DB_URL
+  if (url === undefined) {
+    console.error('please provide mongo ACCOUNT_DB_URL')
+    process.exit(1)
+  }
+  return url
+}
+
 devTool(prepareTools)


@@ -100,7 +100,7 @@ import { backupDownload } from '@hcengineering/server-backup/src/backup'
 import type { PipelineFactory, StorageAdapter, StorageAdapterEx } from '@hcengineering/server-core'
 import { deepEqual } from 'fast-equals'
 import { createWriteStream, readFileSync } from 'fs'
-import { getMongoDBUrl } from './__start'
+import { getAccountDBUrl, getMongoDBUrl } from './__start'
 import {
   benchmark,
   benchmarkWorker,
@@ -429,8 +429,10 @@ export function devTool (
     .option('-f|--force [force]', 'Force update', true)
     .option('-i|--indexes [indexes]', 'Force indexes rebuild', false)
     .action(async (workspace, cmd: { force: boolean, indexes: boolean }) => {
-      const { dbUrl, version, txes, migrateOperations } = prepareTools()
-      await withDatabase(dbUrl, async (db) => {
+      const { version, txes, migrateOperations } = prepareTools()
+      const accountUrl = getAccountDBUrl()
+      await withDatabase(accountUrl, async (db) => {
         const info = await getWorkspaceById(db, workspace)
         if (info === null) {
           throw new Error(`workspace ${workspace} not found`)
@@ -468,16 +470,18 @@
     .description('upgrade')
     .option('-l|--logs <logs>', 'Default logs folder', './logs')
     .option('-i|--ignore [ignore]', 'Ignore workspaces', '')
+    .option('-r|--region [region]', 'Region of workspaces', '')
     .option(
       '-c|--console',
       'Display all information into console(default will create logs folder with {workspace}.log files',
       false
     )
     .option('-f|--force [force]', 'Force update', false)
-    .action(async (cmd: { logs: string, force: boolean, console: boolean, ignore: string }) => {
-      const { dbUrl, version, txes, migrateOperations } = prepareTools()
-      await withDatabase(dbUrl, async (db) => {
-        const workspaces = (await listWorkspacesRaw(db)).filter((ws) => !cmd.ignore.includes(ws.workspace))
+    .action(async (cmd: { logs: string, force: boolean, console: boolean, ignore: string, region: string }) => {
+      const { version, txes, migrateOperations } = prepareTools()
+      const accountUrl = getAccountDBUrl()
+      await withDatabase(accountUrl, async (db) => {
+        const workspaces = (await listWorkspacesRaw(db, cmd.region)).filter((ws) => !cmd.ignore.includes(ws.workspace))
         workspaces.sort((a, b) => b.lastVisit - a.lastVisit)
         const measureCtx = new MeasureMetricsContext('upgrade', {})


@@ -183,7 +183,7 @@ export abstract class MemDb extends TxProcessor implements Storage {
       result = matchQuery(result, query, _class, this.hierarchy)
     }
 
-    if (options?.sort !== undefined) await resultSort(result, options?.sort, _class, this.hierarchy, this)
+    if (options?.sort !== undefined) resultSort(result, options?.sort, _class, this.hierarchy, this)
     const total = result.length
     result = result.slice(0, options?.limit)
     const tresult = this.hierarchy.clone(result) as WithLookup<T>[]


@@ -1,10 +1,10 @@
-import { DocumentQuery } from '.'
+import { DocumentQuery, type MemDb } from '.'
 import { Class, Doc, Enum, EnumOf, Ref } from './classes'
 import core from './component'
 import { Hierarchy } from './hierarchy'
 import { getObjectValue } from './objvalue'
 import { createPredicates, isPredicate } from './predicate'
-import { SortQuerySelector, SortingOrder, SortingQuery, SortingRules, Storage } from './storage'
+import { SortQuerySelector, SortingOrder, SortingQuery, SortingRules } from './storage'
 
 /**
  * @public
@@ -47,14 +47,14 @@ function getEnumValue<T extends Doc> (
 /**
  * @public
  */
-export async function resultSort<T extends Doc> (
+export function resultSort<T extends Doc> (
   result: T[],
   sortOptions: SortingQuery<T>,
   _class: Ref<Class<T>>,
   hierarchy: Hierarchy,
-  modelDb: Storage
-): Promise<void> {
-  const enums = await getEnums(_class, sortOptions, hierarchy, modelDb)
+  modelDb: MemDb
+): void {
+  const enums = getEnums(_class, sortOptions, hierarchy, modelDb)
   const sortFunc = (a: any, b: any): number => {
     for (const key in sortOptions) {
       const _enum = enums[key]
@@ -116,12 +116,12 @@ function getSortingResult (aValue: any, bValue: any, order: SortingOrder | Sorti
   return res * orderOrder
 }
 
-async function getEnums<T extends Doc> (
+function getEnums<T extends Doc> (
   _class: Ref<Class<T>>,
   sortOptions: SortingQuery<T>,
   hierarchy: Hierarchy,
-  modelDb: Storage
-): Promise<Record<string, Enum>> {
+  modelDb: MemDb
+): Record<string, Enum> {
   const res: Record<string, Enum> = {}
   for (const key in sortOptions) {
     const attr = hierarchy.findAttribute(_class, key)
@@ -129,7 +129,7 @@ async function getEnums<T extends Doc> (
   if (attr !== undefined) {
     if (attr.type._class === core.class.EnumOf) {
       const ref = (attr.type as EnumOf).of
-      const enu = await modelDb.findAll(core.class.Enum, { _id: ref })
+      const enu = modelDb.findAllSync(core.class.Enum, { _id: ref })
       res[key] = enu[0]
     }
   }


@@ -240,6 +240,14 @@ export function getClient (): TxOperations & Client & OptimisticTxes {
 
 let txQueue: Tx[] = []
 
+export type RefreshListener = () => void
+
+const refreshListeners = new Set<RefreshListener>()
+
+export function addRefreshListener (r: RefreshListener): void {
+  refreshListeners.add(r)
+}
+
 /**
  * @public
  */
@@ -292,6 +300,9 @@ export async function refreshClient (clean: boolean): Promise<void> {
     for (const q of globalQueries) {
       q.refreshClient()
     }
+    for (const listener of refreshListeners.values()) {
+      listener()
+    }
   }
 }


@@ -18,6 +18,7 @@ import core, {
   createClient,
   Doc,
   generateId,
+  MeasureMetricsContext,
   Ref,
   SortingOrder,
   Space,
@@ -757,6 +758,10 @@ describe('query', () => {
       )
     })
 
+    await new Promise((resolve) => {
+      setTimeout(resolve, 1)
+    })
+
     await factory.updateDoc(core.class.Space, core.space.Model, futureSpace, {
       name: '1'
     })
@@ -974,4 +979,44 @@
     projects = await liveQuery.queryFind(test.mixin.TestProjectMixin, {}, { projection: { _id: 1 } })
     expect(projects.length).toEqual(1)
   })
+
+  jest.setTimeout(25000)
+  it('test clone ops', async () => {
+    const { liveQuery, factory } = await getClient()
+    const counter = 10000
+    const ctx = new MeasureMetricsContext('tool', {})
+
+    let data: Space[] = []
+    const pp = new Promise((resolve) => {
+      liveQuery.query<Space>(
+        test.class.TestProject,
+        { private: false },
+        (result) => {
+          data = result
+          if (data.length % 1000 === 0) {
+            console.info(data.length)
+          }
+          if (data.length === counter) {
+            resolve(null)
+          }
+        },
+        {}
+      )
+    })
+
+    for (let i = 0; i < counter; i++) {
+      await ctx.with('create-doc', {}, () =>
+        factory.createDoc(test.class.TestProject, core.space.Space, {
+          archived: false,
+          description: '',
+          members: [],
+          private: false,
+          prjName: 'test project',
+          name: 'qwe'
+        })
+      )
+    }
+    expect(data.length).toBe(counter)
+    await pp
+  })
 })

File diff suppressed because it is too large


@@ -0,0 +1,91 @@
import {
  clone,
  Hierarchy,
  matchQuery,
  toFindResult,
  type Class,
  type Doc,
  type DocumentQuery,
  type FindOptions,
  type FindResult,
  type Ref,
  type Timestamp
} from '@hcengineering/core'
import type { Query, QueryId } from './types'

export interface DocumentRef {
  doc: Doc
  queries: QueryId[]
  lastUsed: Timestamp
}

export class Refs {
  // A map of _class to documents.
  private readonly documentRefs = new Map<string, Map<Ref<Doc>, DocumentRef>>()

  constructor (readonly getHierarchy: () => Hierarchy) {}

  public updateDocuments (q: Query, docs: Doc[], clean: boolean = false): void {
    if (q.options?.projection !== undefined) {
      return
    }
    for (const d of docs) {
      const classKey = Hierarchy.mixinOrClass(d) + ':' + JSON.stringify(q.options?.lookup ?? {})
      let docMap = this.documentRefs.get(classKey)
      if (docMap === undefined) {
        if (clean) {
          continue
        }
        docMap = new Map()
        this.documentRefs.set(classKey, docMap)
      }
      const queries = (docMap.get(d._id)?.queries ?? []).filter((it) => it !== q.id)
      if (!clean) {
        queries.push(q.id)
      }
      if (queries.length === 0) {
        docMap.delete(d._id)
      } else {
        const q = docMap.get(d._id)
        if ((q?.lastUsed ?? 0) < d.modifiedOn) {
          docMap.set(d._id, { ...(q ?? {}), doc: d, queries, lastUsed: d.modifiedOn })
        }
      }
    }
  }

  public findFromDocs<T extends Doc>(
    _class: Ref<Class<Doc>>,
    query: DocumentQuery<Doc>,
    options?: FindOptions<T>
  ): FindResult<T> | null {
    const classKey = _class + ':' + JSON.stringify(options?.lookup ?? {})
    if (typeof query._id === 'string') {
      // One document query
      const doc = this.documentRefs.get(classKey)?.get(query._id)?.doc
      if (doc !== undefined) {
        const q = matchQuery([doc], query, _class, this.getHierarchy())
        if (q.length > 0) {
          return toFindResult(clone([doc]), 1)
        }
      }
    }
    if (
      options?.limit === 1 &&
      options.total !== true &&
      options?.sort === undefined &&
      options?.projection === undefined
    ) {
      const docs = this.documentRefs.get(classKey)
      if (docs !== undefined) {
        const _docs = Array.from(docs.values()).map((it) => it.doc)
        const q = matchQuery(_docs, query, _class, this.getHierarchy())
        if (q.length > 0) {
          return toFindResult(clone([q[0]]), 1)
        }
      }
    }
    return null
  }
}
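
A minimal usage sketch for the cache above (the client, query and document variables below are assumptions for illustration, not part of this commit): the live-query client records the documents each query produced, and later point (_id) lookups or cheap limit:1 requests can be answered from memory, falling back to the server when findFromDocs returns null.

import { type Client, type Doc } from '@hcengineering/core'
import { Refs } from './refs'
import type { Query } from './types'

// Assumed context: an open platform client, one registered live query and its latest result.
declare const client: Client
declare const q: Query
declare const docs: Doc[]

const refs = new Refs(() => client.getHierarchy())

// Remember what the query returned, keyed by class + lookup options.
refs.updateDocuments(q, docs)

// A later one-document request may now be served without a round trip;
// null means the request still has to go to the server.
const cached = refs.findFromDocs(q._class, { _id: docs[0]._id }, { limit: 1 })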


@@ -0,0 +1,99 @@
import {
  resultSort,
  type Class,
  type Doc,
  type Hierarchy,
  type MemDb,
  type Ref,
  type SortingQuery
} from '@hcengineering/core'

export class ResultArray {
  private docs: Map<Ref<Doc>, Doc>

  private readonly clones = new Map<string, Map<Ref<Doc>, Doc>>()

  get length (): number {
    return this.docs.size
  }

  constructor (
    docs: Doc[],
    readonly hierarchy: Hierarchy
  ) {
    this.docs = new Map(docs.map((it) => [it._id, it]))
  }

  clean (): void {
    this.clones.clear()
  }

  getDocs (): Doc[] {
    return Array.from(this.docs.values())
  }

  findDoc (_id: Ref<Doc>): Doc | undefined {
    return this.docs.get(_id)
  }

  getClone<T extends Doc>(): T[] {
    return this.hierarchy.clone(this.getDocs())
  }

  getResult (id: string): Doc[] {
    // Let's form a new list based on the clones we already have.
    const info = this.clones.get(id)
    if (info === undefined) {
      const docs = this.getClone()
      this.clones.set(id, new Map(docs.map((it) => [it._id, it])))
      return docs
    } else {
      return Array.from(info.values())
    }
  }

  delete (_id: Ref<Doc>): Doc | undefined {
    const doc = this.docs.get(_id)
    this.docs.delete(_id)
    for (const [, v] of this.clones.entries()) {
      v.delete(_id)
    }
    return doc
  }

  updateDoc (doc: Doc, mainClone = true): void {
    this.docs.set(doc._id, mainClone ? this.hierarchy.clone(doc) : doc)
    for (const [, v] of this.clones.entries()) {
      v.set(doc._id, this.hierarchy.clone(doc))
    }
  }

  push (doc: Doc): void {
    this.docs.set(doc._id, this.hierarchy.clone(doc))
    for (const [, v] of this.clones.entries()) {
      v.set(doc._id, this.hierarchy.clone(doc))
    }
    // this.changes.add(doc._id)
  }

  pop (): Doc | undefined {
    const lastElement = Array.from(this.docs)[this.docs.size - 1]
    if (lastElement !== undefined) {
      this.docs.delete(lastElement[0])
      for (const [, v] of this.clones.entries()) {
        v.delete(lastElement[0])
      }
      return lastElement[1]
    }
    return undefined
  }

  sort<T extends Doc>(_class: Ref<Class<Doc>>, sort: SortingQuery<T>, hierarchy: Hierarchy, memdb: MemDb): void {
    const docs = Array.from(this.docs.values())
    resultSort(docs, sort, _class, hierarchy, memdb)
    this.docs = new Map(docs.map((it) => [it._id, it]))
    for (const [k, v] of this.clones.entries()) {
      this.clones.set(k, new Map(docs.map((it) => [it._id, v.get(it._id) ?? this.hierarchy.clone(it)])))
    }
  }
}
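
A short usage sketch (the hierarchy and docs variables below are assumptions for illustration): each subscriber id gets its own cloned view of the same underlying result, so one callback mutating its copy cannot corrupt another's.

import { type Doc, type Hierarchy } from '@hcengineering/core'
import { ResultArray } from './results'

// Assumed context: a hierarchy from an open client and the documents of one query.
declare const hierarchy: Hierarchy
declare const docs: Doc[]

const result = new ResultArray(docs, hierarchy)

// Each callback gets its own cloned copy, created once and then kept in sync incrementally.
const forFirstCallback = result.getResult('callback-1')
const forSecondCallback = result.getResult('callback-2')

// Updates and deletions touch the main map and every existing clone in one pass:
// result.updateDoc(changedDoc)
// result.delete(removedId)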


@@ -0,0 +1,17 @@
import type { Class, Doc, DocumentQuery, FindOptions, FindResult, Ref } from '@hcengineering/core'
import type { ResultArray } from './results'

export type Callback = (result: FindResult<Doc>) => void

export type QueryId = number

export interface Query {
  id: QueryId // unique query identifier.
  _class: Ref<Class<Doc>>
  query: DocumentQuery<Doc>
  result: ResultArray | Promise<ResultArray>
  options?: FindOptions<Doc>
  total: number
  callbacks: Map<string, Callback>
  refresh: () => Promise<void>
}


@@ -63,14 +63,14 @@ export class InboxNotificationsClientImpl implements InboxNotificationsClient {
   )
 
   readonly inboxNotificationsByContext = derived(
-    [this.contexts, this.inboxNotifications],
-    ([notifyContexts, inboxNotifications]) => {
-      if (inboxNotifications.length === 0 || notifyContexts.length === 0) {
+    [this.contextById, this.inboxNotifications],
+    ([contextById, inboxNotifications]) => {
+      if (inboxNotifications.length === 0 || contextById.size === 0) {
        return new Map<Ref<DocNotifyContext>, InboxNotification[]>()
      }
 
      return inboxNotifications.reduce((result, notification) => {
-        const notifyContext = notifyContexts.find(({ _id }) => _id === notification.docNotifyContext)
+        const notifyContext = contextById.get(notification.docNotifyContext)
        if (notifyContext === undefined) {
          return result


@@ -27,7 +27,7 @@ import core, {
   type WithLookup
 } from '@hcengineering/core'
 import { getResource } from '@hcengineering/platform'
-import { getClient } from '@hcengineering/presentation'
+import { addRefreshListener, getClient } from '@hcengineering/presentation'
 import { getEventPositionElement, showPopup } from '@hcengineering/ui'
 import {
   type Action,
@@ -54,6 +54,12 @@ export function getSelection (focus: FocusSelection, selection: SelectionStore):
   return docs
 }
 
+const allActions = new Map<ViewContextType, Action[]>()
+
+addRefreshListener(() => {
+  allActions.clear()
+})
+
 /**
  * @public
  *
@@ -68,9 +74,11 @@ export async function getActions (
   derived: Ref<Class<Doc>> = core.class.Doc,
   mode: ViewContextType = 'context'
 ): Promise<Action[]> {
-  const actions: Action[] = await client.findAll(view.class.Action, {
-    'context.mode': mode
-  })
+  let actions: Action[] | undefined = allActions.get(mode)
+  if (actions === undefined) {
+    actions = client.getModel().findAllSync(view.class.Action, { 'context.mode': mode })
+    allActions.set(mode, actions)
+  }
 
   const filteredActions = await filterAvailableActions(actions, client, doc, derived)


@@ -13,8 +13,17 @@
 // limitations under the License.
 -->
 <script lang="ts">
-  import core, { Doc, FindResult, getObjectValue, Ref, RefTo, SortingOrder, Space } from '@hcengineering/core'
-  import { getResource, translate } from '@hcengineering/platform'
+  import core, {
+    Doc,
+    FindResult,
+    getObjectValue,
+    Ref,
+    RefTo,
+    SortingOrder,
+    Space,
+    type WithLookup
+  } from '@hcengineering/core'
+  import { getResourceC, translate } from '@hcengineering/platform'
   import presentation, { getClient } from '@hcengineering/presentation'
   import ui, {
     addNotification,
@@ -57,28 +66,59 @@
   $: clazz = hierarchy.getClass(targetClass)
   $: mixin = hierarchy.classHierarchyMixin(targetClass, view.mixin.Groupping)
   $: if (mixin?.grouppingManager !== undefined) {
-    getResource(mixin.grouppingManager).then((mgr) => (grouppingManager = mgr))
+    getResourceC(mixin.grouppingManager, (mgr) => (grouppingManager = mgr))
+  } else {
+    grouppingManager = undefined
   }
 
   let filterUpdateTimeout: any | undefined
 
   async function getValues (search: string): Promise<void> {
-    if (objectsPromise) {
+    if (objectsPromise !== undefined) {
       await objectsPromise
     }
     targets.clear()
-    const spaces = (
-      await client.findAll(
-        core.class.Space,
-        { archived: { $ne: true } },
-        { projection: { _id: 1, archived: 1, _class: 1 } }
-      )
-    ).map((it) => it._id)
-    const baseObjects = await client.findAll(filter.key._class, space ? { space } : { space: { $in: spaces } }, {
-      projection: { [filter.key.key]: 1, space: 1 }
-    })
+    const baseObjects: WithLookup<Doc>[] = await client.findAll(
+      filter.key._class,
+      space !== undefined
+        ? {
+            space
+          }
+        : { '$lookup.space.archived': false },
+      {
+        projection: { [filter.key.key]: 1 },
+        lookup: {
+          space: core.class.Space
+        },
+        limit: 1000
+      }
+    )
+    if (baseObjects.length === 1000) {
+      // We have more so let's fetch all
+      const ninTarget = Array.from(new Set(baseObjects.map((it) => getObjectValue(filter.key.key, it) ?? undefined)))
+      const extraObjects = await client.findAll(
+        filter.key._class,
+        {
+          ...(space !== undefined
+            ? { space }
+            : {
+                '$lookup.space.archived': false
+              }),
+          [filter.key.key]: {
+            $nin: ninTarget
+          }
+        },
+        {
+          projection: { [filter.key.key]: 1 },
+          lookup: {
+            space: core.class.Space
+          }
+        }
+      )
+      baseObjects.push(...extraObjects)
+    }
     for (const object of baseObjects) {
       const value = getObjectValue(filter.key.key, object) ?? undefined
       targets.add(value)


@@ -13,7 +13,16 @@
 // limitations under the License.
 -->
 <script lang="ts">
-  import core, { Class, Doc, FindResult, getObjectValue, Ref, SortingOrder, Space } from '@hcengineering/core'
+  import core, {
+    Class,
+    Doc,
+    FindResult,
+    getObjectValue,
+    Ref,
+    SortingOrder,
+    Space,
+    type WithLookup
+  } from '@hcengineering/core'
   import presentation, { getClient } from '@hcengineering/presentation'
   import ui, {
     deviceOptionsStore,
@@ -75,47 +84,36 @@
       : {}
     const isDerivedFromSpace = hierarchy.isDerived(_class, core.class.Space)
 
-    const spaces =
-      space !== undefined || isDerivedFromSpace
-        ? []
-        : (
-            await client.findAll(
-              core.class.Space,
-              { archived: { $ne: true } },
-              { projection: { _id: 1, archived: 1, _class: 1 } }
-            )
-          ).map((it) => it._id)
-
-    async function doQuery (limit: number | undefined, first1000?: any[]): Promise<boolean> {
+    async function doQuery (limit: number | undefined, sortedValues: any[] | undefined): Promise<boolean> {
       const p = client.findAll(
        _class,
        {
          ...resultQuery,
-          ...(space
+          ...(space !== undefined
            ? { space }
            : isDerivedFromSpace
              ? viewOptions === undefined || viewOptions?.hideArchived === true
                ? { archived: false }
                : {}
-              : { space: { $in: spaces } }),
-          ...(first1000 ? { [filter.key.key]: { $nin: first1000 } } : {})
+              : {
+                  '$lookup.space.archived': false
+                }),
+          ...(sortedValues !== undefined ? { [prefix + filter.key.key]: { $nin: sortedValues } } : {})
        },
        {
          sort: { modifiedOn: SortingOrder.Descending },
          projection: { [prefix + filter.key.key]: 1 },
-          ...(limit !== undefined ? { limit } : {})
+          ...(limit !== undefined ? { limit } : {}),
+          lookup: {
+            space: core.class.Space
+          }
        }
      )
      if (limit !== undefined) {
        objectsPromise = p
      }
-      const res = await p
+      const res: WithLookup<Doc>[] = await p
+
+      // We need to filter archived in case it is required
      for (const object of res) {
        let asDoc = object
@@ -132,7 +130,7 @@
      }
      return res.length >= (limit ?? 0)
    }
-    const hasMore = await doQuery(1000)
+    const hasMore = await doQuery(1000, undefined)
    values = values
    sortedValues = sortFilterValues([...values.keys()], (v) => isSelected(v, selectedValues))
    objectsPromise = undefined


@@ -102,13 +102,13 @@ services:
       - ELASTIC_URL=http://elastic:9200
       - GMAIL_URL=http://host.docker.internal:8088
       - CALENDAR_URL=http://host.docker.internal:8095
-      - REKONI_URL=http://rekoni:4005
+      - REKONI_URL=http://rekoni:4007
       - TELEGRAM_URL=http://host.docker.internal:8086
       - COLLABORATOR_URL=ws://host.docker.internal:3079
       - STORAGE_CONFIG=${STORAGE_CONFIG}
       - BRANDING_URL=http://host.docker.internal:8083/branding-test.json
-      - PRINT_URL=http://host.docker.internal:4005
-      - SIGN_URL=http://host.docker.internal:4006
+      - PRINT_URL=http://host.docker.internal:4003
+      - SIGN_URL=http://host.docker.internal:4008
   transactor:
     image: hardcoreeng/transactor
     extra_hosts:
@@ -128,11 +128,9 @@ services:
       - SERVER_PORT=3334
       - SERVER_SECRET=secret
      - DB_URL=mongodb://mongodb:27018
-      - MONGO_URL=mongodb://mongodb:27018
      - METRICS_CONSOLE=false
      - METRICS_FILE=metrics.txt
      - STORAGE_CONFIG=${STORAGE_CONFIG}
-      - REKONI_URL=http://rekoni:4005
      - FRONT_URL=http://host.docker.internal:8083
      - UPLOAD_URL=http://host.docker.internal:8083/files
      - ACCOUNTS_URL=http://account:3003
@@ -160,13 +158,15 @@ services:
   rekoni:
     image: hardcoreeng/rekoni-service
     restart: on-failure
+    ports:
+      - 4007:4004
   print:
     image: hardcoreeng/print
     extra_hosts:
       - "host.docker.internal:host-gateway"
     restart: unless-stopped
     ports:
-      - 4005:4005
+      - 4003:4005
     environment:
       - SECRET=secret
       - MONGO_URL=${MONGO_URL}
@@ -182,7 +182,7 @@ services:
       - "host.docker.internal:host-gateway"
     restart: unless-stopped
     ports:
-      - 4006:4006
+      - 4008:4006
     volumes:
       - ../services/sign/pod-sign/debug/certificate.p12:/var/cfg/certificate.p12
       - ../services/sign/pod-sign/debug/branding.json:/var/cfg/branding.json


@@ -4,6 +4,7 @@ export MINIO_SECRET_KEY=minioadmin
 export MINIO_ENDPOINT=localhost:9000
 export MONGO_URL=mongodb://localhost:27017
 export DB_URL=mongodb://localhost:27017
+export ACCOUNT_DB_URL=mongodb://localhost:27017
 export ACCOUNTS_URL=http://localhost:3000
 export TRANSACTOR_URL=ws://localhost:3333
 export ELASTIC_URL=http://localhost:9200


@@ -6,6 +6,7 @@ export MINIO_ENDPOINT=localhost:9002
 export ACCOUNTS_URL=http://localhost:3003
 export TRANSACTOR_URL=ws://localhost:3334
 export MONGO_URL=mongodb://localhost:27018
+export ACCOUNT_DB_URL=mongodb://localhost:27018
 export DB_URL=mongodb://localhost:27018
 export ELASTIC_URL=http://localhost:9201
 export SERVER_SECRET=secret


@@ -868,8 +868,8 @@ export async function countWorkspacesInRegion (
 /**
  * @public
  */
-export async function listWorkspacesRaw (db: AccountDB): Promise<Workspace[]> {
-  return (await db.workspace.find({})).filter((it) => it.disabled !== true)
+export async function listWorkspacesRaw (db: AccountDB, region?: string): Promise<Workspace[]> {
+  return (await db.workspace.find(region !== undefined ? { region } : {})).filter((it) => it.disabled !== true)
 }
 
 /**


@@ -643,6 +643,12 @@ abstract class MongoAdapterBase implements DbAdapter {
         const ckey = this.checkMixinKey<T>(key, clazz) as keyof T
         projection[ckey] = options.projection[key]
       }
+      for (const step of steps) {
+        // We also need to add the lookup alias if the original field is in the projection.
+        if ((projection as any)[step.from] === 1) {
+          ;(projection as any)[step.as] = 1
+        }
+      }
       pipeline.push({ $project: projection })
     }
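
The idea behind the added loop, as a standalone sketch (the { from, as } step shape mirrors the adapter's lookup pipeline steps; the helper name and the alias value are hypothetical): in a MongoDB aggregation, a $project stage drops every field it does not list, so when a looked-up source field is projected, the $lookup output alias has to be projected as well.

interface LookupStep { from: string, as: string }

// Hypothetical helper: extend a projection so joined aliases survive the $project stage.
function keepLookupAliases (projection: Record<string, number>, steps: LookupStep[]): Record<string, number> {
  const result = { ...projection }
  for (const step of steps) {
    if (result[step.from] === 1) {
      result[step.as] = 1
    }
  }
  return result
}

// keepLookupAliases({ space: 1 }, [{ from: 'space', as: 'space_lookup' }])
//   -> { space: 1, space_lookup: 1 }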


@@ -1082,6 +1082,12 @@ abstract class PostgresAdapterBase implements DbAdapter {
     if (projection === undefined) {
       res.push(`${baseDomain}.*`)
     } else {
+      if (projection._id === undefined) {
+        res.push(`${baseDomain}."_id" AS "_id"`)
+      }
+      if (projection._class === undefined) {
+        res.push(`${baseDomain}."_class" AS "_class"`)
+      }
       for (const key in projection) {
         if (isDataField(baseDomain, key)) {
           if (!dataAdded) {


@@ -4,6 +4,7 @@ export MINIO_SECRET_KEY=minioadmin
 export MINIO_ENDPOINT=localhost:9000
 export MONGO_URL=mongodb://localhost:27017
 export DB_URL=mongodb://localhost:27017
+export ACCOUNT_DB_URL=mongodb://localhost:27017
 export ACCOUNTS_URL=http://localhost:3000
 export TRANSACTOR_URL=ws://localhost:3333
 export ELASTIC_URL=http://localhost:9200


@@ -5,6 +5,7 @@ export MINIO_SECRET_KEY=minioadmin
 export MINIO_ENDPOINT=localhost:9002
 export ACCOUNTS_URL=http://localhost:3003
 export TRANSACTOR_URL=ws://localhost:3334
+export ACCOUNT_DB_URL=postgresql://postgres:example@localhost:5433
 export MONGO_URL=mongodb://localhost:27018
 export ELASTIC_URL=http://localhost:9201
 export SERVER_SECRET=secret


@@ -5,6 +5,7 @@ export MINIO_SECRET_KEY=minioadmin
 export MINIO_ENDPOINT=localhost:9002
 export ACCOUNTS_URL=http://localhost:3003
 export TRANSACTOR_URL=ws://localhost:3334
+export ACCOUNT_DB_URL=mongodb://localhost:27018
 export MONGO_URL=mongodb://localhost:27018
 export DB_URL=mongodb://localhost:27018
 export ELASTIC_URL=http://localhost:9201