commit 199da5077a (parent aa8530d678)
Author: Andrey Sobolev <haiodo@gmail.com>, 2023-02-15 10:14:20 +07:00, committed by GitHub
Signed-off-by: Andrey Sobolev <haiodo@gmail.com>
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
71 changed files with 888 additions and 312 deletions

View File

@@ -302,6 +302,7 @@ specifiers:
fast-equals: ^2.0.3
file-loader: ^6.2.0
filesize: ^8.0.3
+fork-ts-checker-webpack-plugin: ~7.3.0
got: ^11.8.3
html-to-text: ^9.0.3
html-webpack-plugin: ^5.5.0
@@ -671,6 +672,7 @@ dependencies:
fast-equals: 2.0.4
file-loader: 6.2.0_webpack@5.75.0
filesize: 8.0.7
+fork-ts-checker-webpack-plugin: 7.3.0_typescript@4.8.4+webpack@5.75.0
got: 11.8.5
html-to-text: 9.0.3
html-webpack-plugin: 5.5.0_webpack@5.75.0
@@ -2714,6 +2716,26 @@ packages:
defer-to-connect: 2.0.1
dev: false
+/@tiptap/core/2.0.0-beta.209_88faf4ceee3e4a6aeb2426d142be0a06:
+resolution: {integrity: sha512-DOOzfo2XKD5Qt2oEGW33/6ugwSnvpl4WbxtlKdPadLoApk6Kja3K1Eps3pihBgIGmo4tkctkCzmj8wNWS7KeWg==}
+peerDependencies:
+prosemirror-commands: ^1.3.1
+prosemirror-keymap: ^1.2.0
+prosemirror-model: ^1.18.1
+prosemirror-schema-list: ^1.2.2
+prosemirror-state: ^1.4.1
+prosemirror-transform: ^1.7.0
+prosemirror-view: ^1.28.2
+dependencies:
+prosemirror-commands: 1.5.0
+prosemirror-keymap: 1.2.0
+prosemirror-model: 1.18.3
+prosemirror-schema-list: 1.2.2
+prosemirror-state: 1.4.2
+prosemirror-transform: 1.7.0
+prosemirror-view: 1.29.1
+dev: false
/@tiptap/core/2.0.0-beta.209_b7ea67b8e383e94e567bbd2a5d53cb52:
resolution: {integrity: sha512-DOOzfo2XKD5Qt2oEGW33/6ugwSnvpl4WbxtlKdPadLoApk6Kja3K1Eps3pihBgIGmo4tkctkCzmj8wNWS7KeWg==}
peerDependencies:
@@ -3024,6 +3046,22 @@ packages:
'@tiptap/core': 2.0.0-beta.209_b7ea67b8e383e94e567bbd2a5d53cb52
dev: false
+/@tiptap/prosemirror-tables/1.1.4_820a2ec93be59195864136797737fd20:
+resolution: {integrity: sha512-O2XnDhZV7xTHSFxMMl8Ei3UVeCxuMlbGYZ+J2QG8CzkK8mxDpBa66kFr5DdyAhvdi1ptpcH9u7/GMwItQpN4sA==}
+peerDependencies:
+prosemirror-keymap: ^1.1.2
+prosemirror-model: ^1.8.1
+prosemirror-state: ^1.3.1
+prosemirror-transform: ^1.2.1
+prosemirror-view: ^1.13.3
+dependencies:
+prosemirror-keymap: 1.2.0
+prosemirror-model: 1.18.3
+prosemirror-state: 1.4.2
+prosemirror-transform: 1.7.0
+prosemirror-view: 1.29.1
+dev: false
/@tiptap/prosemirror-tables/1.1.4_825d0bccef8ba664696e76031c65278e:
resolution: {integrity: sha512-O2XnDhZV7xTHSFxMMl8Ei3UVeCxuMlbGYZ+J2QG8CzkK8mxDpBa66kFr5DdyAhvdi1ptpcH9u7/GMwItQpN4sA==}
peerDependencies:
@@ -3074,6 +3112,41 @@ packages:
- prosemirror-view
dev: false
+/@tiptap/starter-kit/2.0.0-beta.209_4bc2915a7798d139ce161d1059f26dca:
+resolution: {integrity: sha512-uR68ZfDZ5PeygGey3xc9ZuFIP+K7VRElrABnZcM6t9/Crrs70UFwSTNlkS0ezx9woj8h+8N78a6r8W1YC04TOw==}
+dependencies:
+'@tiptap/core': 2.0.0-beta.209_88faf4ceee3e4a6aeb2426d142be0a06
+'@tiptap/extension-blockquote': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-bold': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-bullet-list': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-code': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-code-block': 2.0.0-beta.209_7c4b41d3b933ab120335c25fb0c64f89
+'@tiptap/extension-document': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-dropcursor': 2.0.0-beta.209_8146261db7b5e4b7ca8f9611f0136fd1
+'@tiptap/extension-gapcursor': 2.0.0-beta.209_ae82587580408645a82c84b21276f03e
+'@tiptap/extension-hard-break': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-heading': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-history': 2.0.0-beta.209_e2f83d342d3e4fc6f345741d5563b0d1
+'@tiptap/extension-horizontal-rule': 2.0.0-beta.209_7c4b41d3b933ab120335c25fb0c64f89
+'@tiptap/extension-italic': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-list-item': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-ordered-list': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-paragraph': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-strike': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+'@tiptap/extension-text': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
+transitivePeerDependencies:
+- prosemirror-commands
+- prosemirror-dropcursor
+- prosemirror-gapcursor
+- prosemirror-history
+- prosemirror-keymap
+- prosemirror-model
+- prosemirror-schema-list
+- prosemirror-state
+- prosemirror-transform
+- prosemirror-view
+dev: false
/@tiptap/suggestion/2.0.0-beta.209_6d771ef4ce96210c593056969447c447:
resolution: {integrity: sha512-KKV64rTzTGY1q03nK0b4wCrAmihwThYJrYlPTUTelQm0AeJ4EPTNMRSR5rHD+fVF7agqrtrCkMw46vTXd6j1Jw==}
peerDependencies:
@@ -16248,12 +16321,12 @@ packages:
dev: false
file:projects/text-editor.tgz_1e646fd62a3adc9d0b41f256dafe3d38:
-resolution: {integrity: sha512-cBdQh4hTVCpoolCLbnlHRe13ldtnGOFkf09z+B7W/Rqe1qJuY0t0KkX2KZKU/ibCQgA+duXx9rzgyjpMMThcaw==, tarball: file:projects/text-editor.tgz}
+resolution: {integrity: sha512-4+cwBn+k4JdLN00mVFFMngvgdwonyINba4W27Ku8YKU6J0HQ6kLLY5d1htGHr1l6ZJvnrbe31tELdfJJTkJH/Q==, tarball: file:projects/text-editor.tgz}
id: file:projects/text-editor.tgz
name: '@rush-temp/text-editor'
version: 0.0.0
dependencies:
-'@tiptap/core': 2.0.0-beta.209_b7ea67b8e383e94e567bbd2a5d53cb52
+'@tiptap/core': 2.0.0-beta.209_88faf4ceee3e4a6aeb2426d142be0a06
'@tiptap/extension-code-block': 2.0.0-beta.209_7c4b41d3b933ab120335c25fb0c64f89
'@tiptap/extension-collaboration': 2.0.0-beta.209_c3e1167b714fc2c0e8099b0d893dd8f4
'@tiptap/extension-collaboration-cursor': 2.0.0-beta.209_542658698eb070984e7c071802d831ce
@@ -16270,8 +16343,8 @@ packages:
'@tiptap/extension-task-item': 2.0.0-beta.209_06eca8551536c047b95c272597601c00
'@tiptap/extension-task-list': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
'@tiptap/extension-typography': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
-'@tiptap/prosemirror-tables': 1.1.4_825d0bccef8ba664696e76031c65278e
+'@tiptap/prosemirror-tables': 1.1.4_820a2ec93be59195864136797737fd20
-'@tiptap/starter-kit': 2.0.0-beta.209_3f796369a8f3f702340a0f7a928227c2
+'@tiptap/starter-kit': 2.0.0-beta.209_4bc2915a7798d139ce161d1059f26dca
'@tiptap/suggestion': 2.0.0-beta.209_6d771ef4ce96210c593056969447c447
'@types/diff': 5.0.2
'@typescript-eslint/eslint-plugin': 5.42.1_d506b9be61cb4ac2646ecbc6e0680464
@@ -16292,6 +16365,7 @@ packages:
prosemirror-dropcursor: 1.6.1
prosemirror-gapcursor: 1.3.1
prosemirror-history: 1.3.0
+prosemirror-keymap: 1.2.0
prosemirror-model: 1.18.3
prosemirror-schema-list: 1.2.2
prosemirror-state: 1.4.2
@@ -16314,7 +16388,6 @@ packages:
- node-sass
- postcss
- postcss-load-config
-- prosemirror-keymap
- pug
- stylus
- sugarss
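The new fork-ts-checker-webpack-plugin entry pairs with the build:watch scripts added across the packages below: type checking moves out of the main transpile path. The webpack side of that wiring is not part of this excerpt; a typical minimal setup looks roughly like the sketch below (ts-loader, the entry path and all options are assumptions, not taken from this repository):

```ts
// webpack.config.ts - hypothetical example only
import ForkTsCheckerWebpackPlugin from 'fork-ts-checker-webpack-plugin'
import type { Configuration } from 'webpack'

const config: Configuration = {
  mode: 'development',
  entry: './src/index.ts',
  resolve: { extensions: ['.ts', '.js'] },
  module: {
    rules: [
      {
        test: /\.ts$/,
        loader: 'ts-loader',
        // transpileOnly skips type checking in the loader; the plugin below runs
        // the TypeScript diagnostics in a separate process instead.
        options: { transpileOnly: true }
      }
    ]
  },
  plugins: [new ForkTsCheckerWebpackPlugin()]
}

export default config
```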

View File

@@ -112,20 +112,21 @@ export async function connect (handler: (tx: Tx) => void): Promise<ClientConnect
url: ''
}
},
+metrics: new MeasureMetricsContext('', {}),
fulltextAdapter: {
factory: createNullFullTextAdapter,
url: '',
-metrics: new MeasureMetricsContext('', {}),
stages: () => []
},
contentAdapter: {
url: '',
-factory: createNullContentTextAdapter,
+factory: createNullContentTextAdapter
-metrics: new MeasureMetricsContext('', {})
},
workspace: getWorkspaceId('')
}
-const serverStorage = await createServerStorage(conf)
+const serverStorage = await createServerStorage(conf, {
+upgrade: false
+})
setMetadata(devmodel.metadata.DevModel, serverStorage)
return new ServerStorageWrapper(serverStorage, handler)
}

View File

@@ -63,17 +63,16 @@ export async function start (port: number, host?: string): Promise<void> {
fulltextAdapter: {
factory: createNullFullTextAdapter,
url: '',
-metrics: new MeasureMetricsContext('', {}),
stages: () => []
},
+metrics: new MeasureMetricsContext('', {}),
contentAdapter: {
url: '',
-factory: createNullContentTextAdapter,
+factory: createNullContentTextAdapter
-metrics: new MeasureMetricsContext('', {})
},
workspace: getWorkspaceId('')
}
-return createPipeline(conf, [], false)
+return createPipeline(conf, [], false, () => {})
},
(token, pipeline, broadcast) => new ClientSession(broadcast, token, pipeline),
port,

View File

@@ -47,7 +47,7 @@ export const migrateOperations: [string, MigrateOperation][] = [
['telegram', telegramOperation],
['task', taskOperation],
['attachment', attachmentOperation],
-['', automationOperation],
+['automation', automationOperation],
['lead', leadOperation],
['recruit', recruitOperation],
['view', viewOperation],

View File

@@ -18,16 +18,25 @@ import { Model, Builder } from '@hcengineering/model'
import type { Resource } from '@hcengineering/platform'
import { TClass, TDoc } from '@hcengineering/model-core'
-import type { ObjectDDParticipant, Trigger, TriggerFunc } from '@hcengineering/server-core'
+import type {
+AsyncTrigger,
+ObjectDDParticipant,
+Trigger,
+TriggerFunc,
+AsyncTriggerState,
+AsyncTriggerFunc
+} from '@hcengineering/server-core'
import core, {
Class,
Doc,
DocumentQuery,
+DOMAIN_DOC_INDEX_STATE,
DOMAIN_MODEL,
FindOptions,
FindResult,
Hierarchy,
-Ref
+Ref,
+TxCUD
} from '@hcengineering/core'
import serverCore from '@hcengineering/server-core'
@@ -36,6 +45,18 @@ export class TTrigger extends TDoc implements Trigger {
trigger!: Resource<TriggerFunc>
}
+@Model(serverCore.class.AsyncTrigger, core.class.Doc, DOMAIN_MODEL)
+export class TAsyncTrigger extends TDoc implements AsyncTrigger {
+trigger!: Resource<AsyncTriggerFunc>
+classes!: Ref<Class<Doc>>[]
+}
+@Model(serverCore.class.AsyncTriggerState, core.class.Doc, DOMAIN_DOC_INDEX_STATE)
+export class TAsyncTriggerState extends TDoc implements AsyncTriggerState {
+tx!: TxCUD<Doc>
+message!: string
+}
@Model(serverCore.mixin.ObjectDDParticipant, core.class.Class)
export class TObjectDDParticipant extends TClass implements ObjectDDParticipant {
collectDocs!: Resource<
@@ -52,5 +73,5 @@ export class TObjectDDParticipant extends TClass implements ObjectDDParticipant
}
export function createModel (builder: Builder): void {
-builder.createModel(TTrigger, TObjectDDParticipant)
+builder.createModel(TTrigger, TObjectDDParticipant, TAsyncTriggerState, TAsyncTrigger)
}
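TAsyncTrigger and TAsyncTriggerState give asynchronous triggers a model of their own: a trigger is a resource plus the classes it listens to, and the transactions still awaiting processing get their own state documents in DOMAIN_DOC_INDEX_STATE. A minimal sketch of what an implementation might look like; the (tx, control) signature is taken from AsyncOnGPTRequest later in this commit, while the function body, its name and the registration shown in comments are purely illustrative:

```ts
import type { Doc, Tx, TxCUD } from '@hcengineering/core'
import type { AsyncTriggerControl } from '@hcengineering/server-core'

// Hypothetical async trigger: it runs outside the synchronous transaction path,
// so slow work (network calls, GPT completions, embeddings) does not block clients.
export async function OnCommentForIndexing (tx: Tx, control: AsyncTriggerControl): Promise<Tx[]> {
  const cud = tx as TxCUD<Doc>
  console.info('async processing of', cud.objectClass, cud.objectId)
  // ...perform the slow work here and return any follow-up transactions...
  return []
}

// Registration would mirror the openai model change later in this commit:
// builder.createDoc(serverCore.class.AsyncTrigger, core.space.Model, {
//   trigger: myPlugin.trigger.OnCommentForIndexing,
//   classes: [chunter.class.Comment]
// })
```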

View File

@@ -30,6 +30,8 @@
"@hcengineering/platform": "^0.6.8",
"@hcengineering/model-core": "^0.6.0",
"@hcengineering/openai": "^0.6.0",
-"@hcengineering/server-core": "^0.6.1"
+"@hcengineering/server-core": "^0.6.1",
+"@hcengineering/model-chunter": "^0.6.0",
+"@hcengineering/model-recruit": "^0.6.0"
}
}

View File

@@ -22,6 +22,9 @@ import core, { DOMAIN_CONFIGURATION } from '@hcengineering/core'
import openai, { OpenAIConfiguration } from '@hcengineering/openai/src/plugin'
import serverCore from '@hcengineering/server-core'
+import chunter from '@hcengineering/model-chunter'
+import recruit from '@hcengineering/model-recruit'
@Model(openai.class.OpenAIConfiguration, core.class.Configuration, DOMAIN_CONFIGURATION)
@UX(getEmbeddedLabel('OpenAI'))
export class TOpenAIConfiguration extends TConfiguration implements OpenAIConfiguration {
@@ -41,7 +44,8 @@ export class TOpenAIConfiguration extends TConfiguration implements OpenAIConfig
export function createModel (builder: Builder): void {
builder.createModel(TOpenAIConfiguration)
-builder.createDoc(serverCore.class.Trigger, core.space.Model, {
+builder.createDoc(serverCore.class.AsyncTrigger, core.space.Model, {
-trigger: openai.trigger.OnGPTRequest
+trigger: openai.trigger.AsyncOnGPTRequest,
+classes: [chunter.class.Comment, recruit.class.ApplicantMatch]
})
}

View File

@@ -66,5 +66,6 @@ export interface ServerStorage extends LowLevelStorage {
options?: FindOptions<T>
) => Promise<FindResult<T>>
tx: (ctx: MeasureContext, tx: Tx) => Promise<[TxResult, Tx[]]>
+apply: (ctx: MeasureContext, tx: Tx[], broadcast: boolean) => Promise<Tx[]>
close: () => Promise<void>
}
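A short sketch of how the new apply entry point can be used. The signature comes from the interface above; the wrapper function, the broadcast callback and the import paths are assumptions for illustration:

```ts
import type { MeasureContext, Tx } from '@hcengineering/core'
import type { ServerStorage } from '@hcengineering/server-core'

// Apply a prepared batch of transactions in one call and hand any derived
// transactions to a caller-supplied broadcast callback (illustrative helper).
async function applyAndBroadcast (
  ctx: MeasureContext,
  storage: ServerStorage,
  txes: Tx[],
  broadcast: (tx: Tx) => void
): Promise<void> {
  const derived = await storage.apply(ctx, txes, true)
  for (const tx of derived) {
    broadcast(tx)
  }
}
```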

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -0,0 +1,55 @@
<script lang="ts">
import { DocIndexState } from '@hcengineering/core'
import { EditBox, Panel } from '@hcengineering/ui'
import IndexedDocumentContent from './IndexedDocumentContent.svelte'
export let left: DocIndexState
export let right: DocIndexState | undefined
let search: string = ''
</script>
<Panel on:changeContent on:close>
<EditBox focus bind:value={search} kind="search-style" />
<div class="indexed-background">
<div class="indexed-doc text-base max-h-125">
<div class="flex">
<div class="indexed-doc-part">
<IndexedDocumentContent indexDoc={left} {search} />
</div>
{#if right !== undefined}
<div class="indexed-doc-part">
<IndexedDocumentContent indexDoc={right} {search} />
</div>
{/if}
</div>
</div>
</div>
</Panel>
<style lang="scss">
.indexed-doc {
padding: 2.5rem;
display: flex;
overflow: auto;
min-width: 50rem;
max-width: 100rem;
}
.indexed-doc-part {
padding: 0.5rem;
display: grid;
overflow: auto;
min-width: 25rem;
max-width: 50rem;
}
.indexed-background {
background-color: white;
color: black;
user-select: text;
// width: 200rem;
.highlight {
color: blue;
}
}
</style>

View File

@@ -0,0 +1,91 @@
<script lang="ts">
import { AnyAttribute, DocIndexState, extractDocKey, isFullTextAttribute } from '@hcengineering/core'
import { Label } from '@hcengineering/ui'
import Icon from '@hcengineering/ui/src/components/Icon.svelte'
import { getClient } from '../utils'
export let indexDoc: DocIndexState
export let search: string = ''
const client = getClient()
function getContent (extra: string[], value: string): string[] {
const result = extra.includes('base64') ? decodeURIComponent(escape(atob(value))) : value
return `${result}`.split('\n')
}
$: summary = indexDoc?.fullSummary ?? undefined
$: attributes =
indexDoc !== undefined
? Object.entries(indexDoc.attributes).reduce<[AnyAttribute, string[][]][]>((a, b) => {
const bb = extractDocKey(b[0])
if (bb._class === undefined) {
return a
}
const attr = client.getHierarchy().getAttribute(bb._class, bb.attr)
if (!isFullTextAttribute(attr)) {
return a
}
const pos = a.findIndex((it) => it[0] === attr)
if (pos !== -1) {
a[pos][1].push(getContent(bb.extra, b[1]))
} else {
a.push([attr, [getContent(bb.extra, b[1])]])
}
return a
}, [])
: []
</script>
{#if summary}
{#if search.length > 0}
Result:
{#each summary.split('\n').filter((line) => line.toLowerCase().includes(search.toLowerCase())) as line}
<span class:highlight={true}>{line}</span>
{/each}
<br />
{/if}
Summary:
{#each summary.split('\n') as line}
{@const hl = search.length > 0 && line.toLowerCase().includes(search.toLowerCase())}
<span class:text-md={!hl} class:highlight={hl}>{line}</span>
{/each}
{:else if indexDoc}
{#each attributes as attr}
{@const clOf = client.getHierarchy().getClass(attr[0].attributeOf)}
<div class="flex-row-center">
{#if clOf.icon}
<div class="mr-1">
<Icon size={'medium'} icon={clOf.icon} />
</div>
{/if}
<Label label={clOf.label} />.<Label label={attr[0].label} />
</div>
<div class="p-1 flex-row flex-wrap">
{#each attr[1] as doc}
<div class="p-1" class:flex-col={doc.length > 1}>
{#if search.length > 0}
Result:
{#each doc.filter((line) => line.toLowerCase().includes(search.toLowerCase())) as line}
<span class:highlight={true}>{line}</span>
{/each}
<br />
{/if}
{#each doc as line}
{@const hl = search.length > 0 && line.toLowerCase().includes(search.toLowerCase())}
<span class:text-md={!hl} class:highlight={hl}>{line}</span>
{/each}
</div>
{/each}
</div>
{/each}
{/if}
<style lang="scss">
.highlight {
color: blue;
}
</style>
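The escape/atob combination inside getContent is the classic trick for decoding base64 that holds UTF-8 text in the browser: atob yields a byte string, escape turns each byte into %XX, and decodeURIComponent reassembles the multi-byte characters. A standalone check of the same pattern (the sample string is arbitrary):

```ts
// Round-trip a non-ASCII string through base64 the same way getContent decodes it.
const original = 'Résumé für Алексей'
const encoded = btoa(unescape(encodeURIComponent(original))) // UTF-8 -> byte string -> base64
const decoded = decodeURIComponent(escape(atob(encoded)))    // base64 -> byte string -> UTF-8
console.log(decoded === original) // true
```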

View File

@@ -1,15 +1,15 @@
<script lang="ts">
-import core, { AnyAttribute, Doc, DocIndexState, extractDocKey, isFullTextAttribute, Ref } from '@hcengineering/core'
+import core, { Doc, DocIndexState, Ref } from '@hcengineering/core'
-import { EditBox, Label, Panel } from '@hcengineering/ui'
+import { EditBox, Panel } from '@hcengineering/ui'
-import Icon from '@hcengineering/ui/src/components/Icon.svelte'
+import { createQuery } from '../utils'
-import { createQuery, getClient } from '../utils'
+import IndexedDocumentContent from './IndexedDocumentContent.svelte'
-export let objectId: Ref<Doc>
+export let objectId: Ref<Doc> | undefined
+export let indexDoc: DocIndexState | undefined = undefined
+export let search: string = ''
-const client = getClient()
const indexDocQuery = createQuery()
-let indexDoc: DocIndexState | undefined
$: if (objectId !== undefined) {
indexDocQuery.query(core.class.DocIndexState, { _id: objectId as Ref<DocIndexState> }, (res) => {
console.log(res)
@@ -18,84 +18,14 @@
} else {
indexDocQuery.unsubscribe()
}
-function getContent (extra: string[], value: string): string[] {
-const result = extra.includes('base64') ? decodeURIComponent(escape(atob(value))) : value
-return `${result}`.split('\n')
-}
-let search = ''
-$: summary = indexDoc?.fullSummary ?? undefined
-$: attributes =
-indexDoc !== undefined
-? Object.entries(indexDoc.attributes).reduce<[AnyAttribute, string[][]][]>((a, b) => {
-const bb = extractDocKey(b[0])
-if (bb._class === undefined) {
-return a
-}
-const attr = client.getHierarchy().getAttribute(bb._class, bb.attr)
-if (!isFullTextAttribute(attr)) {
-return a
-}
-const pos = a.findIndex((it) => it[0] === attr)
-if (pos !== -1) {
-a[pos][1].push(getContent(bb.extra, b[1]))
-} else {
-a.push([attr, [getContent(bb.extra, b[1])]])
-}
-return a
-}, [])
-: []
</script>
<Panel on:changeContent on:close>
<EditBox focus bind:value={search} kind="search-style" />
<div class="indexed-background">
<div class="indexed-doc text-base max-h-125">
-{#if summary}
+{#if indexDoc}
-{#if search.length > 0}
+<IndexedDocumentContent {indexDoc} {search} />
-Result:
-{#each summary.split('\n').filter((line) => line.toLowerCase().includes(search.toLowerCase())) as line}
-<span class:highlight={true}>{line}</span>
-{/each}
-<br />
-{/if}
-Summary:
-{#each summary.split('\n') as line}
-{@const hl = search.length > 0 && line.toLowerCase().includes(search.toLowerCase())}
-<span class:text-md={!hl} class:highlight={hl}>{line}</span>
-{/each}
-{:else if indexDoc}
-{#each attributes as attr}
-{@const clOf = client.getHierarchy().getClass(attr[0].attributeOf)}
-<div class="flex-row-center">
-{#if clOf.icon}
-<div class="mr-1">
-<Icon size={'medium'} icon={clOf.icon} />
-</div>
-{/if}
-<Label label={clOf.label} />.<Label label={attr[0].label} />
-</div>
-<div class="p-1 flex-row flex-wrap">
-{#each attr[1] as doc}
-<div class="p-1" class:flex-col={doc.length > 1}>
-{#if search.length > 0}
-Result:
-{#each doc.filter((line) => line.toLowerCase().includes(search.toLowerCase())) as line}
-<span class:highlight={true}>{line}</span>
-{/each}
-<br />
-{/if}
-{#each doc as line}
-{@const hl = search.length > 0 && line.toLowerCase().includes(search.toLowerCase())}
-<span class:text-md={!hl} class:highlight={hl}>{line}</span>
-{/each}
-</div>
-{/each}
-</div>
-{/each}
{/if}
</div>
</div>
@@ -113,8 +43,5 @@
background-color: white;
color: black;
user-select: text;
-.highlight {
-color: blue;
-}
}
</style>

View File

@@ -46,6 +46,7 @@ export { default as IconPerson } from './components/icons/Person.svelte'
export { default as IconMembersOutline } from './components/icons/MembersOutline.svelte'
export { default as ObjectSearchPopup } from './components/ObjectSearchPopup.svelte'
export { default as IndexedDocumentPreview } from './components/IndexedDocumentPreview.svelte'
+export { default as IndexedDocumentCompare } from './components/IndexedDocumentCompare.svelte'
export { default as DraggableList } from './components/DraggableList.svelte'
export { connect, versionError } from './connect'
export { default } from './plugin'

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",
@@ -65,6 +66,7 @@
"prosemirror-model": "^1.18.3",
"prosemirror-view": "^1.29.1",
"prosemirror-history": "^1.3.0",
+"prosemirror-keymap": "^1.2.0",
"rfc6902": "^5.0.1",
"diff": "^5.1.0",
"@tiptap/extension-code-block": "~2.0.0-beta.209",

View File

@@ -9,7 +9,8 @@
"build": "tsc --incremental --noEmit --outDir ./dist_cache && echo build",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -6,7 +6,7 @@
"license": "EPL-2.0",
"scripts": {
"build": "tsc --incremental --noEmit --outDir ./dist_cache && echo build",
-"build:watch": "tsc",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache",
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",

View File

@@ -9,7 +9,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -210,12 +210,9 @@
async function onMessage (event: CustomEvent) {
loading = true
-try {
await createAttachments()
-dispatch('message', { message: event.detail, attachments: attachments.size })
-} finally {
loading = false
-}
+dispatch('message', { message: event.detail, attachments: attachments.size })
}
async function onUpdate (event: CustomEvent) {

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -112,7 +112,6 @@
async function onMessage (event: CustomEvent) {
loading = true
-try {
const { message, attachments } = event.detail
await client.addCollection<Doc, Comment>(
_class,
@@ -132,10 +131,8 @@
draftComment = undefined
await saveDraft(object)
commentInputBox.removeDraft(false)
-} finally {
loading = false
}
-}
</script>
<AttachmentRefInput

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -9,7 +9,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -5,15 +5,15 @@
Card,
createQuery,
getClient,
-IndexedDocumentPreview,
+IndexedDocumentCompare,
MessageViewer,
SpaceSelect
} from '@hcengineering/presentation'
import { Applicant, ApplicantMatch, Candidate, Vacancy } from '@hcengineering/recruit'
-import { Button, IconActivity, IconAdd, Label, resizeObserver, showPopup, tooltip } from '@hcengineering/ui'
+import { Button, IconActivity, IconAdd, Label, resizeObserver, showPopup, Spinner, tooltip } from '@hcengineering/ui'
import Scroller from '@hcengineering/ui/src/components/Scroller.svelte'
import { MarkupPreviewPopup, ObjectPresenter } from '@hcengineering/view-resources'
-import { cosinesim } from '@hcengineering/view-resources/src/utils'
+import { calcSørensenDiceCoefficient, cosinesim } from '@hcengineering/view-resources/src/utils'
import { createEventDispatcher } from 'svelte'
import recruit from '../plugin'
import CreateApplication from './CreateApplication.svelte'
@@ -50,8 +50,20 @@
state = new Map(res.map((it) => [it._id, it] ?? []))
}
)
$: vacancyState = state.get(_space as unknown as Ref<DocIndexState>)
+$: scoreState = new Map(
+_objects.map((it) => [
+it._id,
+Math.round(
+calcSørensenDiceCoefficient(state.get(it._id)?.fullSummary ?? '', vacancyState?.fullSummary ?? '') * 100
+) / 100
+])
+)
+$: _sortedObjects = [..._objects].sort((a, b) => (scoreState.get(b._id) ?? 0) - (scoreState.get(a._id) ?? 0))
const matchQuery = createQuery()
let matches: Map<Ref<Doc>, ApplicantMatch> = new Map()
@@ -88,6 +100,7 @@
}
}
$: vacancyEmbedding = vacancyState && getEmbedding(vacancyState)
const dispatch = createEventDispatcher()
const client = getClient()
@@ -126,8 +139,8 @@
'top'
)
}
-async function showSummary (doc: Candidate): Promise<void> {
+async function showSummary (left: DocIndexState, right?: DocIndexState): Promise<void> {
-showPopup(IndexedDocumentPreview, { objectId: doc._id }, 'top')
+showPopup(IndexedDocumentCompare, { left, right }, 'top')
}
</script>
@@ -172,7 +185,7 @@
<div class="p-1">
{#if vacancy}
<Scroller>
-<div class="flex-col max-h-60">
+<div class="flex-col max-h-60 select-text">
{#if vacancy.description}
{vacancy.description}
{/if}
@@ -197,7 +210,7 @@
</thead>
<tbody>
-{#each _objects as doc}
+{#each _sortedObjects as doc}
{@const docState = state.get(doc._id)}
{@const docEmbedding = docState && getEmbedding(docState)}
{@const match = matches.get(doc._id)}
@@ -213,10 +226,12 @@
{/if}
</div>
</td>
-<td>
+<td class="whitespace-nowrap">
{#if docEmbedding && vacancyEmbedding}
{Math.round(cosinesim(docEmbedding, vacancyEmbedding) * 100)}
+/
{/if}
+{scoreState.get(doc._id) ?? 0}
</td>
<td>
{#if match?.complete}
@@ -232,13 +247,13 @@
{#if docState}
<Button
label={recruit.string.PerformMatch}
-loading={matching.has(doc._id) || !(match?.complete ?? true)}
+icon={matching.has(doc._id) || !(match?.complete ?? true) ? Spinner : IconActivity}
on:click={() => requestMatch(doc, docState)}
/>
<Button
icon={IconActivity}
showTooltip={{ label: presentation.string.DocumentPreview }}
-on:click={() => showSummary(doc)}
+on:click={() => showSummary(docState, vacancyState)}
/>
<Button
icon={IconAdd}

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
-"svelte-check": "svelte-check"
+"svelte-check": "svelte-check",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"@hcengineering/platform-rig": "^0.6.0",

View File

@@ -10,7 +10,8 @@
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
"svelte-check": "svelte-check",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -29,8 +29,10 @@
dispatch('changeContent')
}}
on:close={() => dispatch('close', null)}
+style:overflow={'auto'}
+style:width={'100%'}
>
-<div class="flex-grow mt-4">
+<div class="flex-grow p-4">
<MessageViewer message={value} />
</div>
</div>

View File

@@ -601,6 +601,38 @@ export function cosinesim (A: number[], B: number[]): number {
return similarity
}
+/**
+ * Calculate Sørensen-Dice coefficient
+ */
+export function calcSørensenDiceCoefficient (a: string, b: string): number {
+const first = a.replace(/\s+/g, '')
+const second = b.replace(/\s+/g, '')
+if (first === second) return 1 // identical or empty
+if (first.length < 2 || second.length < 2) return 0 // if either is a 0-letter or 1-letter string
+const firstBigrams = new Map<string, number>()
+for (let i = 0; i < first.length - 1; i++) {
+const bigram = first.substring(i, i + 2)
+const count = (firstBigrams.get(bigram) ?? 0) + 1
+firstBigrams.set(bigram, count)
+}
+let intersectionSize = 0
+for (let i = 0; i < second.length - 1; i++) {
+const bigram = second.substring(i, i + 2)
+const count = firstBigrams.get(bigram) ?? 0
+if (count > 0) {
+firstBigrams.set(bigram, count - 1)
+intersectionSize++
+}
+}
+return (2.0 * intersectionSize) / (first.length + second.length - 2)
+}
/**
* @public
*/
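calcSørensenDiceCoefficient is what the vacancy-matching table earlier in this commit uses for its new score column: it compares a candidate's full-text summary against the vacancy summary by counting shared character bigrams. A quick check of its behaviour with the classic example pair:

```ts
import { calcSørensenDiceCoefficient } from '@hcengineering/view-resources/src/utils'

// 'night' -> bigrams {ni, ig, gh, ht}; 'nacht' -> {na, ac, ch, ht}; one shared bigram,
// so the coefficient is 2 * 1 / (4 + 4) = 0.25.
console.log(calcSørensenDiceCoefficient('night', 'nacht')) // 0.25
console.log(calcSørensenDiceCoefficient('full text', 'fulltext')) // 1 (whitespace is stripped first)
console.log(calcSørensenDiceCoefficient('a', 'b')) // 0 (too short to form a bigram)
```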

View File

@@ -9,7 +9,8 @@
"build:docs": "api-extractor run --local",
"lint": "svelte-check && eslint",
"lint:fix": "eslint --fix src",
-"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
+"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
+"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
},
"devDependencies": {
"svelte-loader": "^3.1.3",

View File

@@ -84,7 +84,6 @@
"@hcengineering/minio": "^0.6.0", "@hcengineering/minio": "^0.6.0",
"@hcengineering/openai": "^0.6.0", "@hcengineering/openai": "^0.6.0",
"@hcengineering/translate": "^0.6.0", "@hcengineering/translate": "^0.6.0",
"@hcengineering/login-assets": "^0.6.0", "@hcengineering/login-assets": "^0.6.0",
"@hcengineering/view-assets": "^0.6.0", "@hcengineering/view-assets": "^0.6.0",
"@hcengineering/task-assets": "^0.6.0", "@hcengineering/task-assets": "^0.6.0",
@@ -111,7 +110,6 @@
"@hcengineering/document-assets": "^0.6.0", "@hcengineering/document-assets": "^0.6.0",
"@hcengineering/bitrix-assets": "^0.6.0", "@hcengineering/bitrix-assets": "^0.6.0",
"@hcengineering/request-assets": "^0.6.0", "@hcengineering/request-assets": "^0.6.0",
"@hcengineering/login": "^0.6.1", "@hcengineering/login": "^0.6.1",
"@hcengineering/view": "^0.6.2", "@hcengineering/view": "^0.6.2",
"@hcengineering/task": "^0.6.1", "@hcengineering/task": "^0.6.1",

View File

@@ -14,6 +14,7 @@
//
import {
+coreId,
DOMAIN_BLOB,
DOMAIN_FULLTEXT_BLOB,
DOMAIN_MODEL,
@@ -99,6 +100,8 @@ import { trackerId } from '@hcengineering/tracker'
import { viewId } from '@hcengineering/view'
import { workbenchId } from '@hcengineering/workbench'
+import coreEng from '@hcengineering/core/src/lang/en.json'
import loginEng from '@hcengineering/login-assets/lang/en.json'
import taskEn from '@hcengineering/task-assets/lang/en.json'
@@ -126,6 +129,8 @@ import hrEn from '@hcengineering/hr-assets/lang/en.json'
import documentEn from '@hcengineering/document-assets/lang/en.json'
import bitrixEn from '@hcengineering/bitrix-assets/lang/en.json'
import requestEn from '@hcengineering/request-assets/lang/en.json'
+addStringsLoader(coreId, async (lang: string) => coreEng)
addStringsLoader(loginId, async (lang: string) => loginEng)
addStringsLoader(taskId, async (lang: string) => taskEn)
@@ -190,7 +195,7 @@ export function start (
ConfigurationMiddleware.create
]
-const fullText = getMetricsContext().newChild('fulltext', {})
+const metrics = getMetricsContext().newChild('indexing', {})
function createIndexStages (
fullText: MeasureContext,
workspace: WorkspaceId,
@@ -243,7 +248,7 @@ export function start (
return startJsonRpc(
getMetricsContext(),
-(workspace: WorkspaceId, upgrade: boolean) => {
+(workspace, upgrade, broadcast) => {
const conf: DbConfiguration = {
domains: {
[DOMAIN_TX]: 'MongoTx',
@@ -252,6 +257,7 @@ export function start (
[DOMAIN_FULLTEXT_BLOB]: 'FullTextBlob',
[DOMAIN_MODEL]: 'Null'
},
+metrics,
defaultAdapter: 'Mongo',
adapters: {
MongoTx: {
@@ -282,14 +288,19 @@ export function start (
fulltextAdapter: {
factory: createElasticAdapter,
url: fullTextUrl,
-metrics: fullText,
stages: (adapter, storage, storageAdapter, contentAdapter) =>
-createIndexStages(fullText, workspace, adapter, storage, storageAdapter, contentAdapter)
+createIndexStages(
+metrics.newChild('stages', {}),
+workspace,
+adapter,
+storage,
+storageAdapter,
+contentAdapter
+)
},
contentAdapter: {
factory: createRekoniAdapter,
-url: rekoniUrl,
+url: rekoniUrl
-metrics: getMetricsContext().newChild('content', {})
},
storageFactory: () =>
new MinioService({
@@ -299,7 +310,7 @@ export function start (
}),
workspace
}
-return createPipeline(conf, middlewares, upgrade)
+return createPipeline(conf, middlewares, upgrade, broadcast)
},
(token: Token, pipeline: Pipeline, broadcast: BroadcastCall) => {
if (token.extra?.mode === 'backup') {
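The net effect of the metrics changes is one shared measurement tree instead of ad-hoc per-adapter contexts: DbConfiguration carries a single metrics context and the index stages report under a dedicated child of it. A tiny sketch of that hierarchy using only the calls visible in the diff (the context names and the import path are assumptions):

```ts
import { MeasureMetricsContext } from '@hcengineering/core'

// server -> indexing -> stages, mirroring the newChild calls above.
const server = new MeasureMetricsContext('server', {})
const indexing = server.newChild('indexing', {}) // goes into DbConfiguration.metrics
const stages = indexing.newChild('stages', {})   // handed to createIndexStages
console.log(indexing, stages)
```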

View File

@@ -107,7 +107,7 @@ export class OpenAIEmbeddingsStage implements FullTextPipelineStage {
updateSummary (summary: FullSummaryStage): void {
summary.fieldFilter.push((attr, value) => {
const tMarkup = attr.type._class === core.class.TypeMarkup
-const lowerCase = value.toLocaleLowerCase()
+const lowerCase: string = value.toLocaleLowerCase()
if (tMarkup && (lowerCase.includes('gpt:') || lowerCase.includes('gpt Answer:'))) {
return false
}
@@ -253,11 +253,11 @@ export class OpenAIEmbeddingsStage implements FullTextPipelineStage {
const docs = await this.adapter.searchEmbedding(_classes, query, embedding, {
size,
from,
-minScore: -100,
+minScore: -9,
-embeddingBoost: 100,
+embeddingBoost: 10,
field: this.field,
field_enable: this.field_enabled,
-fulltextBoost: 10
+fulltextBoost: 1
})
return {
docs,

View File

@@ -17,7 +17,7 @@ import type { Plugin, Resource } from '@hcengineering/platform'
import { plugin } from '@hcengineering/platform'
import type { Account, Class, Ref } from '@hcengineering/core'
-import { TriggerFunc } from '@hcengineering/server-core'
+import { AsyncTriggerFunc } from '@hcengineering/server-core'
import type { OpenAIConfiguration } from './types'
export * from './types'
@@ -31,7 +31,7 @@ export const openAIId = 'openai' as Plugin
*/
const openaiPlugin = plugin(openAIId, {
trigger: {
-OnGPTRequest: '' as Resource<TriggerFunc>
+AsyncOnGPTRequest: '' as Resource<AsyncTriggerFunc>
},
class: {
OpenAIConfiguration: '' as Ref<Class<OpenAIConfiguration>>

View File

@@ -26,18 +26,18 @@ import core, {
TxCUD, TxCUD,
TxProcessor TxProcessor
} from '@hcengineering/core' } from '@hcengineering/core'
import type { TriggerControl } from '@hcengineering/server-core' import recruit, { ApplicantMatch } from '@hcengineering/recruit'
import type { AsyncTriggerControl } from '@hcengineering/server-core'
import got from 'got' import got from 'got'
import { convert } from 'html-to-text' import { convert } from 'html-to-text'
import { chunks, encode } from './encoder/encoder' import { chunks } from './encoder/encoder'
import openai, { OpenAIConfiguration, openAIRatelimitter } from './plugin' import openai, { OpenAIConfiguration, openAIRatelimitter } from './plugin'
import recruit, { ApplicantMatch } from '@hcengineering/recruit'
const model = 'text-davinci-003' const model = 'text-davinci-003'
const defaultOptions = { const defaultOptions = {
max_tokens: 4000, max_tokens: 4000,
temperature: 0.9, temperature: 0.2,
top_p: 1, top_p: 1,
n: 1, n: 1,
stop: null as string | null stop: null as string | null
@@ -46,16 +46,31 @@ const defaultOptions = {
async function performCompletion ( async function performCompletion (
prompt: string, prompt: string,
options: typeof defaultOptions, options: typeof defaultOptions,
config: OpenAIConfiguration config: OpenAIConfiguration,
maxLen: number
): Promise<any> { ): Promise<any> {
const ep = config.endpoint + '/completions' const ep = config.endpoint + '/completions'
const chunkedPrompt = chunks(prompt, options.max_tokens - 250)[0] const chunkedPrompt = chunks(prompt, options.max_tokens - maxLen)[0]
const tokens = encode(chunkedPrompt).length
let response: any let response: any
let timeout = 50
const st = Date.now()
const request: Record<string, any> = {
model,
prompt: chunkedPrompt,
max_tokens: maxLen,
temperature: options.temperature,
top_p: options.top_p,
n: options.n,
stream: false
}
if (options.stop != null) {
request.stop = options.stop
}
while (true) { while (true) {
try { try {
console.info('Sending request to OpenAI')
response = await openAIRatelimitter.exec( response = await openAIRatelimitter.exec(
async () => async () =>
await got await got
@@ -64,18 +79,8 @@ async function performCompletion (
'Content-Type': 'application/json', 'Content-Type': 'application/json',
Authorization: `Bearer ${config.token}` Authorization: `Bearer ${config.token}`
}, },
json: { json: request,
model, timeout: 60000
prompt: chunkedPrompt,
max_tokens: options.max_tokens - tokens,
temperature: options.temperature,
top_p: options.top_p,
n: options.n,
stream: false,
logprobs: null,
stop: options.stop
},
timeout: 180000
}) })
.json() .json()
) )
@@ -84,15 +89,21 @@ async function performCompletion (
const msg = (e.message as string) ?? '' const msg = (e.message as string) ?? ''
if ( if (
msg.includes('Response code 429 (Too Many Requests)') || msg.includes('Response code 429 (Too Many Requests)') ||
msg.includes('Response code 503 (Service Unavailable)') msg.includes('Response code 503 (Service Unavailable)') ||
msg.includes('Response code 400 (Bad Request)')
) { ) {
timeout += 100
console.info('Too many requests, Waiting 1sec to retry.')
await new Promise((resolve) => { await new Promise((resolve) => {
setTimeout(resolve, 1000) setTimeout(resolve, timeout)
}) })
continue continue
} }
if (Date.now() - st > 60000) {
return {}
}
console.error(e) console.error(e)
return [] return {}
} }
} }
return response return response
@@ -100,7 +111,7 @@ async function performCompletion (
/** /**
* @public * @public
*/ */
export async function OnGPTRequest (tx: Tx, tc: TriggerControl): Promise<Tx[]> { export async function AsyncOnGPTRequest (tx: Tx, tc: AsyncTriggerControl): Promise<Tx[]> {
const actualTx = TxProcessor.extractTx(tx) const actualTx = TxProcessor.extractTx(tx)
if (tc.hierarchy.isDerived(actualTx._class, core.class.TxCUD) && actualTx.modifiedBy !== openai.account.GPT) { if (tc.hierarchy.isDerived(actualTx._class, core.class.TxCUD) && actualTx.modifiedBy !== openai.account.GPT) {
@@ -116,7 +127,7 @@ export async function OnGPTRequest (tx: Tx, tc: TriggerControl): Promise<Tx[]> {
return [] return []
} }
async function handleComment (tx: Tx, tc: TriggerControl): Promise<Tx[]> { async function handleComment (tx: Tx, tc: AsyncTriggerControl): Promise<Tx[]> {
const actualTx = TxProcessor.extractTx(tx) const actualTx = TxProcessor.extractTx(tx)
const cud: TxCUD<Doc> = actualTx as TxCUD<Doc> const cud: TxCUD<Doc> = actualTx as TxCUD<Doc>
@@ -178,7 +189,7 @@ async function handleComment (tx: Tx, tc: TriggerControl): Promise<Tx[]> {
const options = parseOptions(split) const options = parseOptions(split)
const response = await performCompletion(prompt, options, config) const response = await performCompletion(prompt, options, config, 1024)
const result: Tx[] = [] const result: Tx[] = []
let finalMsg = msg + '</br>' let finalMsg = msg + '</br>'
@@ -205,19 +216,60 @@ async function handleComment (tx: Tx, tc: TriggerControl): Promise<Tx[]> {
) )
// col.modifiedBy = openai.account.GPT // col.modifiedBy = openai.account.GPT
result.push(col) result.push(col)
// Store response transactions
await tc.txFx(async (st) => {
for (const t of result) {
await st.tx(t)
}
})
return result return result
} }
} }
return [] return []
} }
async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]> {
function getText (response: any): string | undefined {
let result = ''
for (const choices of response?.choices ?? []) {
let val = (choices.text as string).trim()
// Add new line before Reason:
val = val.split('\n\n').join('\n')
val = val.replace('Reason:', '\nReason:')
val = val.replace('Candidate is', '\nCandidate is')
val = val.replace(/Match score: (\d+\/\d+|\d+%) /gi, (val) => val + '\n')
val = val.split('\n').join('\n<br/>')
result += val.trim()
}
if (result.length === 0) {
return undefined
}
return result
}
async function summarizeCandidate (config: OpenAIConfiguration, chunks: string[], maxLen: number): Promise<string> {
const options: typeof defaultOptions = {
...defaultOptions,
temperature: 0.1
}
if (chunks.length === 1) {
return chunks[0]
}
const candidateSummaryRequest = `I want you to act as a recruiter.
I will provide some information about candidate, and it will be your job to come up with short and essential summary describing resume.
My first request is "I need help to summarize my CV. ${chunks.join(' ')}`
return getText(await performCompletion(candidateSummaryRequest, options, config, maxLen)) ?? chunks[0]
}
async function summarizeVacancy (config: OpenAIConfiguration, chunks: string[], maxLen: number): Promise<string> {
const options: typeof defaultOptions = {
...defaultOptions,
temperature: 0.1
}
if (chunks.length === 1) {
return chunks[0]
}
const candidateSummaryRequest = `I want you to act as a recruiter.
I will provide some information about vacancy, and it will be your job to come up with short and essential summary describing vacancy.
My first request is "I need help to summarize my Vacancy description. ${chunks.join(' ')}`
return getText(await performCompletion(candidateSummaryRequest, options, config, maxLen)) ?? chunks[0]
}
async function handleApplicantMatch (tx: Tx, tc: AsyncTriggerControl): Promise<Tx[]> {
const [config] = await tc.findAll(openai.class.OpenAIConfiguration, {}) const [config] = await tc.findAll(openai.class.OpenAIConfiguration, {})
if (!(config?.enabled ?? false)) { if (!(config?.enabled ?? false)) {
@@ -236,7 +288,7 @@ async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]>
temperature: 0.1 temperature: 0.1
} }
const maxAnswerTokens = 500 const maxAnswerTokens = 256
const maxVacancyTokens = options.max_tokens - maxAnswerTokens / 2 const maxVacancyTokens = options.max_tokens - maxAnswerTokens / 2
const maxCandidateTokens = maxVacancyTokens const maxCandidateTokens = maxVacancyTokens
@@ -247,7 +299,9 @@ async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]>
selectors: [{ selector: 'img', format: 'skip' }] selectors: [{ selector: 'img', format: 'skip' }]
}) })
candidateText = chunks(candidateText, maxCandidateTokens)[0] const candidateTextC = chunks(candidateText, maxCandidateTokens)
candidateText = await summarizeCandidate(config, candidateTextC, maxCandidateTokens)
let vacancyText = cud.attributes.vacancy let vacancyText = cud.attributes.vacancy
@@ -255,18 +309,19 @@ async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]>
preserveNewlines: true, preserveNewlines: true,
selectors: [{ selector: 'img', format: 'skip' }] selectors: [{ selector: 'img', format: 'skip' }]
}) })
vacancyText = chunks(vacancyText, maxVacancyTokens)[0] vacancyText = await summarizeVacancy(config, chunks(vacancyText, maxVacancyTokens), maxVacancyTokens)
// Enabled, we could complete. const text = `'I want you to act as a recruiter. I will provide some information about vacancy and resume, and it will be your job to come up with solution why candidate is matching vacancy. Please considering following vacancy:\n ${vacancyText}\n and please write if following candidate good match for vacancy and why:\n ${candidateText}\n`
// const text = `I want you to act as a recruiter.
// I will provide some information about vacancy and resume, and it will be your job to come up with solution why candidate is matching vacancy.
// My first request is "I need help to match vacancy ${vacancyText} and CV: ${candidateText}”`
const text = `'Considering following vacancy:\n ${vacancyText}\n write if following candidate good for vacancy and why:\n ${candidateText}\n` const response = await performCompletion(text, options, config, maxAnswerTokens)
const response = await performCompletion(text, options, config)
const result: Tx[] = [] const result: Tx[] = []
let finalMsg = '' let finalMsg = ''
for (const choices of response.choices) { for (const choices of response?.choices ?? []) {
let val = (choices.text as string).trim() let val = (choices.text as string).trim()
// Add new line before Reason: // Add new line before Reason:
val = val.split('\n\n').join('\n') val = val.split('\n\n').join('\n')
@@ -291,13 +346,6 @@ async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]>
) )
// col.modifiedBy = openai.account.GPT // col.modifiedBy = openai.account.GPT
result.push(col) result.push(col)
// Store response transactions
await tc.txFx(async (st) => {
for (const t of result) {
await st.tx(t)
}
})
return result return result
} }
@@ -306,7 +354,7 @@ async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]>
*/ */
export const openAIPluginImpl = async () => ({ export const openAIPluginImpl = async () => ({
trigger: { trigger: {
OnGPTRequest AsyncOnGPTRequest
} }
}) })
function parseOptions (split: string[]): typeof defaultOptions { function parseOptions (split: string[]): typeof defaultOptions {

View File

@@ -36,10 +36,12 @@ import { Middleware, MiddlewareCreator, Pipeline, SessionContext } from './types
export async function createPipeline ( export async function createPipeline (
conf: DbConfiguration, conf: DbConfiguration,
constructors: MiddlewareCreator[], constructors: MiddlewareCreator[],
upgrade: boolean upgrade: boolean,
broadcast: (tx: Tx[]) => void
): Promise<Pipeline> { ): Promise<Pipeline> {
const storage = await createServerStorage(conf, { const storage = await createServerStorage(conf, {
upgrade upgrade,
broadcast
}) })
return new TPipeline(storage, constructors) return new TPipeline(storage, constructors)
} }

View File

@@ -18,7 +18,7 @@ import type { Plugin } from '@hcengineering/platform'
import { plugin } from '@hcengineering/platform' import { plugin } from '@hcengineering/platform'
import type { Class, Ref, Space } from '@hcengineering/core' import type { Class, Ref, Space } from '@hcengineering/core'
import type { ObjectDDParticipant, Trigger } from './types' import type { AsyncTrigger, AsyncTriggerState, ObjectDDParticipant, Trigger } from './types'
/** /**
* @public * @public
@@ -30,13 +30,16 @@ export const serverCoreId = 'server-core' as Plugin
*/ */
const serverCore = plugin(serverCoreId, { const serverCore = plugin(serverCoreId, {
class: { class: {
Trigger: '' as Ref<Class<Trigger>> Trigger: '' as Ref<Class<Trigger>>,
AsyncTrigger: '' as Ref<Class<AsyncTrigger>>,
AsyncTriggerState: '' as Ref<Class<AsyncTriggerState>>
}, },
mixin: { mixin: {
ObjectDDParticipant: '' as Ref<ObjectDDParticipant> ObjectDDParticipant: '' as Ref<ObjectDDParticipant>
}, },
space: { space: {
DocIndexState: '' as Ref<Space> DocIndexState: '' as Ref<Space>,
TriggerState: '' as Ref<Space>
} }
}) })

View File

@@ -0,0 +1,123 @@
import core, {
Class,
Doc,
Hierarchy,
MeasureContext,
ModelDb,
Ref,
ServerStorage,
Tx,
TxCUD,
TxFactory,
TxProcessor
} from '@hcengineering/core'
import { getResource } from '@hcengineering/platform'
import plugin from '../plugin'
import { AsyncTrigger, AsyncTriggerControl, AsyncTriggerFunc } from '../types'
/**
* @public
*/
export class AsyncTriggerProcessor {
canceling: boolean = false
processing: Promise<void> | undefined
triggers: AsyncTrigger[] = []
classes: Ref<Class<Doc>>[] = []
factory = new TxFactory(core.account.System)
functions: AsyncTriggerFunc[] = []
trigger = (): void => {}
control: AsyncTriggerControl
constructor (
readonly model: ModelDb,
readonly hierarchy: Hierarchy,
readonly storage: ServerStorage,
readonly metrics: MeasureContext
) {
this.control = {
hierarchy: this.hierarchy,
modelDb: this.model,
txFactory: this.factory,
findAll: async (_class, query, options) => {
return await this.storage.findAll(this.metrics, _class, query, options)
}
}
}
async cancel (): Promise<void> {
this.canceling = true
await this.processing
}
async start (): Promise<void> {
await this.updateTriggers()
this.processing = this.doProcessing()
}
async updateTriggers (): Promise<void> {
try {
this.triggers = await this.model.findAll(plugin.class.AsyncTrigger, {})
this.classes = this.triggers.reduce<Ref<Class<Doc>>[]>((arr, it) => arr.concat(it.classes), [])
this.functions = await Promise.all(this.triggers.map(async (trigger) => await getResource(trigger.trigger)))
} catch (err: any) {
console.error(err)
}
}
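// Collect incoming TxCUD transactions that touch one of the registered async trigger
// classes and persist them as AsyncTriggerState documents, so pending work survives a
// restart and is executed outside the synchronous transaction path.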
async tx (tx: Tx[]): Promise<void> {
const result: Tx[] = []
for (const _tx of tx) {
const actualTx = TxProcessor.extractTx(_tx)
if (
this.hierarchy.isDerived(actualTx._class, core.class.TxCUD) &&
this.hierarchy.isDerived(_tx._class, core.class.TxCUD)
) {
const cud = actualTx as TxCUD<Doc>
if (this.classes.some((it) => this.hierarchy.isDerived(cud.objectClass, it))) {
// We need processing
result.push(
this.factory.createTxCreateDoc(plugin.class.AsyncTriggerState, plugin.space.TriggerState, {
tx: _tx as TxCUD<Doc>,
message: 'Processing...'
})
)
}
}
}
if (result.length > 0) {
await this.storage.apply(this.metrics, result, false)
this.processing = this.doProcessing()
}
}
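// Drain queued AsyncTriggerState documents in batches of 10: run every registered async
// trigger function on the stored transaction, remove the state document, then apply (and
// broadcast) the derived transactions the triggers produced. Individual trigger errors are
// swallowed so one failing document cannot block the queue.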
private async doProcessing (): Promise<void> {
while (!this.canceling) {
const docs = await this.storage.findAll(this.metrics, plugin.class.AsyncTriggerState, {}, { limit: 10 })
if (docs.length === 0) {
return
}
for (const doc of docs) {
const result: Tx[] = []
if (this.canceling) {
break
}
try {
for (const f of this.functions) {
result.push(...(await f(doc.tx, this.control)))
}
} catch (err: any) {}
await this.storage.apply(this.metrics, [this.factory.createTxRemoveDoc(doc._class, doc.space, doc._id)], false)
await this.storage.apply(this.metrics, result, true)
}
}
}
}

View File

@@ -54,13 +54,15 @@ import { FullTextIndex } from './fulltext'
import { FullTextIndexPipeline } from './indexer' import { FullTextIndexPipeline } from './indexer'
import { FullTextPipelineStage } from './indexer/types' import { FullTextPipelineStage } from './indexer/types'
import serverCore from './plugin' import serverCore from './plugin'
import { AsyncTriggerProcessor } from './processor'
import { Triggers } from './triggers' import { Triggers } from './triggers'
import type { import type {
ContentAdapterFactory, ContentAdapterFactory,
ContentTextAdapter, ContentTextAdapter,
FullTextAdapter, FullTextAdapter,
FullTextAdapterFactory, FullTextAdapterFactory,
ObjectDDParticipant ObjectDDParticipant,
TriggerControl
} from './types' } from './types'
import { createCacheFindAll } from './utils' import { createCacheFindAll } from './utils'
@@ -82,16 +84,15 @@ export interface DbConfiguration {
domains: Record<string, string> domains: Record<string, string>
defaultAdapter: string defaultAdapter: string
workspace: WorkspaceId workspace: WorkspaceId
metrics: MeasureContext
fulltextAdapter: { fulltextAdapter: {
factory: FullTextAdapterFactory factory: FullTextAdapterFactory
url: string url: string
metrics: MeasureContext
stages: FullTextPipelineStageFactory stages: FullTextPipelineStageFactory
} }
contentAdapter: { contentAdapter: {
factory: ContentAdapterFactory factory: ContentAdapterFactory
url: string url: string
metrics: MeasureContext
} }
storageFactory?: () => MinioService storageFactory?: () => MinioService
} }
@@ -99,6 +100,7 @@ export interface DbConfiguration {
class TServerStorage implements ServerStorage { class TServerStorage implements ServerStorage {
private readonly fulltext: FullTextIndex private readonly fulltext: FullTextIndex
hierarchy: Hierarchy hierarchy: Hierarchy
triggerProcessor: AsyncTriggerProcessor
scopes = new Map<string, Promise<any>>() scopes = new Map<string, Promise<any>>()
@@ -112,16 +114,19 @@ class TServerStorage implements ServerStorage {
readonly storageAdapter: MinioService | undefined, readonly storageAdapter: MinioService | undefined,
readonly modelDb: ModelDb, readonly modelDb: ModelDb,
private readonly workspace: WorkspaceId, private readonly workspace: WorkspaceId,
private readonly contentAdapter: ContentTextAdapter,
readonly indexFactory: (storage: ServerStorage) => FullTextIndex, readonly indexFactory: (storage: ServerStorage) => FullTextIndex,
options?: ServerStorageOptions readonly options: ServerStorageOptions,
metrics: MeasureContext
) { ) {
this.hierarchy = hierarchy this.hierarchy = hierarchy
this.fulltext = indexFactory(this) this.fulltext = indexFactory(this)
this.triggerProcessor = new AsyncTriggerProcessor(modelDb, hierarchy, this, metrics.newChild('triggers', {}))
void this.triggerProcessor.start()
} }
async close (): Promise<void> { async close (): Promise<void> {
await this.fulltext.close() await this.fulltext.close()
await this.triggerProcessor.cancel()
for (const o of this.adapters.values()) { for (const o of this.adapters.values()) {
await o.close() await o.close()
} }
@@ -549,11 +554,7 @@ class TServerStorage implements ServerStorage {
) )
const moves = await ctx.with('process-move', {}, () => this.processMove(ctx, txes, findAll)) const moves = await ctx.with('process-move', {}, () => this.processMove(ctx, txes, findAll))
const triggers = await ctx.with('process-triggers', {}, async (ctx) => { const triggerControl: Omit<TriggerControl, 'txFactory'> = {
const result: Tx[] = []
for (const tx of txes) {
result.push(
...(await this.triggers.apply(tx.modifiedBy, tx, {
removedMap, removedMap,
workspace: this.workspace, workspace: this.workspace,
fx: triggerFx.fx, fx: triggerFx.fx,
@@ -568,12 +569,13 @@
}, },
findAll: fAll(ctx), findAll: fAll(ctx),
modelDb: this.modelDb, modelDb: this.modelDb,
hierarchy: this.hierarchy, hierarchy: this.hierarchy
txFx: async (f) => {
await f(this.getAdapter(DOMAIN_TX))
} }
})) const triggers = await ctx.with('process-triggers', {}, async (ctx) => {
) const result: Tx[] = []
for (const tx of txes) {
result.push(...(await this.triggers.apply(tx.modifiedBy, tx, triggerControl)))
await ctx.with('async-triggers', {}, (ctx) => this.triggerProcessor.tx([tx]))
} }
return result return result
}) })
@@ -639,6 +641,40 @@ class TServerStorage implements ServerStorage {
return { passed, onEnd } return { passed, onEnd }
} }
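// apply: store a batch of server-side transactions (derived and TxApplyIf transactions are
// not persisted to DOMAIN_TX), route them to the adapters, optionally broadcast them to
// connected clients, then run derived processing and full-text indexing.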
async apply (ctx: MeasureContext, tx: Tx[], broadcast: boolean): Promise<Tx[]> {
const triggerFx = new Effects()
const cacheFind = createCacheFindAll(this)
const txToStore = tx.filter(
(it) => it.space !== core.space.DerivedTx && !this.hierarchy.isDerived(it._class, core.class.TxApplyIf)
)
await ctx.with('domain-tx', {}, async () => await this.getAdapter(DOMAIN_TX).tx(...txToStore))
await ctx.with('apply', {}, (ctx) => this.routeTx(ctx, ...tx))
// send transactions
if (broadcast) {
this.options?.broadcast?.(tx)
}
// invoke triggers and store derived objects
const derived = await this.proccessDerived(ctx, tx, triggerFx, cacheFind, new Map<Ref<Doc>, Doc>())
// index object
for (const _tx of tx) {
await ctx.with('fulltext', {}, (ctx) => this.fulltext.tx(ctx, _tx))
}
// index derived objects
for (const tx of derived) {
await ctx.with('derived-processor', { _class: txClass(tx) }, (ctx) => this.fulltext.tx(ctx, tx))
}
for (const fx of triggerFx.effects) {
await fx()
}
return [...tx, ...derived]
}
async tx (ctx: MeasureContext, tx: Tx): Promise<[TxResult, Tx[]]> { async tx (ctx: MeasureContext, tx: Tx): Promise<[TxResult, Tx[]]> {
// store tx // store tx
const _class = txClass(tx) const _class = txClass(tx)
@@ -753,13 +789,15 @@ export interface ServerStorageOptions {
// Indexing is not required to be started for upgrade mode. // Indexing is not required to be started for upgrade mode.
upgrade: boolean upgrade: boolean
broadcast?: (tx: Tx[]) => void
} }
/** /**
* @public * @public
*/ */
export async function createServerStorage ( export async function createServerStorage (
conf: DbConfiguration, conf: DbConfiguration,
options?: ServerStorageOptions options: ServerStorageOptions
): Promise<ServerStorage> { ): Promise<ServerStorage> {
const hierarchy = new Hierarchy() const hierarchy = new Hierarchy()
const triggers = new Triggers() const triggers = new Triggers()
@@ -803,13 +841,15 @@ export async function createServerStorage (
const fulltextAdapter = await conf.fulltextAdapter.factory( const fulltextAdapter = await conf.fulltextAdapter.factory(
conf.fulltextAdapter.url, conf.fulltextAdapter.url,
conf.workspace, conf.workspace,
conf.fulltextAdapter.metrics conf.metrics.newChild('fulltext', {})
) )
const metrics = conf.metrics.newChild('server-storage', {})
const contentAdapter = await conf.contentAdapter.factory( const contentAdapter = await conf.contentAdapter.factory(
conf.contentAdapter.url, conf.contentAdapter.url,
conf.workspace, conf.workspace,
conf.contentAdapter.metrics metrics.newChild('content', {})
) )
const defaultAdapter = adapters.get(conf.defaultAdapter) const defaultAdapter = adapters.get(conf.defaultAdapter)
@@ -827,7 +867,7 @@ export async function createServerStorage (
stages, stages,
hierarchy, hierarchy,
conf.workspace, conf.workspace,
fulltextAdapter.metrics(), metrics.newChild('fulltext', {}),
modelDb modelDb
) )
return new FullTextIndex( return new FullTextIndex(
@@ -837,10 +877,9 @@ export async function createServerStorage (
storageAdapter, storageAdapter,
conf.workspace, conf.workspace,
indexer, indexer,
options?.upgrade ?? false options.upgrade ?? false
) )
} }
return new TServerStorage( return new TServerStorage(
conf.domains, conf.domains,
conf.defaultAdapter, conf.defaultAdapter,
@@ -851,9 +890,9 @@ export async function createServerStorage (
storageAdapter, storageAdapter,
modelDb, modelDb,
conf.workspace, conf.workspace,
contentAdapter,
indexFactory, indexFactory,
options options,
metrics
) )
} }

View File

@@ -32,6 +32,7 @@ import {
Storage, Storage,
Timestamp, Timestamp,
Tx, Tx,
TxCUD,
TxFactory, TxFactory,
TxResult, TxResult,
WorkspaceId WorkspaceId
@@ -112,8 +113,6 @@ export interface TriggerControl {
// Later can be replaced with generic one with bucket encapsulated inside. // Later can be replaced with generic one with bucket encapsulated inside.
storageFx: (f: (adapter: MinioService, workspaceId: WorkspaceId) => Promise<void>) => void storageFx: (f: (adapter: MinioService, workspaceId: WorkspaceId) => Promise<void>) => void
fx: (f: () => Promise<void>) => void fx: (f: () => Promise<void>) => void
txFx: (f: (storage: Storage) => Promise<void>) => Promise<void>
} }
/** /**
@@ -121,6 +120,20 @@ export interface TriggerControl {
*/ */
export type TriggerFunc = (tx: Tx, ctrl: TriggerControl) => Promise<Tx[]> export type TriggerFunc = (tx: Tx, ctrl: TriggerControl) => Promise<Tx[]>
/**
* @public
*/
export interface AsyncTriggerControl {
txFactory: TxFactory
findAll: Storage['findAll']
hierarchy: Hierarchy
modelDb: ModelDb
}
/**
* @public
*/
export type AsyncTriggerFunc = (tx: Tx, ctrl: AsyncTriggerControl) => Promise<Tx[]>
/** /**
* @public * @public
*/ */
@@ -128,6 +141,22 @@ export interface Trigger extends Doc {
trigger: Resource<TriggerFunc> trigger: Resource<TriggerFunc>
} }
/**
* @public
*/
export interface AsyncTrigger extends Doc {
trigger: Resource<AsyncTriggerFunc>
classes: Ref<Class<Doc>>[]
}
/**
* @public
*/
export interface AsyncTriggerState extends Doc {
tx: TxCUD<Doc>
message: string
}
/** /**
* @public * @public
*/ */
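For orientation, a minimal sketch of an async trigger built against the interfaces above; the function name and body are illustrative assumptions, not part of this commit. A real trigger is registered through a server.class.AsyncTrigger model document listing the classes it reacts to.
import core, { Tx, TxProcessor } from '@hcengineering/core'
import type { AsyncTriggerControl, AsyncTriggerFunc } from '@hcengineering/server-core'
// Hypothetical async trigger: it runs outside the main transaction path, so it may call
// slow external services and return derived transactions once it is done.
export const OnSomethingSlow: AsyncTriggerFunc = async (tx: Tx, ctrl: AsyncTriggerControl): Promise<Tx[]> => {
  const actualTx = TxProcessor.extractTx(tx)
  if (!ctrl.hierarchy.isDerived(actualTx._class, core.class.TxCUD)) {
    return []
  }
  // ...inspect actualTx, query related documents via ctrl.findAll, call an external service...
  // Derived transactions are built with ctrl.txFactory and returned for storage and broadcast.
  return []
}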

View File

@@ -196,22 +196,22 @@ class ElasticAdapter implements FullTextAdapter {
} }
}, },
script: { script: {
source: `Math.abs(cosineSimilarity(params.queryVector, '${options.field}')) + 1`, source: `cosineSimilarity(params.queryVector, '${options.field}') + 1`,
params: { params: {
queryVector: embedding queryVector: embedding
} }
}, },
boost: options.embeddingBoost ?? 100.0 boost: options.embeddingBoost ?? 10.0
}
},
{
simple_query_string: {
query: search.$search,
flags: 'OR|PREFIX|PHRASE',
default_operator: 'and',
boost: options.fulltextBoost ?? 1
} }
} }
// ,{
// simple_query_string: {
// query: search.$search,
// flags: 'OR|PREFIX|PHRASE',
// default_operator: 'and',
// boost: options.fulltextBoost ?? 1
// }
// }
], ],
filter: [ filter: [
{ {
@@ -235,9 +235,10 @@ class ElasticAdapter implements FullTextAdapter {
const sourceHits = result.body.hits.hits const sourceHits = result.body.hits.hits
const min = options?.minScore ?? 75 const min = options?.minScore ?? 75
const hits: any[] = sourceHits.filter((it: any) => it._score > min) const embBoost = options.embeddingBoost ?? 10.0
return hits.map((hit) => ({ ...hit._source, _score: hit._score - (options.embeddingBoost ?? 100.0) })) const hits: any[] = sourceHits.filter((it: any) => it._score - embBoost > min)
return hits.map((hit) => ({ ...hit._source, _score: hit._score - embBoost }))
} catch (err) { } catch (err) {
console.error(JSON.stringify(err, null, 2)) console.error(JSON.stringify(err, null, 2))
return [] return []
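Rough arithmetic behind the rescaled thresholds, assuming the clause boost multiplies the script score: cosineSimilarity + 1 lies in [0, 2], so with embeddingBoost 10 a raw hit score is about 10 · (cosine + 1); subtracting embBoost brings it back to about 10 · cosine, and the minScore of -9 passed by OpenAIEmbeddingsStage then keeps hits whose cosine similarity is above roughly -0.9.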

View File

@@ -13,10 +13,27 @@
// limitations under the License. // limitations under the License.
// //
import type { Account, Arr, Class, Data, Doc, Mixin, Obj, Ref, TxCreateDoc, TxCUD } from '@hcengineering/core' import core, {
import core, { AttachedDoc, ClassifierKind, DOMAIN_MODEL, DOMAIN_TX, TxFactory } from '@hcengineering/core' Account,
Arr,
AttachedDoc,
Class,
ClassifierKind,
Data,
Doc,
DOMAIN_DOC_INDEX_STATE,
DOMAIN_MODEL,
DOMAIN_TX,
Mixin,
Obj,
Ref,
TxCreateDoc,
TxCUD,
TxFactory
} from '@hcengineering/core'
import type { IntlString, Plugin } from '@hcengineering/platform' import type { IntlString, Plugin } from '@hcengineering/platform'
import { plugin } from '@hcengineering/platform' import { plugin } from '@hcengineering/platform'
import server from '@hcengineering/server-core'
export const txFactory = new TxFactory(core.account.System) export const txFactory = new TxFactory(core.account.System)
@@ -101,6 +118,30 @@ export function genMinModel (): TxCUD<Doc>[] {
domain: DOMAIN_MODEL domain: DOMAIN_MODEL
}) })
) )
txes.push(
createClass(core.class.DocIndexState, {
label: 'DocIndexState' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_DOC_INDEX_STATE
})
)
txes.push(
createClass(server.class.AsyncTrigger, {
label: 'AsyncTrigger' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_MODEL
})
)
txes.push(
createClass(server.class.AsyncTriggerState, {
label: 'AsyncTriggerState' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_DOC_INDEX_STATE
})
)
txes.push( txes.push(
createClass(core.class.Account, { createClass(core.class.Account, {
label: 'Account' as IntlString, label: 'Account' as IntlString,

View File

@@ -143,21 +143,20 @@ describe('mongo operations', () => {
url: '' url: ''
} }
}, },
metrics: new MeasureMetricsContext('', {}),
fulltextAdapter: { fulltextAdapter: {
factory: createNullFullTextAdapter, factory: createNullFullTextAdapter,
url: '', url: '',
stages: () => [], stages: () => []
metrics: new MeasureMetricsContext('', {})
}, },
contentAdapter: { contentAdapter: {
factory: createNullContentTextAdapter, factory: createNullContentTextAdapter,
url: '', url: ''
metrics: new MeasureMetricsContext('', {})
}, },
workspace: getWorkspaceId(dbId, ''), workspace: getWorkspaceId(dbId, ''),
storageFactory: () => createNullStorageFactory() storageFactory: () => createNullStorageFactory()
} }
const serverStorage = await createServerStorage(conf) const serverStorage = await createServerStorage(conf, { upgrade: false })
const ctx = new MeasureMetricsContext('client', {}) const ctx = new MeasureMetricsContext('client', {})
client = await createClient(async (handler) => { client = await createClient(async (handler) => {
const st: ClientConnection = { const st: ClientConnection = {

View File

@@ -19,6 +19,7 @@ import core, {
Ref, Ref,
Space, Space,
toWorkspaceString, toWorkspaceString,
Tx,
TxFactory, TxFactory,
WorkspaceId WorkspaceId
} from '@hcengineering/core' } from '@hcengineering/core'
@@ -27,7 +28,7 @@ import type { Pipeline } from '@hcengineering/server-core'
import { decodeToken, Token } from '@hcengineering/server-token' import { decodeToken, Token } from '@hcengineering/server-token'
import { createServer, IncomingMessage } from 'http' import { createServer, IncomingMessage } from 'http'
import WebSocket, { WebSocketServer } from 'ws' import WebSocket, { WebSocketServer } from 'ws'
import { BroadcastCall, Session } from './types' import { BroadcastCall, PipelineFactory, Session } from './types'
let LOGGING_ENABLED = true let LOGGING_ENABLED = true
@@ -56,7 +57,7 @@ class SessionManager {
ctx: MeasureContext, ctx: MeasureContext,
ws: WebSocket, ws: WebSocket,
token: Token, token: Token,
pipelineFactory: (ws: WorkspaceId, upgrade: boolean) => Promise<Pipeline>, pipelineFactory: PipelineFactory,
productId: string productId: string
): Promise<Session> { ): Promise<Session> {
const wsString = toWorkspaceString(token.workspace, '@') const wsString = toWorkspaceString(token.workspace, '@')
@@ -83,7 +84,7 @@ class SessionManager {
} }
if (LOGGING_ENABLED) console.log('no sessions for workspace', wsString) if (LOGGING_ENABLED) console.log('no sessions for workspace', wsString)
// Re-create pipeline. // Re-create pipeline.
workspace.pipeline = pipelineFactory(token.workspace, true) workspace.pipeline = pipelineFactory(token.workspace, true, (tx) => this.broadcastAll(workspace as Workspace, tx))
const pipeline = await workspace.pipeline const pipeline = await workspace.pipeline
const session = this.createSession(token, pipeline) const session = this.createSession(token, pipeline)
@@ -103,14 +104,20 @@ class SessionManager {
return session return session
} }
private createWorkspace ( broadcastAll (workspace: Workspace, tx: Tx[]): void {
pipelineFactory: (ws: WorkspaceId, upgrade: boolean) => Promise<Pipeline>, for (const _tx of tx) {
token: Token const msg = serialize({ result: _tx })
): Workspace { for (const session of workspace.sessions) {
session[1].send(msg)
}
}
}
private createWorkspace (pipelineFactory: PipelineFactory, token: Token): Workspace {
const upgrade = token.extra?.model === 'upgrade' const upgrade = token.extra?.model === 'upgrade'
const workspace = { const workspace: Workspace = {
id: generateId(), id: generateId(),
pipeline: pipelineFactory(token.workspace, upgrade), pipeline: pipelineFactory(token.workspace, upgrade, (tx) => this.broadcastAll(workspace, tx)),
sessions: [], sessions: [],
upgrade upgrade
} }
@@ -305,7 +312,7 @@ async function handleRequest<S extends Session> (
*/ */
export function start ( export function start (
ctx: MeasureContext, ctx: MeasureContext,
pipelineFactory: (workspace: WorkspaceId, upgrade: boolean) => Promise<Pipeline>, pipelineFactory: PipelineFactory,
sessionFactory: (token: Token, pipeline: Pipeline, broadcast: BroadcastCall) => Session, sessionFactory: (token: Token, pipeline: Pipeline, broadcast: BroadcastCall) => Session,
port: number, port: number,
productId: string, productId: string,

View File

@@ -38,3 +38,8 @@ export type BroadcastCall = (
resp: Response<any>, resp: Response<any>,
target?: string target?: string
) => void ) => void
/**
* @public
*/
export type PipelineFactory = (ws: WorkspaceId, upgrade: boolean, broadcast: (tx: Tx[]) => void) => Promise<Pipeline>
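As a minimal illustration of the new factory shape, a server could forward the per-workspace broadcast callback straight into createPipeline. buildConf and middlewares are placeholders for the real server setup, and the import paths are assumptions based on this repository's layout.
import type { Tx, WorkspaceId } from '@hcengineering/core'
import { createPipeline, DbConfiguration, MiddlewareCreator } from '@hcengineering/server-core'
// Hypothetical wiring: the broadcast callback received for a workspace is handed to the
// pipeline so server storage can push derived transactions back to connected sessions.
declare const middlewares: MiddlewareCreator[]
declare function buildConf (ws: WorkspaceId): DbConfiguration
const pipelineFactory: PipelineFactory = async (ws, upgrade, broadcast: (tx: Tx[]) => void) =>
  await createPipeline(buildConf(ws), middlewares, upgrade, broadcast)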

View File

@@ -3,6 +3,7 @@
"version": "0.6.0", "version": "0.6.0",
"scripts": { "scripts": {
"build": "tsc --incremental --noEmit --outDir ./dist_cache && echo build", "build": "tsc --incremental --noEmit --outDir ./dist_cache && echo build",
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache",
"lint:fix": "eslint --fix src", "lint:fix": "eslint --fix src",
"lint": "svelte-check && eslint", "lint": "svelte-check && eslint",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src" "format": "prettier --write --plugin-search-dir=. src && eslint --fix src"

tools/apm/config/rig.json Normal file (18 lines)
View File

@@ -0,0 +1,18 @@
// The "rig.json" file directs tools to look for their config files in an external package.
// Documentation for this system: https://www.npmjs.com/package/@rushstack/rig-package
{
"$schema": "https://developer.microsoft.com/json-schemas/rig-package/rig.schema.json",
/**
* (Required) The name of the rig package to inherit from.
* It should be an NPM package name with the "-rig" suffix.
*/
"rigPackageName": "@hcengineering/platform-rig"
/**
* (Optional) Selects a config profile from the rig package. The name must consist of
* lowercase alphanumeric words separated by hyphens, for example "sample-profile".
* If omitted, then the "default" profile will be used."
*/
// "rigProfile": "your-profile-name"
}

View File

@@ -6,7 +6,7 @@
"license": "EPL-2.0", "license": "EPL-2.0",
"bin": "./lib/apm.js", "bin": "./lib/apm.js",
"scripts": { "scripts": {
"build": "heft build && esbuild src/index.ts --bundle --minify --platform=node > ./apm.js && echo 'build'", "build": "heft build",
"build:watch": "tsc", "build:watch": "tsc",
"lint:fix": "eslint --fix src", "lint:fix": "eslint --fix src",
"lint": "eslint src", "lint": "eslint src",