mirror of
https://github.com/hcengineering/platform.git
synced 2024-11-25 09:13:07 +03:00
parent
aa8530d678
commit
199da5077a
@ -302,6 +302,7 @@ specifiers:
|
||||
fast-equals: ^2.0.3
|
||||
file-loader: ^6.2.0
|
||||
filesize: ^8.0.3
|
||||
fork-ts-checker-webpack-plugin: ~7.3.0
|
||||
got: ^11.8.3
|
||||
html-to-text: ^9.0.3
|
||||
html-webpack-plugin: ^5.5.0
|
||||
@ -671,6 +672,7 @@ dependencies:
|
||||
fast-equals: 2.0.4
|
||||
file-loader: 6.2.0_webpack@5.75.0
|
||||
filesize: 8.0.7
|
||||
fork-ts-checker-webpack-plugin: 7.3.0_typescript@4.8.4+webpack@5.75.0
|
||||
got: 11.8.5
|
||||
html-to-text: 9.0.3
|
||||
html-webpack-plugin: 5.5.0_webpack@5.75.0
|
||||
@ -2714,6 +2716,26 @@ packages:
|
||||
defer-to-connect: 2.0.1
|
||||
dev: false
|
||||
|
||||
/@tiptap/core/2.0.0-beta.209_88faf4ceee3e4a6aeb2426d142be0a06:
|
||||
resolution: {integrity: sha512-DOOzfo2XKD5Qt2oEGW33/6ugwSnvpl4WbxtlKdPadLoApk6Kja3K1Eps3pihBgIGmo4tkctkCzmj8wNWS7KeWg==}
|
||||
peerDependencies:
|
||||
prosemirror-commands: ^1.3.1
|
||||
prosemirror-keymap: ^1.2.0
|
||||
prosemirror-model: ^1.18.1
|
||||
prosemirror-schema-list: ^1.2.2
|
||||
prosemirror-state: ^1.4.1
|
||||
prosemirror-transform: ^1.7.0
|
||||
prosemirror-view: ^1.28.2
|
||||
dependencies:
|
||||
prosemirror-commands: 1.5.0
|
||||
prosemirror-keymap: 1.2.0
|
||||
prosemirror-model: 1.18.3
|
||||
prosemirror-schema-list: 1.2.2
|
||||
prosemirror-state: 1.4.2
|
||||
prosemirror-transform: 1.7.0
|
||||
prosemirror-view: 1.29.1
|
||||
dev: false
|
||||
|
||||
/@tiptap/core/2.0.0-beta.209_b7ea67b8e383e94e567bbd2a5d53cb52:
|
||||
resolution: {integrity: sha512-DOOzfo2XKD5Qt2oEGW33/6ugwSnvpl4WbxtlKdPadLoApk6Kja3K1Eps3pihBgIGmo4tkctkCzmj8wNWS7KeWg==}
|
||||
peerDependencies:
|
||||
@ -3024,6 +3046,22 @@ packages:
|
||||
'@tiptap/core': 2.0.0-beta.209_b7ea67b8e383e94e567bbd2a5d53cb52
|
||||
dev: false
|
||||
|
||||
/@tiptap/prosemirror-tables/1.1.4_820a2ec93be59195864136797737fd20:
|
||||
resolution: {integrity: sha512-O2XnDhZV7xTHSFxMMl8Ei3UVeCxuMlbGYZ+J2QG8CzkK8mxDpBa66kFr5DdyAhvdi1ptpcH9u7/GMwItQpN4sA==}
|
||||
peerDependencies:
|
||||
prosemirror-keymap: ^1.1.2
|
||||
prosemirror-model: ^1.8.1
|
||||
prosemirror-state: ^1.3.1
|
||||
prosemirror-transform: ^1.2.1
|
||||
prosemirror-view: ^1.13.3
|
||||
dependencies:
|
||||
prosemirror-keymap: 1.2.0
|
||||
prosemirror-model: 1.18.3
|
||||
prosemirror-state: 1.4.2
|
||||
prosemirror-transform: 1.7.0
|
||||
prosemirror-view: 1.29.1
|
||||
dev: false
|
||||
|
||||
/@tiptap/prosemirror-tables/1.1.4_825d0bccef8ba664696e76031c65278e:
|
||||
resolution: {integrity: sha512-O2XnDhZV7xTHSFxMMl8Ei3UVeCxuMlbGYZ+J2QG8CzkK8mxDpBa66kFr5DdyAhvdi1ptpcH9u7/GMwItQpN4sA==}
|
||||
peerDependencies:
|
||||
@ -3074,6 +3112,41 @@ packages:
|
||||
- prosemirror-view
|
||||
dev: false
|
||||
|
||||
/@tiptap/starter-kit/2.0.0-beta.209_4bc2915a7798d139ce161d1059f26dca:
|
||||
resolution: {integrity: sha512-uR68ZfDZ5PeygGey3xc9ZuFIP+K7VRElrABnZcM6t9/Crrs70UFwSTNlkS0ezx9woj8h+8N78a6r8W1YC04TOw==}
|
||||
dependencies:
|
||||
'@tiptap/core': 2.0.0-beta.209_88faf4ceee3e4a6aeb2426d142be0a06
|
||||
'@tiptap/extension-blockquote': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-bold': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-bullet-list': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-code': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-code-block': 2.0.0-beta.209_7c4b41d3b933ab120335c25fb0c64f89
|
||||
'@tiptap/extension-document': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-dropcursor': 2.0.0-beta.209_8146261db7b5e4b7ca8f9611f0136fd1
|
||||
'@tiptap/extension-gapcursor': 2.0.0-beta.209_ae82587580408645a82c84b21276f03e
|
||||
'@tiptap/extension-hard-break': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-heading': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-history': 2.0.0-beta.209_e2f83d342d3e4fc6f345741d5563b0d1
|
||||
'@tiptap/extension-horizontal-rule': 2.0.0-beta.209_7c4b41d3b933ab120335c25fb0c64f89
|
||||
'@tiptap/extension-italic': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-list-item': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-ordered-list': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-paragraph': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-strike': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-text': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
transitivePeerDependencies:
|
||||
- prosemirror-commands
|
||||
- prosemirror-dropcursor
|
||||
- prosemirror-gapcursor
|
||||
- prosemirror-history
|
||||
- prosemirror-keymap
|
||||
- prosemirror-model
|
||||
- prosemirror-schema-list
|
||||
- prosemirror-state
|
||||
- prosemirror-transform
|
||||
- prosemirror-view
|
||||
dev: false
|
||||
|
||||
/@tiptap/suggestion/2.0.0-beta.209_6d771ef4ce96210c593056969447c447:
|
||||
resolution: {integrity: sha512-KKV64rTzTGY1q03nK0b4wCrAmihwThYJrYlPTUTelQm0AeJ4EPTNMRSR5rHD+fVF7agqrtrCkMw46vTXd6j1Jw==}
|
||||
peerDependencies:
|
||||
@ -16248,12 +16321,12 @@ packages:
|
||||
dev: false
|
||||
|
||||
file:projects/text-editor.tgz_1e646fd62a3adc9d0b41f256dafe3d38:
|
||||
resolution: {integrity: sha512-cBdQh4hTVCpoolCLbnlHRe13ldtnGOFkf09z+B7W/Rqe1qJuY0t0KkX2KZKU/ibCQgA+duXx9rzgyjpMMThcaw==, tarball: file:projects/text-editor.tgz}
|
||||
resolution: {integrity: sha512-4+cwBn+k4JdLN00mVFFMngvgdwonyINba4W27Ku8YKU6J0HQ6kLLY5d1htGHr1l6ZJvnrbe31tELdfJJTkJH/Q==, tarball: file:projects/text-editor.tgz}
|
||||
id: file:projects/text-editor.tgz
|
||||
name: '@rush-temp/text-editor'
|
||||
version: 0.0.0
|
||||
dependencies:
|
||||
'@tiptap/core': 2.0.0-beta.209_b7ea67b8e383e94e567bbd2a5d53cb52
|
||||
'@tiptap/core': 2.0.0-beta.209_88faf4ceee3e4a6aeb2426d142be0a06
|
||||
'@tiptap/extension-code-block': 2.0.0-beta.209_7c4b41d3b933ab120335c25fb0c64f89
|
||||
'@tiptap/extension-collaboration': 2.0.0-beta.209_c3e1167b714fc2c0e8099b0d893dd8f4
|
||||
'@tiptap/extension-collaboration-cursor': 2.0.0-beta.209_542658698eb070984e7c071802d831ce
|
||||
@ -16270,8 +16343,8 @@ packages:
|
||||
'@tiptap/extension-task-item': 2.0.0-beta.209_06eca8551536c047b95c272597601c00
|
||||
'@tiptap/extension-task-list': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/extension-typography': 2.0.0-beta.209_@tiptap+core@2.0.0-beta.209
|
||||
'@tiptap/prosemirror-tables': 1.1.4_825d0bccef8ba664696e76031c65278e
|
||||
'@tiptap/starter-kit': 2.0.0-beta.209_3f796369a8f3f702340a0f7a928227c2
|
||||
'@tiptap/prosemirror-tables': 1.1.4_820a2ec93be59195864136797737fd20
|
||||
'@tiptap/starter-kit': 2.0.0-beta.209_4bc2915a7798d139ce161d1059f26dca
|
||||
'@tiptap/suggestion': 2.0.0-beta.209_6d771ef4ce96210c593056969447c447
|
||||
'@types/diff': 5.0.2
|
||||
'@typescript-eslint/eslint-plugin': 5.42.1_d506b9be61cb4ac2646ecbc6e0680464
|
||||
@ -16292,6 +16365,7 @@ packages:
|
||||
prosemirror-dropcursor: 1.6.1
|
||||
prosemirror-gapcursor: 1.3.1
|
||||
prosemirror-history: 1.3.0
|
||||
prosemirror-keymap: 1.2.0
|
||||
prosemirror-model: 1.18.3
|
||||
prosemirror-schema-list: 1.2.2
|
||||
prosemirror-state: 1.4.2
|
||||
@ -16314,7 +16388,6 @@ packages:
|
||||
- node-sass
|
||||
- postcss
|
||||
- postcss-load-config
|
||||
- prosemirror-keymap
|
||||
- pug
|
||||
- stylus
|
||||
- sugarss
|
||||
|
@ -112,20 +112,21 @@ export async function connect (handler: (tx: Tx) => void): Promise<ClientConnect
|
||||
url: ''
|
||||
}
|
||||
},
|
||||
metrics: new MeasureMetricsContext('', {}),
|
||||
fulltextAdapter: {
|
||||
factory: createNullFullTextAdapter,
|
||||
url: '',
|
||||
metrics: new MeasureMetricsContext('', {}),
|
||||
stages: () => []
|
||||
},
|
||||
contentAdapter: {
|
||||
url: '',
|
||||
factory: createNullContentTextAdapter,
|
||||
metrics: new MeasureMetricsContext('', {})
|
||||
factory: createNullContentTextAdapter
|
||||
},
|
||||
workspace: getWorkspaceId('')
|
||||
}
|
||||
const serverStorage = await createServerStorage(conf)
|
||||
const serverStorage = await createServerStorage(conf, {
|
||||
upgrade: false
|
||||
})
|
||||
setMetadata(devmodel.metadata.DevModel, serverStorage)
|
||||
return new ServerStorageWrapper(serverStorage, handler)
|
||||
}
|
||||
|
@ -63,17 +63,16 @@ export async function start (port: number, host?: string): Promise<void> {
|
||||
fulltextAdapter: {
|
||||
factory: createNullFullTextAdapter,
|
||||
url: '',
|
||||
metrics: new MeasureMetricsContext('', {}),
|
||||
stages: () => []
|
||||
},
|
||||
metrics: new MeasureMetricsContext('', {}),
|
||||
contentAdapter: {
|
||||
url: '',
|
||||
factory: createNullContentTextAdapter,
|
||||
metrics: new MeasureMetricsContext('', {})
|
||||
factory: createNullContentTextAdapter
|
||||
},
|
||||
workspace: getWorkspaceId('')
|
||||
}
|
||||
return createPipeline(conf, [], false)
|
||||
return createPipeline(conf, [], false, () => {})
|
||||
},
|
||||
(token, pipeline, broadcast) => new ClientSession(broadcast, token, pipeline),
|
||||
port,
|
||||
|
@ -47,7 +47,7 @@ export const migrateOperations: [string, MigrateOperation][] = [
|
||||
['telegram', telegramOperation],
|
||||
['task', taskOperation],
|
||||
['attachment', attachmentOperation],
|
||||
['', automationOperation],
|
||||
['automation', automationOperation],
|
||||
['lead', leadOperation],
|
||||
['recruit', recruitOperation],
|
||||
['view', viewOperation],
|
||||
|
@ -18,16 +18,25 @@ import { Model, Builder } from '@hcengineering/model'
|
||||
import type { Resource } from '@hcengineering/platform'
|
||||
import { TClass, TDoc } from '@hcengineering/model-core'
|
||||
|
||||
import type { ObjectDDParticipant, Trigger, TriggerFunc } from '@hcengineering/server-core'
|
||||
import type {
|
||||
AsyncTrigger,
|
||||
ObjectDDParticipant,
|
||||
Trigger,
|
||||
TriggerFunc,
|
||||
AsyncTriggerState,
|
||||
AsyncTriggerFunc
|
||||
} from '@hcengineering/server-core'
|
||||
import core, {
|
||||
Class,
|
||||
Doc,
|
||||
DocumentQuery,
|
||||
DOMAIN_DOC_INDEX_STATE,
|
||||
DOMAIN_MODEL,
|
||||
FindOptions,
|
||||
FindResult,
|
||||
Hierarchy,
|
||||
Ref
|
||||
Ref,
|
||||
TxCUD
|
||||
} from '@hcengineering/core'
|
||||
import serverCore from '@hcengineering/server-core'
|
||||
|
||||
@ -36,6 +45,18 @@ export class TTrigger extends TDoc implements Trigger {
|
||||
trigger!: Resource<TriggerFunc>
|
||||
}
|
||||
|
||||
@Model(serverCore.class.AsyncTrigger, core.class.Doc, DOMAIN_MODEL)
|
||||
export class TAsyncTrigger extends TDoc implements AsyncTrigger {
|
||||
trigger!: Resource<AsyncTriggerFunc>
|
||||
classes!: Ref<Class<Doc>>[]
|
||||
}
|
||||
|
||||
@Model(serverCore.class.AsyncTriggerState, core.class.Doc, DOMAIN_DOC_INDEX_STATE)
|
||||
export class TAsyncTriggerState extends TDoc implements AsyncTriggerState {
|
||||
tx!: TxCUD<Doc>
|
||||
message!: string
|
||||
}
|
||||
|
||||
@Model(serverCore.mixin.ObjectDDParticipant, core.class.Class)
|
||||
export class TObjectDDParticipant extends TClass implements ObjectDDParticipant {
|
||||
collectDocs!: Resource<
|
||||
@ -52,5 +73,5 @@ export class TObjectDDParticipant extends TClass implements ObjectDDParticipant
|
||||
}
|
||||
|
||||
export function createModel (builder: Builder): void {
|
||||
builder.createModel(TTrigger, TObjectDDParticipant)
|
||||
builder.createModel(TTrigger, TObjectDDParticipant, TAsyncTriggerState, TAsyncTrigger)
|
||||
}
|
||||
|
@ -30,6 +30,8 @@
|
||||
"@hcengineering/platform": "^0.6.8",
|
||||
"@hcengineering/model-core": "^0.6.0",
|
||||
"@hcengineering/openai": "^0.6.0",
|
||||
"@hcengineering/server-core": "^0.6.1"
|
||||
"@hcengineering/server-core": "^0.6.1",
|
||||
"@hcengineering/model-chunter": "^0.6.0",
|
||||
"@hcengineering/model-recruit": "^0.6.0"
|
||||
}
|
||||
}
|
||||
|
@ -22,6 +22,9 @@ import core, { DOMAIN_CONFIGURATION } from '@hcengineering/core'
|
||||
import openai, { OpenAIConfiguration } from '@hcengineering/openai/src/plugin'
|
||||
import serverCore from '@hcengineering/server-core'
|
||||
|
||||
import chunter from '@hcengineering/model-chunter'
|
||||
import recruit from '@hcengineering/model-recruit'
|
||||
|
||||
@Model(openai.class.OpenAIConfiguration, core.class.Configuration, DOMAIN_CONFIGURATION)
|
||||
@UX(getEmbeddedLabel('OpenAI'))
|
||||
export class TOpenAIConfiguration extends TConfiguration implements OpenAIConfiguration {
|
||||
@ -41,7 +44,8 @@ export class TOpenAIConfiguration extends TConfiguration implements OpenAIConfig
|
||||
export function createModel (builder: Builder): void {
|
||||
builder.createModel(TOpenAIConfiguration)
|
||||
|
||||
builder.createDoc(serverCore.class.Trigger, core.space.Model, {
|
||||
trigger: openai.trigger.OnGPTRequest
|
||||
builder.createDoc(serverCore.class.AsyncTrigger, core.space.Model, {
|
||||
trigger: openai.trigger.AsyncOnGPTRequest,
|
||||
classes: [chunter.class.Comment, recruit.class.ApplicantMatch]
|
||||
})
|
||||
}
|
||||
|
@ -66,5 +66,6 @@ export interface ServerStorage extends LowLevelStorage {
|
||||
options?: FindOptions<T>
|
||||
) => Promise<FindResult<T>>
|
||||
tx: (ctx: MeasureContext, tx: Tx) => Promise<[TxResult, Tx[]]>
|
||||
apply: (ctx: MeasureContext, tx: Tx[], broadcast: boolean) => Promise<Tx[]>
|
||||
close: () => Promise<void>
|
||||
}
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -0,0 +1,55 @@
|
||||
<script lang="ts">
|
||||
import { DocIndexState } from '@hcengineering/core'
|
||||
|
||||
import { EditBox, Panel } from '@hcengineering/ui'
|
||||
import IndexedDocumentContent from './IndexedDocumentContent.svelte'
|
||||
|
||||
export let left: DocIndexState
|
||||
export let right: DocIndexState | undefined
|
||||
|
||||
let search: string = ''
|
||||
</script>
|
||||
|
||||
<Panel on:changeContent on:close>
|
||||
<EditBox focus bind:value={search} kind="search-style" />
|
||||
<div class="indexed-background">
|
||||
<div class="indexed-doc text-base max-h-125">
|
||||
<div class="flex">
|
||||
<div class="indexed-doc-part">
|
||||
<IndexedDocumentContent indexDoc={left} {search} />
|
||||
</div>
|
||||
{#if right !== undefined}
|
||||
<div class="indexed-doc-part">
|
||||
<IndexedDocumentContent indexDoc={right} {search} />
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Panel>
|
||||
|
||||
<style lang="scss">
|
||||
.indexed-doc {
|
||||
padding: 2.5rem;
|
||||
display: flex;
|
||||
overflow: auto;
|
||||
min-width: 50rem;
|
||||
max-width: 100rem;
|
||||
}
|
||||
.indexed-doc-part {
|
||||
padding: 0.5rem;
|
||||
display: grid;
|
||||
overflow: auto;
|
||||
min-width: 25rem;
|
||||
max-width: 50rem;
|
||||
}
|
||||
.indexed-background {
|
||||
background-color: white;
|
||||
color: black;
|
||||
user-select: text;
|
||||
// width: 200rem;
|
||||
.highlight {
|
||||
color: blue;
|
||||
}
|
||||
}
|
||||
</style>
|
@ -0,0 +1,91 @@
|
||||
<script lang="ts">
|
||||
import { AnyAttribute, DocIndexState, extractDocKey, isFullTextAttribute } from '@hcengineering/core'
|
||||
|
||||
import { Label } from '@hcengineering/ui'
|
||||
import Icon from '@hcengineering/ui/src/components/Icon.svelte'
|
||||
import { getClient } from '../utils'
|
||||
|
||||
export let indexDoc: DocIndexState
|
||||
export let search: string = ''
|
||||
|
||||
const client = getClient()
|
||||
|
||||
function getContent (extra: string[], value: string): string[] {
|
||||
const result = extra.includes('base64') ? decodeURIComponent(escape(atob(value))) : value
|
||||
|
||||
return `${result}`.split('\n')
|
||||
}
|
||||
|
||||
$: summary = indexDoc?.fullSummary ?? undefined
|
||||
|
||||
$: attributes =
|
||||
indexDoc !== undefined
|
||||
? Object.entries(indexDoc.attributes).reduce<[AnyAttribute, string[][]][]>((a, b) => {
|
||||
const bb = extractDocKey(b[0])
|
||||
if (bb._class === undefined) {
|
||||
return a
|
||||
}
|
||||
const attr = client.getHierarchy().getAttribute(bb._class, bb.attr)
|
||||
if (!isFullTextAttribute(attr)) {
|
||||
return a
|
||||
}
|
||||
const pos = a.findIndex((it) => it[0] === attr)
|
||||
if (pos !== -1) {
|
||||
a[pos][1].push(getContent(bb.extra, b[1]))
|
||||
} else {
|
||||
a.push([attr, [getContent(bb.extra, b[1])]])
|
||||
}
|
||||
return a
|
||||
}, [])
|
||||
: []
|
||||
</script>
|
||||
|
||||
{#if summary}
|
||||
{#if search.length > 0}
|
||||
Result:
|
||||
{#each summary.split('\n').filter((line) => line.toLowerCase().includes(search.toLowerCase())) as line}
|
||||
<span class:highlight={true}>{line}</span>
|
||||
{/each}
|
||||
<br />
|
||||
{/if}
|
||||
Summary:
|
||||
{#each summary.split('\n') as line}
|
||||
{@const hl = search.length > 0 && line.toLowerCase().includes(search.toLowerCase())}
|
||||
<span class:text-md={!hl} class:highlight={hl}>{line}</span>
|
||||
{/each}
|
||||
{:else if indexDoc}
|
||||
{#each attributes as attr}
|
||||
{@const clOf = client.getHierarchy().getClass(attr[0].attributeOf)}
|
||||
<div class="flex-row-center">
|
||||
{#if clOf.icon}
|
||||
<div class="mr-1">
|
||||
<Icon size={'medium'} icon={clOf.icon} />
|
||||
</div>
|
||||
{/if}
|
||||
<Label label={clOf.label} />.<Label label={attr[0].label} />
|
||||
</div>
|
||||
<div class="p-1 flex-row flex-wrap">
|
||||
{#each attr[1] as doc}
|
||||
<div class="p-1" class:flex-col={doc.length > 1}>
|
||||
{#if search.length > 0}
|
||||
Result:
|
||||
{#each doc.filter((line) => line.toLowerCase().includes(search.toLowerCase())) as line}
|
||||
<span class:highlight={true}>{line}</span>
|
||||
{/each}
|
||||
<br />
|
||||
{/if}
|
||||
{#each doc as line}
|
||||
{@const hl = search.length > 0 && line.toLowerCase().includes(search.toLowerCase())}
|
||||
<span class:text-md={!hl} class:highlight={hl}>{line}</span>
|
||||
{/each}
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/each}
|
||||
{/if}
|
||||
|
||||
<style lang="scss">
|
||||
.highlight {
|
||||
color: blue;
|
||||
}
|
||||
</style>
|
@ -1,15 +1,15 @@
|
||||
<script lang="ts">
|
||||
import core, { AnyAttribute, Doc, DocIndexState, extractDocKey, isFullTextAttribute, Ref } from '@hcengineering/core'
|
||||
import core, { Doc, DocIndexState, Ref } from '@hcengineering/core'
|
||||
|
||||
import { EditBox, Label, Panel } from '@hcengineering/ui'
|
||||
import Icon from '@hcengineering/ui/src/components/Icon.svelte'
|
||||
import { createQuery, getClient } from '../utils'
|
||||
import { EditBox, Panel } from '@hcengineering/ui'
|
||||
import { createQuery } from '../utils'
|
||||
import IndexedDocumentContent from './IndexedDocumentContent.svelte'
|
||||
|
||||
export let objectId: Ref<Doc>
|
||||
export let objectId: Ref<Doc> | undefined
|
||||
export let indexDoc: DocIndexState | undefined = undefined
|
||||
export let search: string = ''
|
||||
|
||||
const client = getClient()
|
||||
const indexDocQuery = createQuery()
|
||||
let indexDoc: DocIndexState | undefined
|
||||
$: if (objectId !== undefined) {
|
||||
indexDocQuery.query(core.class.DocIndexState, { _id: objectId as Ref<DocIndexState> }, (res) => {
|
||||
console.log(res)
|
||||
@ -18,84 +18,14 @@
|
||||
} else {
|
||||
indexDocQuery.unsubscribe()
|
||||
}
|
||||
|
||||
function getContent (extra: string[], value: string): string[] {
|
||||
const result = extra.includes('base64') ? decodeURIComponent(escape(atob(value))) : value
|
||||
|
||||
return `${result}`.split('\n')
|
||||
}
|
||||
let search = ''
|
||||
|
||||
$: summary = indexDoc?.fullSummary ?? undefined
|
||||
|
||||
$: attributes =
|
||||
indexDoc !== undefined
|
||||
? Object.entries(indexDoc.attributes).reduce<[AnyAttribute, string[][]][]>((a, b) => {
|
||||
const bb = extractDocKey(b[0])
|
||||
if (bb._class === undefined) {
|
||||
return a
|
||||
}
|
||||
const attr = client.getHierarchy().getAttribute(bb._class, bb.attr)
|
||||
if (!isFullTextAttribute(attr)) {
|
||||
return a
|
||||
}
|
||||
const pos = a.findIndex((it) => it[0] === attr)
|
||||
if (pos !== -1) {
|
||||
a[pos][1].push(getContent(bb.extra, b[1]))
|
||||
} else {
|
||||
a.push([attr, [getContent(bb.extra, b[1])]])
|
||||
}
|
||||
return a
|
||||
}, [])
|
||||
: []
|
||||
</script>
|
||||
|
||||
<Panel on:changeContent on:close>
|
||||
<EditBox focus bind:value={search} kind="search-style" />
|
||||
<div class="indexed-background">
|
||||
<div class="indexed-doc text-base max-h-125">
|
||||
{#if summary}
|
||||
{#if search.length > 0}
|
||||
Result:
|
||||
{#each summary.split('\n').filter((line) => line.toLowerCase().includes(search.toLowerCase())) as line}
|
||||
<span class:highlight={true}>{line}</span>
|
||||
{/each}
|
||||
<br />
|
||||
{/if}
|
||||
Summary:
|
||||
{#each summary.split('\n') as line}
|
||||
{@const hl = search.length > 0 && line.toLowerCase().includes(search.toLowerCase())}
|
||||
<span class:text-md={!hl} class:highlight={hl}>{line}</span>
|
||||
{/each}
|
||||
{:else if indexDoc}
|
||||
{#each attributes as attr}
|
||||
{@const clOf = client.getHierarchy().getClass(attr[0].attributeOf)}
|
||||
<div class="flex-row-center">
|
||||
{#if clOf.icon}
|
||||
<div class="mr-1">
|
||||
<Icon size={'medium'} icon={clOf.icon} />
|
||||
</div>
|
||||
{/if}
|
||||
<Label label={clOf.label} />.<Label label={attr[0].label} />
|
||||
</div>
|
||||
<div class="p-1 flex-row flex-wrap">
|
||||
{#each attr[1] as doc}
|
||||
<div class="p-1" class:flex-col={doc.length > 1}>
|
||||
{#if search.length > 0}
|
||||
Result:
|
||||
{#each doc.filter((line) => line.toLowerCase().includes(search.toLowerCase())) as line}
|
||||
<span class:highlight={true}>{line}</span>
|
||||
{/each}
|
||||
<br />
|
||||
{/if}
|
||||
{#each doc as line}
|
||||
{@const hl = search.length > 0 && line.toLowerCase().includes(search.toLowerCase())}
|
||||
<span class:text-md={!hl} class:highlight={hl}>{line}</span>
|
||||
{/each}
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/each}
|
||||
{#if indexDoc}
|
||||
<IndexedDocumentContent {indexDoc} {search} />
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
@ -113,8 +43,5 @@
|
||||
background-color: white;
|
||||
color: black;
|
||||
user-select: text;
|
||||
.highlight {
|
||||
color: blue;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
@ -46,6 +46,7 @@ export { default as IconPerson } from './components/icons/Person.svelte'
|
||||
export { default as IconMembersOutline } from './components/icons/MembersOutline.svelte'
|
||||
export { default as ObjectSearchPopup } from './components/ObjectSearchPopup.svelte'
|
||||
export { default as IndexedDocumentPreview } from './components/IndexedDocumentPreview.svelte'
|
||||
export { default as IndexedDocumentCompare } from './components/IndexedDocumentCompare.svelte'
|
||||
export { default as DraggableList } from './components/DraggableList.svelte'
|
||||
export { connect, versionError } from './connect'
|
||||
export { default } from './plugin'
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
@ -65,6 +66,7 @@
|
||||
"prosemirror-model": "^1.18.3",
|
||||
"prosemirror-view": "^1.29.1",
|
||||
"prosemirror-history": "^1.3.0",
|
||||
"prosemirror-keymap": "^1.2.0",
|
||||
"rfc6902": "^5.0.1",
|
||||
"diff": "^5.1.0",
|
||||
"@tiptap/extension-code-block": "~2.0.0-beta.209",
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build": "tsc --incremental --noEmit --outDir ./dist_cache && echo build",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -6,7 +6,7 @@
|
||||
"license": "EPL-2.0",
|
||||
"scripts": {
|
||||
"build": "tsc --incremental --noEmit --outDir ./dist_cache && echo build",
|
||||
"build:watch": "tsc",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache",
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
|
@ -9,7 +9,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -210,12 +210,9 @@
|
||||
|
||||
async function onMessage (event: CustomEvent) {
|
||||
loading = true
|
||||
try {
|
||||
await createAttachments()
|
||||
dispatch('message', { message: event.detail, attachments: attachments.size })
|
||||
} finally {
|
||||
loading = false
|
||||
}
|
||||
dispatch('message', { message: event.detail, attachments: attachments.size })
|
||||
}
|
||||
|
||||
async function onUpdate (event: CustomEvent) {
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@hcengineering/platform-rig": "^0.6.0",
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@hcengineering/platform-rig": "^0.6.0",
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@hcengineering/platform-rig": "^0.6.0",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -112,7 +112,6 @@
|
||||
|
||||
async function onMessage (event: CustomEvent) {
|
||||
loading = true
|
||||
try {
|
||||
const { message, attachments } = event.detail
|
||||
await client.addCollection<Doc, Comment>(
|
||||
_class,
|
||||
@ -132,10 +131,8 @@
|
||||
draftComment = undefined
|
||||
await saveDraft(object)
|
||||
commentInputBox.removeDraft(false)
|
||||
} finally {
|
||||
loading = false
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<AttachmentRefInput
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -9,7 +9,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@hcengineering/platform-rig": "^0.6.0",
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@hcengineering/platform-rig": "^0.6.0",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@hcengineering/platform-rig": "^0.6.0",
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@hcengineering/platform-rig": "^0.6.0",
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -5,15 +5,15 @@
|
||||
Card,
|
||||
createQuery,
|
||||
getClient,
|
||||
IndexedDocumentPreview,
|
||||
IndexedDocumentCompare,
|
||||
MessageViewer,
|
||||
SpaceSelect
|
||||
} from '@hcengineering/presentation'
|
||||
import { Applicant, ApplicantMatch, Candidate, Vacancy } from '@hcengineering/recruit'
|
||||
import { Button, IconActivity, IconAdd, Label, resizeObserver, showPopup, tooltip } from '@hcengineering/ui'
|
||||
import { Button, IconActivity, IconAdd, Label, resizeObserver, showPopup, Spinner, tooltip } from '@hcengineering/ui'
|
||||
import Scroller from '@hcengineering/ui/src/components/Scroller.svelte'
|
||||
import { MarkupPreviewPopup, ObjectPresenter } from '@hcengineering/view-resources'
|
||||
import { cosinesim } from '@hcengineering/view-resources/src/utils'
|
||||
import { calcSørensenDiceCoefficient, cosinesim } from '@hcengineering/view-resources/src/utils'
|
||||
import { createEventDispatcher } from 'svelte'
|
||||
import recruit from '../plugin'
|
||||
import CreateApplication from './CreateApplication.svelte'
|
||||
@ -50,8 +50,20 @@
|
||||
state = new Map(res.map((it) => [it._id, it] ?? []))
|
||||
}
|
||||
)
|
||||
|
||||
$: vacancyState = state.get(_space as unknown as Ref<DocIndexState>)
|
||||
|
||||
$: scoreState = new Map(
|
||||
_objects.map((it) => [
|
||||
it._id,
|
||||
Math.round(
|
||||
calcSørensenDiceCoefficient(state.get(it._id)?.fullSummary ?? '', vacancyState?.fullSummary ?? '') * 100
|
||||
) / 100
|
||||
])
|
||||
)
|
||||
|
||||
$: _sortedObjects = [..._objects].sort((a, b) => (scoreState.get(b._id) ?? 0) - (scoreState.get(a._id) ?? 0))
|
||||
|
||||
const matchQuery = createQuery()
|
||||
let matches: Map<Ref<Doc>, ApplicantMatch> = new Map()
|
||||
|
||||
@ -88,6 +100,7 @@
|
||||
}
|
||||
}
|
||||
$: vacancyEmbedding = vacancyState && getEmbedding(vacancyState)
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
|
||||
const client = getClient()
|
||||
@ -126,8 +139,8 @@
|
||||
'top'
|
||||
)
|
||||
}
|
||||
async function showSummary (doc: Candidate): Promise<void> {
|
||||
showPopup(IndexedDocumentPreview, { objectId: doc._id }, 'top')
|
||||
async function showSummary (left: DocIndexState, right?: DocIndexState): Promise<void> {
|
||||
showPopup(IndexedDocumentCompare, { left, right }, 'top')
|
||||
}
|
||||
</script>
|
||||
|
||||
@ -172,7 +185,7 @@
|
||||
<div class="p-1">
|
||||
{#if vacancy}
|
||||
<Scroller>
|
||||
<div class="flex-col max-h-60">
|
||||
<div class="flex-col max-h-60 select-text">
|
||||
{#if vacancy.description}
|
||||
{vacancy.description}
|
||||
{/if}
|
||||
@ -197,7 +210,7 @@
|
||||
</thead>
|
||||
|
||||
<tbody>
|
||||
{#each _objects as doc}
|
||||
{#each _sortedObjects as doc}
|
||||
{@const docState = state.get(doc._id)}
|
||||
{@const docEmbedding = docState && getEmbedding(docState)}
|
||||
{@const match = matches.get(doc._id)}
|
||||
@ -213,10 +226,12 @@
|
||||
{/if}
|
||||
</div>
|
||||
</td>
|
||||
<td>
|
||||
<td class="whitespace-nowrap">
|
||||
{#if docEmbedding && vacancyEmbedding}
|
||||
{Math.round(cosinesim(docEmbedding, vacancyEmbedding) * 100)}
|
||||
/
|
||||
{/if}
|
||||
{scoreState.get(doc._id) ?? 0}
|
||||
</td>
|
||||
<td>
|
||||
{#if match?.complete}
|
||||
@ -232,13 +247,13 @@
|
||||
{#if docState}
|
||||
<Button
|
||||
label={recruit.string.PerformMatch}
|
||||
loading={matching.has(doc._id) || !(match?.complete ?? true)}
|
||||
icon={matching.has(doc._id) || !(match?.complete ?? true) ? Spinner : IconActivity}
|
||||
on:click={() => requestMatch(doc, docState)}
|
||||
/>
|
||||
<Button
|
||||
icon={IconActivity}
|
||||
showTooltip={{ label: presentation.string.DocumentPreview }}
|
||||
on:click={() => showSummary(doc)}
|
||||
on:click={() => showSummary(docState, vacancyState)}
|
||||
/>
|
||||
<Button
|
||||
icon={IconAdd}
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@hcengineering/platform-rig": "^0.6.0",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@hcengineering/platform-rig": "^0.6.0",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"svelte-check": "svelte-check"
|
||||
"svelte-check": "svelte-check",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@hcengineering/platform-rig": "^0.6.0",
|
||||
|
@ -10,7 +10,8 @@
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"svelte-check": "svelte-check",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -29,8 +29,10 @@
|
||||
dispatch('changeContent')
|
||||
}}
|
||||
on:close={() => dispatch('close', null)}
|
||||
style:overflow={'auto'}
|
||||
style:width={'100%'}
|
||||
>
|
||||
<div class="flex-grow mt-4">
|
||||
<div class="flex-grow p-4">
|
||||
<MessageViewer message={value} />
|
||||
</div>
|
||||
</div>
|
||||
|
@ -601,6 +601,38 @@ export function cosinesim (A: number[], B: number[]): number {
|
||||
return similarity
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Sørensen–Dice coefficient
|
||||
*/
|
||||
export function calcSørensenDiceCoefficient (a: string, b: string): number {
|
||||
const first = a.replace(/\s+/g, '')
|
||||
const second = b.replace(/\s+/g, '')
|
||||
|
||||
if (first === second) return 1 // identical or empty
|
||||
if (first.length < 2 || second.length < 2) return 0 // if either is a 0-letter or 1-letter string
|
||||
|
||||
const firstBigrams = new Map<string, number>()
|
||||
for (let i = 0; i < first.length - 1; i++) {
|
||||
const bigram = first.substring(i, i + 2)
|
||||
const count = (firstBigrams.get(bigram) ?? 0) + 1
|
||||
|
||||
firstBigrams.set(bigram, count)
|
||||
}
|
||||
|
||||
let intersectionSize = 0
|
||||
for (let i = 0; i < second.length - 1; i++) {
|
||||
const bigram = second.substring(i, i + 2)
|
||||
const count = firstBigrams.get(bigram) ?? 0
|
||||
|
||||
if (count > 0) {
|
||||
firstBigrams.set(bigram, count - 1)
|
||||
intersectionSize++
|
||||
}
|
||||
}
|
||||
|
||||
return (2.0 * intersectionSize) / (first.length + second.length - 2)
|
||||
}
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
|
@ -9,7 +9,8 @@
|
||||
"build:docs": "api-extractor run --local",
|
||||
"lint": "svelte-check && eslint",
|
||||
"lint:fix": "eslint --fix src",
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
|
||||
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src",
|
||||
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache"
|
||||
},
|
||||
"devDependencies": {
|
||||
"svelte-loader": "^3.1.3",
|
||||
|
@ -84,7 +84,6 @@
|
||||
"@hcengineering/minio": "^0.6.0",
|
||||
"@hcengineering/openai": "^0.6.0",
|
||||
"@hcengineering/translate": "^0.6.0",
|
||||
|
||||
"@hcengineering/login-assets": "^0.6.0",
|
||||
"@hcengineering/view-assets": "^0.6.0",
|
||||
"@hcengineering/task-assets": "^0.6.0",
|
||||
@ -111,7 +110,6 @@
|
||||
"@hcengineering/document-assets": "^0.6.0",
|
||||
"@hcengineering/bitrix-assets": "^0.6.0",
|
||||
"@hcengineering/request-assets": "^0.6.0",
|
||||
|
||||
"@hcengineering/login": "^0.6.1",
|
||||
"@hcengineering/view": "^0.6.2",
|
||||
"@hcengineering/task": "^0.6.1",
|
||||
|
@ -14,6 +14,7 @@
|
||||
//
|
||||
|
||||
import {
|
||||
coreId,
|
||||
DOMAIN_BLOB,
|
||||
DOMAIN_FULLTEXT_BLOB,
|
||||
DOMAIN_MODEL,
|
||||
@ -99,6 +100,8 @@ import { trackerId } from '@hcengineering/tracker'
|
||||
import { viewId } from '@hcengineering/view'
|
||||
import { workbenchId } from '@hcengineering/workbench'
|
||||
|
||||
import coreEng from '@hcengineering/core/src/lang/en.json'
|
||||
|
||||
import loginEng from '@hcengineering/login-assets/lang/en.json'
|
||||
|
||||
import taskEn from '@hcengineering/task-assets/lang/en.json'
|
||||
@ -126,6 +129,8 @@ import hrEn from '@hcengineering/hr-assets/lang/en.json'
|
||||
import documentEn from '@hcengineering/document-assets/lang/en.json'
|
||||
import bitrixEn from '@hcengineering/bitrix-assets/lang/en.json'
|
||||
import requestEn from '@hcengineering/request-assets/lang/en.json'
|
||||
|
||||
addStringsLoader(coreId, async (lang: string) => coreEng)
|
||||
addStringsLoader(loginId, async (lang: string) => loginEng)
|
||||
|
||||
addStringsLoader(taskId, async (lang: string) => taskEn)
|
||||
@ -190,7 +195,7 @@ export function start (
|
||||
ConfigurationMiddleware.create
|
||||
]
|
||||
|
||||
const fullText = getMetricsContext().newChild('fulltext', {})
|
||||
const metrics = getMetricsContext().newChild('indexing', {})
|
||||
function createIndexStages (
|
||||
fullText: MeasureContext,
|
||||
workspace: WorkspaceId,
|
||||
@ -243,7 +248,7 @@ export function start (
|
||||
|
||||
return startJsonRpc(
|
||||
getMetricsContext(),
|
||||
(workspace: WorkspaceId, upgrade: boolean) => {
|
||||
(workspace, upgrade, broadcast) => {
|
||||
const conf: DbConfiguration = {
|
||||
domains: {
|
||||
[DOMAIN_TX]: 'MongoTx',
|
||||
@ -252,6 +257,7 @@ export function start (
|
||||
[DOMAIN_FULLTEXT_BLOB]: 'FullTextBlob',
|
||||
[DOMAIN_MODEL]: 'Null'
|
||||
},
|
||||
metrics,
|
||||
defaultAdapter: 'Mongo',
|
||||
adapters: {
|
||||
MongoTx: {
|
||||
@ -282,14 +288,19 @@ export function start (
|
||||
fulltextAdapter: {
|
||||
factory: createElasticAdapter,
|
||||
url: fullTextUrl,
|
||||
metrics: fullText,
|
||||
stages: (adapter, storage, storageAdapter, contentAdapter) =>
|
||||
createIndexStages(fullText, workspace, adapter, storage, storageAdapter, contentAdapter)
|
||||
createIndexStages(
|
||||
metrics.newChild('stages', {}),
|
||||
workspace,
|
||||
adapter,
|
||||
storage,
|
||||
storageAdapter,
|
||||
contentAdapter
|
||||
)
|
||||
},
|
||||
contentAdapter: {
|
||||
factory: createRekoniAdapter,
|
||||
url: rekoniUrl,
|
||||
metrics: getMetricsContext().newChild('content', {})
|
||||
url: rekoniUrl
|
||||
},
|
||||
storageFactory: () =>
|
||||
new MinioService({
|
||||
@ -299,7 +310,7 @@ export function start (
|
||||
}),
|
||||
workspace
|
||||
}
|
||||
return createPipeline(conf, middlewares, upgrade)
|
||||
return createPipeline(conf, middlewares, upgrade, broadcast)
|
||||
},
|
||||
(token: Token, pipeline: Pipeline, broadcast: BroadcastCall) => {
|
||||
if (token.extra?.mode === 'backup') {
|
||||
|
@ -107,7 +107,7 @@ export class OpenAIEmbeddingsStage implements FullTextPipelineStage {
|
||||
updateSummary (summary: FullSummaryStage): void {
|
||||
summary.fieldFilter.push((attr, value) => {
|
||||
const tMarkup = attr.type._class === core.class.TypeMarkup
|
||||
const lowerCase = value.toLocaleLowerCase()
|
||||
const lowerCase: string = value.toLocaleLowerCase()
|
||||
if (tMarkup && (lowerCase.includes('gpt:') || lowerCase.includes('gpt Answer:'))) {
|
||||
return false
|
||||
}
|
||||
@ -253,11 +253,11 @@ export class OpenAIEmbeddingsStage implements FullTextPipelineStage {
|
||||
const docs = await this.adapter.searchEmbedding(_classes, query, embedding, {
|
||||
size,
|
||||
from,
|
||||
minScore: -100,
|
||||
embeddingBoost: 100,
|
||||
minScore: -9,
|
||||
embeddingBoost: 10,
|
||||
field: this.field,
|
||||
field_enable: this.field_enabled,
|
||||
fulltextBoost: 10
|
||||
fulltextBoost: 1
|
||||
})
|
||||
return {
|
||||
docs,
|
||||
|
@ -17,7 +17,7 @@ import type { Plugin, Resource } from '@hcengineering/platform'
|
||||
import { plugin } from '@hcengineering/platform'
|
||||
|
||||
import type { Account, Class, Ref } from '@hcengineering/core'
|
||||
import { TriggerFunc } from '@hcengineering/server-core'
|
||||
import { AsyncTriggerFunc } from '@hcengineering/server-core'
|
||||
import type { OpenAIConfiguration } from './types'
|
||||
|
||||
export * from './types'
|
||||
@ -31,7 +31,7 @@ export const openAIId = 'openai' as Plugin
|
||||
*/
|
||||
const openaiPlugin = plugin(openAIId, {
|
||||
trigger: {
|
||||
OnGPTRequest: '' as Resource<TriggerFunc>
|
||||
AsyncOnGPTRequest: '' as Resource<AsyncTriggerFunc>
|
||||
},
|
||||
class: {
|
||||
OpenAIConfiguration: '' as Ref<Class<OpenAIConfiguration>>
|
||||
|
@ -26,18 +26,18 @@ import core, {
|
||||
TxCUD,
|
||||
TxProcessor
|
||||
} from '@hcengineering/core'
|
||||
import type { TriggerControl } from '@hcengineering/server-core'
|
||||
import recruit, { ApplicantMatch } from '@hcengineering/recruit'
|
||||
import type { AsyncTriggerControl } from '@hcengineering/server-core'
|
||||
import got from 'got'
|
||||
import { convert } from 'html-to-text'
|
||||
import { chunks, encode } from './encoder/encoder'
|
||||
import { chunks } from './encoder/encoder'
|
||||
import openai, { OpenAIConfiguration, openAIRatelimitter } from './plugin'
|
||||
import recruit, { ApplicantMatch } from '@hcengineering/recruit'
|
||||
|
||||
const model = 'text-davinci-003'
|
||||
|
||||
const defaultOptions = {
|
||||
max_tokens: 4000,
|
||||
temperature: 0.9,
|
||||
temperature: 0.2,
|
||||
top_p: 1,
|
||||
n: 1,
|
||||
stop: null as string | null
|
||||
@ -46,16 +46,31 @@ const defaultOptions = {
|
||||
async function performCompletion (
|
||||
prompt: string,
|
||||
options: typeof defaultOptions,
|
||||
config: OpenAIConfiguration
|
||||
config: OpenAIConfiguration,
|
||||
maxLen: number
|
||||
): Promise<any> {
|
||||
const ep = config.endpoint + '/completions'
|
||||
|
||||
const chunkedPrompt = chunks(prompt, options.max_tokens - 250)[0]
|
||||
const tokens = encode(chunkedPrompt).length
|
||||
const chunkedPrompt = chunks(prompt, options.max_tokens - maxLen)[0]
|
||||
|
||||
let response: any
|
||||
let timeout = 50
|
||||
const st = Date.now()
|
||||
const request: Record<string, any> = {
|
||||
model,
|
||||
prompt: chunkedPrompt,
|
||||
max_tokens: maxLen,
|
||||
temperature: options.temperature,
|
||||
top_p: options.top_p,
|
||||
n: options.n,
|
||||
stream: false
|
||||
}
|
||||
if (options.stop != null) {
|
||||
request.stop = options.stop
|
||||
}
|
||||
while (true) {
|
||||
try {
|
||||
console.info('Sending request to OpenAI')
|
||||
response = await openAIRatelimitter.exec(
|
||||
async () =>
|
||||
await got
|
||||
@ -64,18 +79,8 @@ async function performCompletion (
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${config.token}`
|
||||
},
|
||||
json: {
|
||||
model,
|
||||
prompt: chunkedPrompt,
|
||||
max_tokens: options.max_tokens - tokens,
|
||||
temperature: options.temperature,
|
||||
top_p: options.top_p,
|
||||
n: options.n,
|
||||
stream: false,
|
||||
logprobs: null,
|
||||
stop: options.stop
|
||||
},
|
||||
timeout: 180000
|
||||
json: request,
|
||||
timeout: 60000
|
||||
})
|
||||
.json()
|
||||
)
|
||||
@ -84,15 +89,21 @@ async function performCompletion (
|
||||
const msg = (e.message as string) ?? ''
|
||||
if (
|
||||
msg.includes('Response code 429 (Too Many Requests)') ||
|
||||
msg.includes('Response code 503 (Service Unavailable)')
|
||||
msg.includes('Response code 503 (Service Unavailable)') ||
|
||||
msg.includes('Response code 400 (Bad Request)')
|
||||
) {
|
||||
timeout += 100
|
||||
console.info('Too many requests, Waiting 1sec to retry.')
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 1000)
|
||||
setTimeout(resolve, timeout)
|
||||
})
|
||||
continue
|
||||
}
|
||||
if (Date.now() - st > 60000) {
|
||||
return {}
|
||||
}
|
||||
console.error(e)
|
||||
return []
|
||||
return {}
|
||||
}
|
||||
}
|
||||
return response
|
||||
@ -100,7 +111,7 @@ async function performCompletion (
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
export async function OnGPTRequest (tx: Tx, tc: TriggerControl): Promise<Tx[]> {
|
||||
export async function AsyncOnGPTRequest (tx: Tx, tc: AsyncTriggerControl): Promise<Tx[]> {
|
||||
const actualTx = TxProcessor.extractTx(tx)
|
||||
|
||||
if (tc.hierarchy.isDerived(actualTx._class, core.class.TxCUD) && actualTx.modifiedBy !== openai.account.GPT) {
|
||||
@ -116,7 +127,7 @@ export async function OnGPTRequest (tx: Tx, tc: TriggerControl): Promise<Tx[]> {
|
||||
return []
|
||||
}
|
||||
|
||||
async function handleComment (tx: Tx, tc: TriggerControl): Promise<Tx[]> {
|
||||
async function handleComment (tx: Tx, tc: AsyncTriggerControl): Promise<Tx[]> {
|
||||
const actualTx = TxProcessor.extractTx(tx)
|
||||
const cud: TxCUD<Doc> = actualTx as TxCUD<Doc>
|
||||
|
@ -178,7 +189,7 @@ async function handleComment (tx: Tx, tc: TriggerControl): Promise<Tx[]> {

const options = parseOptions(split)

const response = await performCompletion(prompt, options, config)
const response = await performCompletion(prompt, options, config, 1024)
const result: Tx[] = []

let finalMsg = msg + '</br>'
@ -205,19 +216,60 @@ async function handleComment (tx: Tx, tc: TriggerControl): Promise<Tx[]> {
)
// col.modifiedBy = openai.account.GPT
result.push(col)

// Store response transactions
await tc.txFx(async (st) => {
for (const t of result) {
await st.tx(t)
}
})
return result
}
}
return []
}
async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]> {

function getText (response: any): string | undefined {
let result = ''
for (const choices of response?.choices ?? []) {
let val = (choices.text as string).trim()
// Add new line before Reason:
val = val.split('\n\n').join('\n')
val = val.replace('Reason:', '\nReason:')
val = val.replace('Candidate is', '\nCandidate is')
val = val.replace(/Match score: (\d+\/\d+|\d+%) /gi, (val) => val + '\n')

val = val.split('\n').join('\n<br/>')
result += val.trim()
}
if (result.length === 0) {
return undefined
}
return result
}

async function summarizeCandidate (config: OpenAIConfiguration, chunks: string[], maxLen: number): Promise<string> {
const options: typeof defaultOptions = {
...defaultOptions,
temperature: 0.1
}
if (chunks.length === 1) {
return chunks[0]
}
const candidateSummaryRequest = `I want you to act as a recruiter.
I will provide some information about candidate, and it will be your job to come up with short and essential summary describing resume.
My first request is "I need help to summarize my CV.” ${chunks.join(' ')}`
return getText(await performCompletion(candidateSummaryRequest, options, config, maxLen)) ?? chunks[0]
}

async function summarizeVacancy (config: OpenAIConfiguration, chunks: string[], maxLen: number): Promise<string> {
const options: typeof defaultOptions = {
...defaultOptions,
temperature: 0.1
}
if (chunks.length === 1) {
return chunks[0]
}
const candidateSummaryRequest = `I want you to act as a recruiter.
I will provide some information about vacancy, and it will be your job to come up with short and essential summary describing vacancy.
My first request is "I need help to summarize my Vacancy description.” ${chunks.join(' ')}`
return getText(await performCompletion(candidateSummaryRequest, options, config, maxLen)) ?? chunks[0]
}

async function handleApplicantMatch (tx: Tx, tc: AsyncTriggerControl): Promise<Tx[]> {
const [config] = await tc.findAll(openai.class.OpenAIConfiguration, {})

if (!(config?.enabled ?? false)) {
@ -236,7 +288,7 @@ async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]>
temperature: 0.1
}

const maxAnswerTokens = 500
const maxAnswerTokens = 256
const maxVacancyTokens = options.max_tokens - maxAnswerTokens / 2
const maxCandidateTokens = maxVacancyTokens

@ -247,7 +299,9 @@ async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]>
selectors: [{ selector: 'img', format: 'skip' }]
})

candidateText = chunks(candidateText, maxCandidateTokens)[0]
const candidateTextC = chunks(candidateText, maxCandidateTokens)

candidateText = await summarizeCandidate(config, candidateTextC, maxCandidateTokens)

let vacancyText = cud.attributes.vacancy

@ -255,18 +309,19 @@ async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]>
preserveNewlines: true,
selectors: [{ selector: 'img', format: 'skip' }]
})
vacancyText = chunks(vacancyText, maxVacancyTokens)[0]
vacancyText = await summarizeVacancy(config, chunks(vacancyText, maxVacancyTokens), maxVacancyTokens)

// Enabled, we could complete.
const text = `'I want you to act as a recruiter. I will provide some information about vacancy and resume, and it will be your job to come up with solution why candidate is matching vacancy. Please considering following vacancy:\n ${vacancyText}\n and please write if following candidate good match for vacancy and why:\n ${candidateText}\n`
// const text = `I want you to act as a recruiter.
// I will provide some information about vacancy and resume, and it will be your job to come up with solution why candidate is matching vacancy.
// My first request is "I need help to match vacancy ${vacancyText} and CV: ${candidateText}”`

const text = `'Considering following vacancy:\n ${vacancyText}\n write if following candidate good for vacancy and why:\n ${candidateText}\n`

const response = await performCompletion(text, options, config)
const response = await performCompletion(text, options, config, maxAnswerTokens)
const result: Tx[] = []

let finalMsg = ''

for (const choices of response.choices) {
for (const choices of response?.choices ?? []) {
let val = (choices.text as string).trim()
// Add new line before Reason:
val = val.split('\n\n').join('\n')
@ -291,13 +346,6 @@ async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]>
)
// col.modifiedBy = openai.account.GPT
result.push(col)

// Store response transactions
await tc.txFx(async (st) => {
for (const t of result) {
await st.tx(t)
}
})
return result
}

@ -306,7 +354,7 @@ async function handleApplicantMatch (tx: Tx, tc: TriggerControl): Promise<Tx[]>
*/
export const openAIPluginImpl = async () => ({
trigger: {
OnGPTRequest
AsyncOnGPTRequest
}
})
function parseOptions (split: string[]): typeof defaultOptions {
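Aside: the summarizeCandidate/summarizeVacancy helpers above both follow a "chunk, then summarize only when needed" pattern. A minimal standalone sketch of that pattern, with `chunk` and `complete` as assumed stand-ins for the plugin's chunks/performCompletion/getText helpers (they are not part of this diff):

type Complete = (prompt: string, maxTokens: number) => Promise<string | undefined>

async function summarizeIfLong (
  text: string,
  maxTokens: number,
  chunk: (text: string, maxTokens: number) => string[],
  complete: Complete
): Promise<string> {
  const parts = chunk(text, maxTokens)
  // Short texts are passed through untouched, exactly like the helpers above.
  if (parts.length === 1) {
    return parts[0]
  }
  const prompt = `Summarize the following text, keeping only the essential points: ${parts.join(' ')}`
  // Fall back to the first chunk when the completion fails or returns nothing.
  return (await complete(prompt, maxTokens)) ?? parts[0]
}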
@ -36,10 +36,12 @@ import { Middleware, MiddlewareCreator, Pipeline, SessionContext } from './types
export async function createPipeline (
conf: DbConfiguration,
constructors: MiddlewareCreator[],
upgrade: boolean
upgrade: boolean,
broadcast: (tx: Tx[]) => void
): Promise<Pipeline> {
const storage = await createServerStorage(conf, {
upgrade
upgrade,
broadcast
})
return new TPipeline(storage, constructors)
}
@ -18,7 +18,7 @@ import type { Plugin } from '@hcengineering/platform'
import { plugin } from '@hcengineering/platform'

import type { Class, Ref, Space } from '@hcengineering/core'
import type { ObjectDDParticipant, Trigger } from './types'
import type { AsyncTrigger, AsyncTriggerState, ObjectDDParticipant, Trigger } from './types'

/**
* @public
@ -30,13 +30,16 @@ export const serverCoreId = 'server-core' as Plugin
*/
const serverCore = plugin(serverCoreId, {
class: {
Trigger: '' as Ref<Class<Trigger>>
Trigger: '' as Ref<Class<Trigger>>,
AsyncTrigger: '' as Ref<Class<AsyncTrigger>>,
AsyncTriggerState: '' as Ref<Class<AsyncTriggerState>>
},
mixin: {
ObjectDDParticipant: '' as Ref<ObjectDDParticipant>
},
space: {
DocIndexState: '' as Ref<Space>
DocIndexState: '' as Ref<Space>,
TriggerState: '' as Ref<Space>
}
})
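The AsyncTrigger and AsyncTriggerState refs added above are what a server plugin model would point at when registering an async trigger. A hedged sketch of such a registration, assuming the usual Builder API from @hcengineering/model; the serverOpenAI plugin id and the choice of recruit.class.Applicant are illustrative guesses, not taken from this diff:

import core from '@hcengineering/core'
import { Builder } from '@hcengineering/model'
import recruit from '@hcengineering/recruit'
import serverCore from '@hcengineering/server-core'
import serverOpenAI from '@hcengineering/server-openai' // hypothetical plugin id for this example

export function createModel (builder: Builder): void {
  // The processor discovers these documents via modelDb.findAll(serverCore.class.AsyncTrigger, {})
  // and runs the referenced resource for transactions on the listed classes.
  builder.createDoc(serverCore.class.AsyncTrigger, core.space.Model, {
    trigger: serverOpenAI.trigger.AsyncOnGPTRequest,
    classes: [recruit.class.Applicant]
  })
}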
server/core/src/processor/index.ts (new file, 123 lines)
@ -0,0 +1,123 @@
import core, {
Class,
Doc,
Hierarchy,
MeasureContext,
ModelDb,
Ref,
ServerStorage,
Tx,
TxCUD,
TxFactory,
TxProcessor
} from '@hcengineering/core'
import { getResource } from '@hcengineering/platform'
import plugin from '../plugin'
import { AsyncTrigger, AsyncTriggerControl, AsyncTriggerFunc } from '../types'
/**
* @public
*/
export class AsyncTriggerProcessor {
canceling: boolean = false

processing: Promise<void> | undefined

triggers: AsyncTrigger[] = []

classes: Ref<Class<Doc>>[] = []

factory = new TxFactory(core.account.System)

functions: AsyncTriggerFunc[] = []

trigger = (): void => {}

control: AsyncTriggerControl

constructor (
readonly model: ModelDb,
readonly hierarchy: Hierarchy,
readonly storage: ServerStorage,
readonly metrics: MeasureContext
) {
this.control = {
hierarchy: this.hierarchy,
modelDb: this.model,
txFactory: this.factory,
findAll: async (_class, query, options) => {
return await this.storage.findAll(this.metrics, _class, query, options)
}
}
}

async cancel (): Promise<void> {
this.canceling = true
await this.processing
}

async start (): Promise<void> {
await this.updateTriggers()
this.processing = this.doProcessing()
}

async updateTriggers (): Promise<void> {
try {
this.triggers = await this.model.findAll(plugin.class.AsyncTrigger, {})
this.classes = this.triggers.reduce<Ref<Class<Doc>>[]>((arr, it) => arr.concat(it.classes), [])
this.functions = await Promise.all(this.triggers.map(async (trigger) => await getResource(trigger.trigger)))
} catch (err: any) {
console.error(err)
}
}

async tx (tx: Tx[]): Promise<void> {
const result: Tx[] = []
for (const _tx of tx) {
const actualTx = TxProcessor.extractTx(_tx)
if (
this.hierarchy.isDerived(actualTx._class, core.class.TxCUD) &&
this.hierarchy.isDerived(_tx._class, core.class.TxCUD)
) {
const cud = actualTx as TxCUD<Doc>
if (this.classes.some((it) => this.hierarchy.isDerived(cud.objectClass, it))) {
// We need processing
result.push(
this.factory.createTxCreateDoc(plugin.class.AsyncTriggerState, plugin.space.TriggerState, {
tx: _tx as TxCUD<Doc>,
message: 'Processing...'
})
)
}
}
}
if (result.length > 0) {
await this.storage.apply(this.metrics, result, false)
this.processing = this.doProcessing()
}
}

private async doProcessing (): Promise<void> {
while (!this.canceling) {
const docs = await this.storage.findAll(this.metrics, plugin.class.AsyncTriggerState, {}, { limit: 10 })
if (docs.length === 0) {
return
}

for (const doc of docs) {
const result: Tx[] = []
if (this.canceling) {
break
}

try {
for (const f of this.functions) {
result.push(...(await f(doc.tx, this.control)))
}
} catch (err: any) {}
await this.storage.apply(this.metrics, [this.factory.createTxRemoveDoc(doc._class, doc.space, doc._id)], false)

await this.storage.apply(this.metrics, result, true)
}
}
}
}
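A hedged sketch of the processor lifecycle introduced above, as the storage layer below wires it up; the concrete modelDb/hierarchy/storage/metrics values are assumed to come from the caller, and the './processor' import path is assumed:

import { Hierarchy, MeasureContext, ModelDb, ServerStorage, Tx } from '@hcengineering/core'
import { AsyncTriggerProcessor } from './processor'

async function runAsyncTriggers (
  modelDb: ModelDb,
  hierarchy: Hierarchy,
  storage: ServerStorage,
  metrics: MeasureContext,
  incoming: Tx[]
): Promise<void> {
  const processor = new AsyncTriggerProcessor(modelDb, hierarchy, storage, metrics.newChild('triggers', {}))
  await processor.start() // load AsyncTrigger docs from the model and drain any pending state documents
  await processor.tx(incoming) // queue matching TxCUD as AsyncTriggerState documents for background processing
  await processor.cancel() // wait for the background loop to finish before shutdown
}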
@ -54,13 +54,15 @@ import { FullTextIndex } from './fulltext'
import { FullTextIndexPipeline } from './indexer'
import { FullTextPipelineStage } from './indexer/types'
import serverCore from './plugin'
import { AsyncTriggerProcessor } from './processor'
import { Triggers } from './triggers'
import type {
ContentAdapterFactory,
ContentTextAdapter,
FullTextAdapter,
FullTextAdapterFactory,
ObjectDDParticipant
ObjectDDParticipant,
TriggerControl
} from './types'
import { createCacheFindAll } from './utils'

@ -82,16 +84,15 @@ export interface DbConfiguration {
domains: Record<string, string>
defaultAdapter: string
workspace: WorkspaceId
metrics: MeasureContext
fulltextAdapter: {
factory: FullTextAdapterFactory
url: string
metrics: MeasureContext
stages: FullTextPipelineStageFactory
}
contentAdapter: {
factory: ContentAdapterFactory
url: string
metrics: MeasureContext
}
storageFactory?: () => MinioService
}
@ -99,6 +100,7 @@ export interface DbConfiguration {
class TServerStorage implements ServerStorage {
private readonly fulltext: FullTextIndex
hierarchy: Hierarchy
triggerProcessor: AsyncTriggerProcessor

scopes = new Map<string, Promise<any>>()

@ -112,16 +114,19 @@ class TServerStorage implements ServerStorage {
readonly storageAdapter: MinioService | undefined,
readonly modelDb: ModelDb,
private readonly workspace: WorkspaceId,
private readonly contentAdapter: ContentTextAdapter,
readonly indexFactory: (storage: ServerStorage) => FullTextIndex,
options?: ServerStorageOptions
readonly options: ServerStorageOptions,
metrics: MeasureContext
) {
this.hierarchy = hierarchy
this.fulltext = indexFactory(this)
this.triggerProcessor = new AsyncTriggerProcessor(modelDb, hierarchy, this, metrics.newChild('triggers', {}))
void this.triggerProcessor.start()
}

async close (): Promise<void> {
await this.fulltext.close()
await this.triggerProcessor.cancel()
for (const o of this.adapters.values()) {
await o.close()
}
@ -549,11 +554,7 @@ class TServerStorage implements ServerStorage {
)
const moves = await ctx.with('process-move', {}, () => this.processMove(ctx, txes, findAll))

const triggers = await ctx.with('process-triggers', {}, async (ctx) => {
const result: Tx[] = []
for (const tx of txes) {
result.push(
...(await this.triggers.apply(tx.modifiedBy, tx, {
const triggerControl: Omit<TriggerControl, 'txFactory'> = {
removedMap,
workspace: this.workspace,
fx: triggerFx.fx,
@ -568,12 +569,13 @@ class TServerStorage implements ServerStorage {
},
findAll: fAll(ctx),
modelDb: this.modelDb,
hierarchy: this.hierarchy,
txFx: async (f) => {
await f(this.getAdapter(DOMAIN_TX))
hierarchy: this.hierarchy
}
}))
)
const triggers = await ctx.with('process-triggers', {}, async (ctx) => {
const result: Tx[] = []
for (const tx of txes) {
result.push(...(await this.triggers.apply(tx.modifiedBy, tx, triggerControl)))
await ctx.with('async-triggers', {}, (ctx) => this.triggerProcessor.tx([tx]))
}
return result
})
@ -639,6 +641,40 @@ class TServerStorage implements ServerStorage {
return { passed, onEnd }
}

async apply (ctx: MeasureContext, tx: Tx[], broadcast: boolean): Promise<Tx[]> {
const triggerFx = new Effects()
const cacheFind = createCacheFindAll(this)

const txToStore = tx.filter(
(it) => it.space !== core.space.DerivedTx && !this.hierarchy.isDerived(it._class, core.class.TxApplyIf)
)
await ctx.with('domain-tx', {}, async () => await this.getAdapter(DOMAIN_TX).tx(...txToStore))

await ctx.with('apply', {}, (ctx) => this.routeTx(ctx, ...tx))

// send transactions
if (broadcast) {
this.options?.broadcast?.(tx)
}
// invoke triggers and store derived objects
const derived = await this.proccessDerived(ctx, tx, triggerFx, cacheFind, new Map<Ref<Doc>, Doc>())

// index object
for (const _tx of tx) {
await ctx.with('fulltext', {}, (ctx) => this.fulltext.tx(ctx, _tx))
}

// index derived objects
for (const tx of derived) {
await ctx.with('derived-processor', { _class: txClass(tx) }, (ctx) => this.fulltext.tx(ctx, tx))
}

for (const fx of triggerFx.effects) {
await fx()
}
return [...tx, ...derived]
}

async tx (ctx: MeasureContext, tx: Tx): Promise<[TxResult, Tx[]]> {
// store tx
const _class = txClass(tx)
@ -753,13 +789,15 @@ export interface ServerStorageOptions {

// Indexing is not required to be started for upgrade mode.
upgrade: boolean

broadcast?: (tx: Tx[]) => void
}
/**
* @public
*/
export async function createServerStorage (
conf: DbConfiguration,
options?: ServerStorageOptions
options: ServerStorageOptions
): Promise<ServerStorage> {
const hierarchy = new Hierarchy()
const triggers = new Triggers()
@ -803,13 +841,15 @@ export async function createServerStorage (
const fulltextAdapter = await conf.fulltextAdapter.factory(
conf.fulltextAdapter.url,
conf.workspace,
conf.fulltextAdapter.metrics
conf.metrics.newChild('fulltext', {})
)

const metrics = conf.metrics.newChild('server-storage', {})

const contentAdapter = await conf.contentAdapter.factory(
conf.contentAdapter.url,
conf.workspace,
conf.contentAdapter.metrics
metrics.newChild('content', {})
)

const defaultAdapter = adapters.get(conf.defaultAdapter)
@ -827,7 +867,7 @@ export async function createServerStorage (
stages,
hierarchy,
conf.workspace,
fulltextAdapter.metrics(),
metrics.newChild('fulltext', {}),
modelDb
)
return new FullTextIndex(
@ -837,10 +877,9 @@ export async function createServerStorage (
storageAdapter,
conf.workspace,
indexer,
options?.upgrade ?? false
options.upgrade ?? false
)
}

return new TServerStorage(
conf.domains,
conf.defaultAdapter,
@ -851,9 +890,9 @@ export async function createServerStorage (
storageAdapter,
modelDb,
conf.workspace,
contentAdapter,
indexFactory,
options
options,
metrics
)
}
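A minimal sketch of constructing server storage with the now-required options object; `conf` stands for a fully populated DbConfiguration (as in the mongo test further down), and the re-exports from '@hcengineering/server-core' are assumed:

import { ServerStorage, Tx } from '@hcengineering/core'
import { createServerStorage, DbConfiguration } from '@hcengineering/server-core'

async function startStorage (conf: DbConfiguration, send: (tx: Tx[]) => void): Promise<ServerStorage> {
  // `upgrade` is now required; `broadcast` is optional and, when present, lets
  // ServerStorage.apply(..., true) push trigger-produced transactions to clients.
  return await createServerStorage(conf, { upgrade: false, broadcast: send })
}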
@ -32,6 +32,7 @@ import {
Storage,
Timestamp,
Tx,
TxCUD,
TxFactory,
TxResult,
WorkspaceId
@ -112,8 +113,6 @@ export interface TriggerControl {
// Later can be replaced with generic one with bucket encapsulated inside.
storageFx: (f: (adapter: MinioService, workspaceId: WorkspaceId) => Promise<void>) => void
fx: (f: () => Promise<void>) => void

txFx: (f: (storage: Storage) => Promise<void>) => Promise<void>
}

/**
@ -121,6 +120,20 @@ export interface TriggerControl {
*/
export type TriggerFunc = (tx: Tx, ctrl: TriggerControl) => Promise<Tx[]>

/**
* @public
*/
export interface AsyncTriggerControl {
txFactory: TxFactory
findAll: Storage['findAll']
hierarchy: Hierarchy
modelDb: ModelDb
}
/**
* @public
*/
export type AsyncTriggerFunc = (tx: Tx, ctrl: AsyncTriggerControl) => Promise<Tx[]>

/**
* @public
*/
@ -128,6 +141,22 @@ export interface Trigger extends Doc {
trigger: Resource<TriggerFunc>
}

/**
* @public
*/
export interface AsyncTrigger extends Doc {
trigger: Resource<AsyncTriggerFunc>
classes: Ref<Class<Doc>>[]
}

/**
* @public
*/
export interface AsyncTriggerState extends Doc {
tx: TxCUD<Doc>
message: string
}

/**
* @public
*/
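To illustrate the new contract, a hypothetical AsyncTriggerFunc; unlike TriggerFunc it only receives txFactory/findAll/hierarchy/modelDb, and the transactions it returns are applied and broadcast by AsyncTriggerProcessor. The class check and the empty update below are placeholders for this example only:

import core, { Doc, Tx, TxCreateDoc } from '@hcengineering/core'
import { AsyncTriggerControl } from './types' // import path assumed

export async function onDocCreated (tx: Tx, control: AsyncTriggerControl): Promise<Tx[]> {
  if (tx._class !== core.class.TxCreateDoc) {
    return []
  }
  const createTx = tx as TxCreateDoc<Doc>
  // Derived transactions are simply returned; the processor applies and broadcasts them.
  return [
    // Placeholder update; a real trigger would compute meaningful operations here.
    control.txFactory.createTxUpdateDoc(createTx.objectClass, createTx.objectSpace, createTx.objectId, {})
  ]
}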
@ -196,22 +196,22 @@ class ElasticAdapter implements FullTextAdapter {
}
},
script: {
source: `Math.abs(cosineSimilarity(params.queryVector, '${options.field}')) + 1`,
source: `cosineSimilarity(params.queryVector, '${options.field}') + 1`,
params: {
queryVector: embedding
}
},
boost: options.embeddingBoost ?? 100.0
boost: options.embeddingBoost ?? 10.0
}
},
{
simple_query_string: {
query: search.$search,
flags: 'OR|PREFIX|PHRASE',
default_operator: 'and',
boost: options.fulltextBoost ?? 1
}
}
// ,{
// simple_query_string: {
// query: search.$search,
// flags: 'OR|PREFIX|PHRASE',
// default_operator: 'and',
// boost: options.fulltextBoost ?? 1
// }
// }
],
filter: [
{
@ -235,9 +235,10 @@ class ElasticAdapter implements FullTextAdapter {
const sourceHits = result.body.hits.hits

const min = options?.minScore ?? 75
const hits: any[] = sourceHits.filter((it: any) => it._score > min)
const embBoost = options.embeddingBoost ?? 10.0

return hits.map((hit) => ({ ...hit._source, _score: hit._score - (options.embeddingBoost ?? 100.0) }))
const hits: any[] = sourceHits.filter((it: any) => it._score - embBoost > min)
return hits.map((hit) => ({ ...hit._source, _score: hit._score - embBoost }))
} catch (err) {
console.error(JSON.stringify(err, null, 2))
return []
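A small standalone sketch of the score handling above: the embedding boost (now 10 instead of 100) is subtracted from each Elasticsearch hit before the minScore cut-off is applied, so the threshold keeps operating on un-boosted scores. Values here are made up for illustration:

interface Hit { _score: number; _source: Record<string, unknown> }

function normalizeHits (hits: Hit[], minScore = 75, embeddingBoost = 10.0): Array<Record<string, unknown> & { _score: number }> {
  return hits
    // Drop hits whose un-boosted score is below the threshold.
    .filter((hit) => hit._score - embeddingBoost > minScore)
    // Report the un-boosted score alongside the document source.
    .map((hit) => ({ ...hit._source, _score: hit._score - embeddingBoost }))
}

// Example: a hit scored 90 with a boost of 10 becomes 80 and passes a threshold of 75.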
@ -13,10 +13,27 @@
// limitations under the License.
//

import type { Account, Arr, Class, Data, Doc, Mixin, Obj, Ref, TxCreateDoc, TxCUD } from '@hcengineering/core'
import core, { AttachedDoc, ClassifierKind, DOMAIN_MODEL, DOMAIN_TX, TxFactory } from '@hcengineering/core'
import core, {
Account,
Arr,
AttachedDoc,
Class,
ClassifierKind,
Data,
Doc,
DOMAIN_DOC_INDEX_STATE,
DOMAIN_MODEL,
DOMAIN_TX,
Mixin,
Obj,
Ref,
TxCreateDoc,
TxCUD,
TxFactory
} from '@hcengineering/core'
import type { IntlString, Plugin } from '@hcengineering/platform'
import { plugin } from '@hcengineering/platform'
import server from '@hcengineering/server-core'

export const txFactory = new TxFactory(core.account.System)

@ -101,6 +118,30 @@ export function genMinModel (): TxCUD<Doc>[] {
domain: DOMAIN_MODEL
})
)
txes.push(
createClass(core.class.DocIndexState, {
label: 'DocIndexState' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_DOC_INDEX_STATE
})
)
txes.push(
createClass(server.class.AsyncTrigger, {
label: 'AsyncTrigger' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_MODEL
})
)
txes.push(
createClass(server.class.AsyncTriggerState, {
label: 'AsyncTriggerState' as IntlString,
extends: core.class.Doc,
kind: ClassifierKind.CLASS,
domain: DOMAIN_DOC_INDEX_STATE
})
)
txes.push(
createClass(core.class.Account, {
label: 'Account' as IntlString,
@ -143,21 +143,20 @@ describe('mongo operations', () => {
url: ''
}
},
metrics: new MeasureMetricsContext('', {}),
fulltextAdapter: {
factory: createNullFullTextAdapter,
url: '',
stages: () => [],
metrics: new MeasureMetricsContext('', {})
stages: () => []
},
contentAdapter: {
factory: createNullContentTextAdapter,
url: '',
metrics: new MeasureMetricsContext('', {})
url: ''
},
workspace: getWorkspaceId(dbId, ''),
storageFactory: () => createNullStorageFactory()
}
const serverStorage = await createServerStorage(conf)
const serverStorage = await createServerStorage(conf, { upgrade: false })
const ctx = new MeasureMetricsContext('client', {})
client = await createClient(async (handler) => {
const st: ClientConnection = {
@ -19,6 +19,7 @@ import core, {
Ref,
Space,
toWorkspaceString,
Tx,
TxFactory,
WorkspaceId
} from '@hcengineering/core'
@ -27,7 +28,7 @@ import type { Pipeline } from '@hcengineering/server-core'
import { decodeToken, Token } from '@hcengineering/server-token'
import { createServer, IncomingMessage } from 'http'
import WebSocket, { WebSocketServer } from 'ws'
import { BroadcastCall, Session } from './types'
import { BroadcastCall, PipelineFactory, Session } from './types'

let LOGGING_ENABLED = true

@ -56,7 +57,7 @@ class SessionManager {
ctx: MeasureContext,
ws: WebSocket,
token: Token,
pipelineFactory: (ws: WorkspaceId, upgrade: boolean) => Promise<Pipeline>,
pipelineFactory: PipelineFactory,
productId: string
): Promise<Session> {
const wsString = toWorkspaceString(token.workspace, '@')
@ -83,7 +84,7 @@
}
if (LOGGING_ENABLED) console.log('no sessions for workspace', wsString)
// Re-create pipeline.
workspace.pipeline = pipelineFactory(token.workspace, true)
workspace.pipeline = pipelineFactory(token.workspace, true, (tx) => this.broadcastAll(workspace as Workspace, tx))

const pipeline = await workspace.pipeline
const session = this.createSession(token, pipeline)
@ -103,14 +104,20 @@
return session
}

private createWorkspace (
pipelineFactory: (ws: WorkspaceId, upgrade: boolean) => Promise<Pipeline>,
token: Token
): Workspace {
broadcastAll (workspace: Workspace, tx: Tx[]): void {
for (const _tx of tx) {
const msg = serialize({ result: _tx })
for (const session of workspace.sessions) {
session[1].send(msg)
}
}
}

private createWorkspace (pipelineFactory: PipelineFactory, token: Token): Workspace {
const upgrade = token.extra?.model === 'upgrade'
const workspace = {
const workspace: Workspace = {
id: generateId(),
pipeline: pipelineFactory(token.workspace, upgrade),
pipeline: pipelineFactory(token.workspace, upgrade, (tx) => this.broadcastAll(workspace, tx)),
sessions: [],
upgrade
}
@ -305,7 +312,7 @@ async function handleRequest<S extends Session> (
*/
export function start (
ctx: MeasureContext,
pipelineFactory: (workspace: WorkspaceId, upgrade: boolean) => Promise<Pipeline>,
pipelineFactory: PipelineFactory,
sessionFactory: (token: Token, pipeline: Pipeline, broadcast: BroadcastCall) => Session,
port: number,
productId: string,

@ -38,3 +38,8 @@ export type BroadcastCall = (
resp: Response<any>,
target?: string
) => void

/**
* @public
*/
export type PipelineFactory = (ws: WorkspaceId, upgrade: boolean, broadcast: (tx: Tx[]) => void) => Promise<Pipeline>
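A hedged sketch of a PipelineFactory matching the type above, forwarding the broadcast callback (broadcastAll in the session manager) down to createPipeline; configFor and middlewares are hypothetical helpers, and the re-exports from '@hcengineering/server-core' are assumed:

import { Tx, WorkspaceId } from '@hcengineering/core'
import { createPipeline, DbConfiguration, MiddlewareCreator, Pipeline } from '@hcengineering/server-core'

declare function configFor (ws: WorkspaceId): DbConfiguration
declare const middlewares: MiddlewareCreator[]

// Shape matches PipelineFactory: (ws, upgrade, broadcast) => Promise<Pipeline>.
export const pipelineFactory = async (
  ws: WorkspaceId,
  upgrade: boolean,
  broadcast: (tx: Tx[]) => void
): Promise<Pipeline> => {
  // The session manager passes broadcastAll here, so transactions produced by
  // server-side async triggers reach every client session of the workspace.
  return await createPipeline(configFor(ws), middlewares, upgrade, broadcast)
}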
@ -3,6 +3,7 @@
"version": "0.6.0",
"scripts": {
"build": "tsc --incremental --noEmit --outDir ./dist_cache && echo build",
"build:watch": "tsc --incremental --noEmit --outDir ./dist_cache",
"lint:fix": "eslint --fix src",
"lint": "svelte-check && eslint",
"format": "prettier --write --plugin-search-dir=. src && eslint --fix src"
tools/apm/config/rig.json (new file, 18 lines)
@ -0,0 +1,18 @@
// The "rig.json" file directs tools to look for their config files in an external package.
// Documentation for this system: https://www.npmjs.com/package/@rushstack/rig-package
{
"$schema": "https://developer.microsoft.com/json-schemas/rig-package/rig.schema.json",

/**
* (Required) The name of the rig package to inherit from.
* It should be an NPM package name with the "-rig" suffix.
*/
"rigPackageName": "@hcengineering/platform-rig"

/**
* (Optional) Selects a config profile from the rig package. The name must consist of
* lowercase alphanumeric words separated by hyphens, for example "sample-profile".
* If omitted, then the "default" profile will be used."
*/
// "rigProfile": "your-profile-name"
}
@ -6,7 +6,7 @@
"license": "EPL-2.0",
"bin": "./lib/apm.js",
"scripts": {
"build": "heft build && esbuild src/index.ts --bundle --minify --platform=node > ./apm.js && echo 'build'",
"build": "heft build",
"build:watch": "tsc",
"lint:fix": "eslint --fix src",
"lint": "eslint src",