Improve init script (#6089)

Signed-off-by: Denis Bykhov <bykhov.denis@gmail.com>
Denis Bykhov 2024-07-18 13:11:42 +05:00 committed by GitHub
parent 58c7027a04
commit 284a1c5b9b
18 changed files with 2085 additions and 233 deletions

View File

@@ -1052,6 +1052,9 @@ dependencies:
'@types/jest':
specifier: ^29.5.5
version: 29.5.12
'@types/js-yaml':
specifier: ^4.0.9
version: 4.0.9
'@types/koa':
specifier: 2.14.0
version: 2.14.0
@@ -1070,6 +1073,9 @@ dependencies:
'@types/koa__cors':
specifier: ^3.0.3
version: 3.3.1
'@types/markdown-it':
specifier: ~13.0.0
version: 13.0.8
'@types/mime-types':
specifier: ~2.1.1
version: 2.1.4
@@ -1310,6 +1316,9 @@ dependencies:
jest-environment-jsdom:
specifier: 29.7.0
version: 29.7.0(bufferutil@4.0.8)(utf-8-validate@6.0.3)
js-yaml:
specifier: ^4.1.0
version: 4.1.0
jwt-simple:
specifier: ^0.5.6
version: 0.5.6
@@ -1340,6 +1349,9 @@ dependencies:
livekit-client:
specifier: ^2.0.10
version: 2.2.0
markdown-it:
specifier: ^14.0.0
version: 14.0.0
mime-types:
specifier: ~2.1.34
version: 2.1.35
@@ -7654,6 +7666,10 @@ packages:
pretty-format: 29.7.0
dev: false
/@types/js-yaml@4.0.9:
resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==}
dev: false
/@types/jsdom@20.0.1:
resolution: {integrity: sha512-d0r18sZPmMQr1eG35u12FZfhIXNrnsPU/g5wvRKCUf/tOGilKKwYMYGqh33BNR6ba+2gkHw1EUiHoN3mn7E5IQ==}
dependencies:
@@ -7738,10 +7754,25 @@ packages:
'@types/koa': 2.14.0
dev: false
/@types/linkify-it@3.0.5:
resolution: {integrity: sha512-yg6E+u0/+Zjva+buc3EIb+29XEg4wltq7cSmd4Uc2EE/1nUVmxyzpX6gUXD0V8jIrG0r7YeOGVIbYRkxeooCtw==}
dev: false
/@types/lodash@4.14.202:
resolution: {integrity: sha512-OvlIYQK9tNneDlS0VN54LLd5uiPCBOp7gS5Z0f1mjoJYBrtStzgmJBxONW3U6OZqdtNzZPmn9BS/7WI7BFFcFQ==}
dev: false
/@types/markdown-it@13.0.8:
resolution: {integrity: sha512-V+KmpgiipS+zoypeUSS9ojesWtY/0k4XfqcK2fnVrX/qInJhX7rsCxZ/rygiPH2zxlPPrhfuW0I6ddMcWTKLsg==}
dependencies:
'@types/linkify-it': 3.0.5
'@types/mdurl': 1.0.5
dev: false
/@types/mdurl@1.0.5:
resolution: {integrity: sha512-6L6VymKTzYSrEf4Nev4Xa1LCHKrlTlYCBMTlQKFuddo1CvQcE52I0mwfOJayueUC7MJuXOeHTcIU683lzd0cUA==}
dev: false
/@types/mdx@2.0.11:
resolution: {integrity: sha512-HM5bwOaIQJIQbAYfax35HCKxx7a3KrK3nBtIqJgSOitivTD1y3oW9P3rxY9RkXYPUk7y/AjAohfHKmFpGE79zw==}
dev: false
@@ -25736,12 +25767,13 @@ packages:
dev: false
file:projects/server-tool.tgz(@types/node@20.11.19)(bufferutil@4.0.8)(esbuild@0.20.1)(ts-node@10.9.2)(utf-8-validate@6.0.3):
resolution: {integrity: sha512-7THPdPOTi6rdVo4B0AUJz4mx3urr/lNWZ88ZzgUjPXTl7T2w2PyplAEoCFvxN8A184+6KM9Wb0trUlnftH72fA==, tarball: file:projects/server-tool.tgz}
resolution: {integrity: sha512-3TUSDFQDCGfnf+u6ADBSVX7xBJY3hxkTMPCXdA8r7vmHw2jD+goCUOWjFwCKHyBm2/ObM3ABgrqoysGt2loLsA==, tarball: file:projects/server-tool.tgz}
id: file:projects/server-tool.tgz
name: '@rush-temp/server-tool'
version: 0.0.0
dependencies:
'@types/jest': 29.5.12
'@types/js-yaml': 4.0.9
'@types/uuid': 8.3.4
'@types/ws': 8.5.10
'@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.56.0)(typescript@5.3.3)
@@ -25753,6 +25785,7 @@ packages:
eslint-plugin-promise: 6.1.1(eslint@8.56.0)
fast-equals: 5.0.1
jest: 29.7.0(@types/node@20.11.19)(ts-node@10.9.2)
js-yaml: 4.1.0
mongodb: 6.8.0
prettier: 3.2.5
ts-jest: 29.1.2(esbuild@0.20.1)(jest@29.7.0)(typescript@5.3.3)
@@ -26984,7 +27017,7 @@ packages:
dev: false
file:projects/text.tgz(@types/node@20.11.19)(bufferutil@4.0.8)(esbuild@0.20.1)(ts-node@10.9.2)(utf-8-validate@6.0.3)(y-protocols@1.0.6):
resolution: {integrity: sha512-mUWWTKZWMvUURAcxKRJaO/G5XdwdP7eAOACU3VU2qA2/RTtHGCREgKMpFFK/LwXXqwx0x+LBwfuozf6jLdyjBg==, tarball: file:projects/text.tgz}
resolution: {integrity: sha512-ZQInjd9DHWVEXBeaR3wy+jQjDfHYnvJGKBiRWjr6i1AKIl/GNdDG/3G8QTQpu6O637ME04TzDGRbe3/9KNY+0w==, tarball: file:projects/text.tgz}
id: file:projects/text.tgz
name: '@rush-temp/text'
version: 0.0.0
@@ -27011,6 +27044,7 @@ packages:
'@tiptap/starter-kit': 2.2.4(@tiptap/pm@2.2.4)
'@tiptap/suggestion': 2.2.4(@tiptap/core@2.2.4)(@tiptap/pm@2.2.4)
'@types/jest': 29.5.12
'@types/markdown-it': 13.0.8
'@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.56.0)(typescript@5.3.3)
'@typescript-eslint/parser': 6.21.0(eslint@8.56.0)(typescript@5.3.3)
eslint: 8.56.0
@@ -27021,6 +27055,7 @@ packages:
fast-equals: 5.0.1
jest: 29.7.0(@types/node@20.11.19)(ts-node@10.9.2)
jest-environment-jsdom: 29.7.0(bufferutil@4.0.8)(utf-8-validate@6.0.3)
markdown-it: 14.0.0
prettier: 3.2.5
prosemirror-codemark: 0.4.2(prosemirror-model@1.19.4)
prosemirror-model: 1.19.4

View File

@@ -145,6 +145,10 @@ export function devTool (
if (initWS !== undefined) {
setMetadata(toolPlugin.metadata.InitWorkspace, initWS)
}
const initScriptUrl = process.env.INIT_SCRIPT_URL
if (initScriptUrl !== undefined) {
setMetadata(toolPlugin.metadata.InitScriptURL, initScriptUrl)
}
setMetadata(toolPlugin.metadata.Endpoint, transactorUrl)
setMetadata(toolPlugin.metadata.Transactor, transactorUrl)

View File

@@ -35,7 +35,8 @@
"jest": "^29.7.0",
"jest-environment-jsdom": "29.7.0",
"ts-jest": "^29.1.1",
"@types/jest": "^29.5.5"
"@types/jest": "^29.5.5",
"@types/markdown-it": "~13.0.0"
},
"dependencies": {
"@hcengineering/core": "^0.6.32",
@@ -62,6 +63,7 @@
"@tiptap/suggestion": "^2.2.4",
"prosemirror-model": "^1.19.4",
"prosemirror-codemark": "^0.4.2",
"markdown-it": "^14.0.0",
"fast-equals": "^5.0.1", "fast-equals": "^5.0.1",
"yjs": "^13.5.52", "yjs": "^13.5.52",
"y-prosemirror": "^1.2.1" "y-prosemirror": "^1.2.1"

View File

@@ -21,6 +21,11 @@ export * from './markup/utils'
export * from './nodes'
export * from './ydoc'
export * from './marks/code'
export * from './markdown'
export * from './markdown/serializer'
export * from './markdown/parser'
export * from './markdown/compare'
export * from './markdown/node'
export * from './kits/default-kit'
export * from './kits/server-kit'

View File

@@ -0,0 +1,46 @@
/**
* Calculate Sørensen-Dice coefficient
*/
export function calcSørensenDiceCoefficient (a: string, b: string): number {
const first = a.replace(/\s+/g, '')
const second = b.replace(/\s+/g, '')
if (first === second) return 1 // identical or empty
if (first.length < 2 || second.length < 2) return 0 // if either is a 0-letter or 1-letter string
const firstBigrams = new Map<string, number>()
for (let i = 0; i < first.length - 1; i++) {
const bigram = first.substring(i, i + 2)
const count = (firstBigrams.get(bigram) ?? 0) + 1
firstBigrams.set(bigram, count)
}
let intersectionSize = 0
for (let i = 0; i < second.length - 1; i++) {
const bigram = second.substring(i, i + 2)
const count = firstBigrams.get(bigram) ?? 0
if (count > 0) {
firstBigrams.set(bigram, count - 1)
intersectionSize++
}
}
return (2.0 * intersectionSize) / (first.length + second.length - 2)
}
/**
* Perform a markdown diff/comparison to check whether there are major differences.
*/
export function isMarkdownsEquals (source1: string, source2: string): boolean {
const lines1 = source1
.split('\n')
.map((it) => it.trimEnd())
.join('\n')
const lines2 = source2
.split('\n')
.map((it) => it.trimEnd())
.join('\n')
return lines1 === lines2
}
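
A minimal usage sketch for these helpers; the relative import path is an assumption, and the values follow directly from the implementation above:

import { calcSørensenDiceCoefficient, isMarkdownsEquals } from './compare'

// Bigram similarity ignores whitespace: identical strings score 1, fully different ones score 0.
const score = calcSørensenDiceCoefficient('init script', 'init scripts')
console.log(score > 0.9) // true, the two strings share almost all bigrams

// Equality ignores trailing whitespace on each line.
console.log(isMarkdownsEquals('# Title \n', '# Title\n')) // true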

View File

@@ -0,0 +1,42 @@
import { Extensions } from '@tiptap/core'
import { defaultExtensions } from '../extensions'
import { MarkdownParser } from './parser'
import { MarkdownState, storeMarks, storeNodes } from './serializer'
import { MarkupNode } from '../markup/model'
import { markupToJSON } from '../markup/utils'
/**
* @public
*/
export function parseMessageMarkdown (
message: string | undefined,
imageUrl: string,
refUrl: string = 'ref://',
extensions: Extensions = defaultExtensions
): MarkupNode {
const parser = new MarkdownParser(extensions, refUrl, imageUrl)
return parser.parse(message ?? '')
}
/**
* @public
*/
export function serializeMessage (node: MarkupNode, refUrl: string, imageUrl: string): string {
const state = new MarkdownState(storeNodes, storeMarks, { tightLists: true, refUrl, imageUrl })
state.renderContent(node)
return state.out
}
/**
* @public
*/
export async function markupToMarkdown (
markup: string,
refUrl: string = 'ref://',
imageUrl: string = 'http://localhost',
preprocessor?: (nodes: MarkupNode) => Promise<void>
): Promise<string> {
const json = markupToJSON(markup)
await preprocessor?.(json)
return serializeMessage(json, refUrl, imageUrl)
}
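
A hedged round-trip sketch for the functions above; the import path and the ref/image URLs are placeholders:

import { parseMessageMarkdown, serializeMessage } from './index'

// Markdown -> MarkupNode tree -> markdown again.
const node = parseMessageMarkdown('**bold** and a [link](https://example.com)', 'http://localhost/image')
const markdown = serializeMessage(node, 'ref://', 'http://localhost/image')
console.log(markdown) // round-trips to essentially the same markdown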

View File

@@ -0,0 +1,35 @@
import { deepEqual } from 'fast-equals'
import { MarkupMark, MarkupMarkType, MarkupNode } from '../markup/model'
export function traverseMarks (node: MarkupNode, f: (el: MarkupMark) => void): void {
node.marks?.forEach(f)
}
export function markAttrs (mark: MarkupMark): Record<string, string> {
return mark.attrs ?? {}
}
export function isInSet (mark: MarkupMark, marks: MarkupMark[]): boolean {
return marks.find((m) => markEq(mark, m)) !== undefined
}
export function addToSet (mark: MarkupMark, marks: MarkupMark[]): MarkupMark[] {
const m = marks.find((m) => markEq(mark, m))
if (m !== undefined) {
// We already have mark
return marks
}
return [...marks, mark]
}
export function removeFromSet (markType: MarkupMarkType, marks: MarkupMark[]): MarkupMark[] {
return marks.filter((m) => m.type !== markType)
}
export function sameSet (a?: MarkupMark[], b?: MarkupMark[]): boolean {
return deepEqual(a, b)
}
export function markEq (first: MarkupMark, other: MarkupMark): boolean {
return deepEqual(first, other)
}
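
A short sketch of how the mark-set helpers compose; the import paths and the mark literal are illustrative:

import { addToSet, isInSet, removeFromSet } from './marks'
import { MarkupMarkType, type MarkupMark } from '../markup/model'

const bold: MarkupMark = { type: MarkupMarkType.bold, attrs: {} }
let active: MarkupMark[] = []

active = addToSet(bold, active) // [bold]
active = addToSet(bold, active) // still [bold]: deep-equal marks are not added twice
console.log(isInSet(bold, active)) // true
active = removeFromSet(MarkupMarkType.bold, active) // []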

View File

@@ -0,0 +1,16 @@
import { Attrs, MarkupNode } from '../markup/model'
export function traverseMarkupNode (node: MarkupNode, f: (el: MarkupNode) => void): void {
f(node)
node.content?.forEach((c) => {
traverseMarkupNode(c, f)
})
}
export function messageContent (node: MarkupNode): MarkupNode[] {
return node?.content ?? []
}
export function nodeAttrs (node: MarkupNode): Attrs {
return node.attrs ?? {}
}
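
A brief sketch of walking a MarkupNode tree with the traversal helper; the import paths and the document literal are illustrative:

import { traverseMarkupNode, messageContent } from './node'
import { MarkupNodeType, type MarkupNode } from '../markup/model'

const doc: MarkupNode = {
  type: MarkupNodeType.doc,
  content: [{ type: MarkupNodeType.paragraph, content: [{ type: MarkupNodeType.text, text: 'hello' }] }]
}

// Depth-first visit of every node, collecting plain text.
let text = ''
traverseMarkupNode(doc, (node) => {
  if (node.text !== undefined) text += node.text
})
console.log(text, messageContent(doc).length) // 'hello' 1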

View File

@@ -0,0 +1,722 @@
import { Extensions } from '@tiptap/core'
import MarkdownIt, { type Token } from 'markdown-it'
import type { RuleCore } from 'markdown-it/lib/parser_core'
import type StateCore from 'markdown-it/lib/rules_core/state_core'
import { addToSet, removeFromSet, sameSet } from './marks'
import { messageContent } from './node'
import { Attrs, AttrValue, MarkupMark, MarkupMarkType, MarkupNode, MarkupNodeType } from '../markup/model'
import { htmlToJSON } from '../markup/utils'
interface ParsingBlockRule {
block: MarkupNodeType
getAttrs?: (tok: Token, state: MarkdownParseState) => Attrs
wrapContent?: boolean
noCloseToken?: boolean
}
interface ParsingNodeRule {
node: MarkupNodeType
getAttrs?: (tok: Token, state: MarkdownParseState) => Attrs
}
interface ParsingMarkRule {
mark: MarkupMarkType
getAttrs?: (tok: Token, state: MarkdownParseState) => Attrs
noCloseToken?: boolean
}
interface ParsingSpecialRule {
type: (state: MarkdownParseState, tok: Token) => { type: MarkupMarkType | MarkupNodeType, node: boolean }
getAttrs?: (tok: Token, state: MarkdownParseState) => Attrs
}
// eslint-disable-next-line @typescript-eslint/no-empty-interface
interface ParsingIgnoreRule {
// empty
}
type HandlerRecord = (state: MarkdownParseState, tok: Token) => void
type HandlersRecord = Record<string, HandlerRecord>
// ****************************************************************
// Markdown parser
// ****************************************************************
function isText (a: MarkupNode, b: MarkupNode): boolean {
return (a.type === MarkupNodeType.text || a.type === MarkupNodeType.reference) && b.type === MarkupNodeType.text
}
function maybeMerge (a: MarkupNode, b: MarkupNode): MarkupNode | undefined {
if (isText(a, b) && (sameSet(a.marks, b.marks) || (a.text === '' && (a.marks?.length ?? 0) === 0))) {
if (a.text === '' && (a.marks?.length ?? 0) === 0) {
return { ...b }
}
return { ...a, text: (a.text ?? '') + (b.text ?? '') }
}
return undefined
}
interface StateElement {
type: MarkupNodeType
content: MarkupNode[]
attrs: Attrs
}
// Object used to track the context of a running parse.
class MarkdownParseState {
stack: StateElement[]
marks: MarkupMark[]
tokenHandlers: Record<string, (state: MarkdownParseState, tok: Token) => void>
constructor (
tokenHandlers: Record<string, (state: MarkdownParseState, tok: Token) => void>,
readonly refUrl: string,
readonly imageUrl: string
) {
this.stack = [{ type: MarkupNodeType.doc, attrs: {}, content: [] }]
this.marks = []
this.tokenHandlers = tokenHandlers
}
top (): StateElement | undefined {
return this.stack[this.stack.length - 1]
}
push (elt: MarkupNode): void {
if (this.stack.length > 0) {
const tt = this.top()
tt?.content.push(elt)
}
}
mergeWithLast (nodes: MarkupNode[], node: MarkupNode): boolean {
const last = nodes[nodes.length - 1]
let merged: MarkupNode | undefined
if (last !== undefined && (merged = maybeMerge(last, node)) !== undefined) {
nodes[nodes.length - 1] = merged
return true
}
return false
}
// : (string)
// Adds the given text to the current position in the document,
// using the current marks as styling.
addText (text?: string): void {
const top = this.top()
if (text === undefined || top === undefined || text.length === 0) {
return
}
const node: MarkupNode = {
type: MarkupNodeType.text,
text
}
if (this.marks !== undefined) {
node.marks = this.marks
}
const nodes = top.content
if (!this.mergeWithLast(nodes, node)) {
nodes.push(node)
}
}
addAttr (key: string, value: AttrValue): void {
const top = this.top()
if (top === undefined) {
return
}
top.attrs[key] = value
}
// : (Mark)
// Adds the given mark to the set of active marks.
openMark (mark: MarkupMark): void {
this.marks = addToSet(mark, this.marks)
}
// : (Mark)
// Removes the given mark from the set of active marks.
closeMark (mark: MarkupMarkType): void {
this.marks = removeFromSet(mark, this.marks)
}
parseTokens (toks: Token[] | null): void {
const _toks = [...(toks ?? [])]
while (_toks.length > 0) {
const tok = _toks.shift()
if (tok === undefined) {
break
}
// Check if we need to merge some content into
// Merge <sub> </sub> into one html token
if (tok.type === 'html_inline' && tok.content.trim() === '<sub>') {
while (_toks.length > 0) {
const _tok = _toks.shift()
if (_tok !== undefined) {
tok.content += _tok.content
if (_tok.type === 'html_inline' && _tok.content.trim() === '</sub>') {
break
}
}
}
}
const handler = this.tokenHandlers[tok.type]
if (handler === undefined) {
throw new Error(`Token type '${String(tok.type)}' not supported by Markdown parser`)
}
handler(this, tok)
}
}
// : (NodeType, ?Object, ?[Node]) → ?Node
// Add a node at the current position.
addNode (type: MarkupNodeType, attrs: Attrs, content: MarkupNode[] = []): MarkupNode {
const node: MarkupNode = { type, content }
if (Object.keys(attrs ?? {}).length > 0) {
node.attrs = attrs
}
if (this.marks.length > 0) {
node.marks = this.marks
}
this.push(node)
return node
}
// : (NodeType, ?Object)
// Wrap subsequent content in a node of the given type.
openNode (type: MarkupNodeType, attrs: Attrs): void {
this.stack.push({ type, attrs, content: [] })
}
// : () → ?Node
// Close and return the node that is currently on top of the stack.
closeNode (): MarkupNode {
if (this.marks.length > 0) this.marks = []
const info = this.stack.pop()
if (info !== undefined) {
return this.addNode(info.type, info.attrs, info.content)
}
return { type: MarkupNodeType.doc }
}
}
function attrs (
spec: ParsingBlockRule | ParsingMarkRule | ParsingNodeRule,
token: Token,
state: MarkdownParseState
): Attrs {
return spec.getAttrs?.(token, state) ?? {}
}
// Code content is represented as a single token with a `content`
// property in Markdown-it.
function noCloseToken (spec: ParsingBlockRule | ParsingMarkRule, type: string): boolean {
return (spec.noCloseToken ?? false) || ['code_inline', 'code_block', 'fence'].indexOf(type) > 0
}
function withoutTrailingNewline (str: string): string {
return str[str.length - 1] === '\n' ? str.slice(0, str.length - 1) : str
}
function addSpecBlock (handlers: HandlersRecord, spec: ParsingBlockRule, type: string, specBlock: MarkupNodeType): void {
if (noCloseToken(spec, type)) {
handlers[type] = newSimpleBlockHandler(specBlock, spec)
} else {
handlers[type + '_open'] = (state, tok) => {
state.openNode(specBlock, attrs(spec, tok, state))
if (spec.wrapContent === true) {
state.openNode(MarkupNodeType.paragraph, {})
}
}
handlers[type + '_close'] = (state) => {
if (spec.wrapContent === true) {
state.closeNode()
}
state.closeNode()
}
}
}
function newSimpleBlockHandler (specBlock: MarkupNodeType, spec: ParsingBlockRule): HandlerRecord {
return (state, tok) => {
state.openNode(specBlock, attrs(spec, tok, state))
state.addText(withoutTrailingNewline(tok.content))
state.closeNode()
}
}
function addSpecMark (handlers: HandlersRecord, spec: ParsingMarkRule, type: string, specMark: MarkupMarkType): void {
if (noCloseToken(spec, type)) {
handlers[type] = newSimpleMarkHandler(spec, specMark)
} else {
handlers[type + '_open'] = (state, tok) => {
state.openMark({ type: specMark, attrs: attrs(spec, tok, state) })
}
handlers[type + '_close'] = (state) => {
state.closeMark(specMark)
}
}
}
function addSpecialRule (handlers: HandlersRecord, spec: ParsingSpecialRule, type: string): void {
handlers[type + '_open'] = (state, tok) => {
const type = spec.type(state, tok)
if (type.node) {
state.openNode(type.type as MarkupNodeType, spec.getAttrs?.(tok, state) ?? {})
} else {
state.openMark({ type: type.type as MarkupMarkType, attrs: spec.getAttrs?.(tok, state) ?? {} })
}
}
handlers[type + '_close'] = (state, tok) => {
const type = spec.type(state, tok)
if (type.node) {
state.closeNode()
} else {
state.closeMark(type.type as MarkupMarkType)
}
}
}
function addIgnoreRule (handlers: HandlersRecord, spec: ParsingIgnoreRule, type: string): void {
handlers[type + '_open'] = (state, tok) => {}
handlers[type + '_close'] = (state, tok) => {}
}
function newSimpleMarkHandler (spec: ParsingMarkRule, specMark: MarkupMarkType): HandlerRecord {
return (state: MarkdownParseState, tok: Token): void => {
state.openMark({ attrs: attrs(spec, tok, state), type: specMark })
state.addText(withoutTrailingNewline(tok.content))
state.closeMark(specMark)
}
}
function tokenHandlers (
tokensBlock: Record<string, ParsingBlockRule>,
tokensNode: Record<string, ParsingNodeRule>,
tokensMark: Record<string, ParsingMarkRule>,
specialRules: Record<string, ParsingSpecialRule>,
ignoreRules: Record<string, ParsingIgnoreRule>,
extensions: Extensions
): HandlersRecord {
const handlers: HandlersRecord = {}
Object.entries(tokensBlock).forEach(([type, spec]) => {
addSpecBlock(handlers, spec, type, spec.block)
})
Object.entries(tokensNode).forEach(([type, spec]) => {
addSpecNode(handlers, type, spec)
})
Object.entries(tokensMark).forEach(([type, spec]) => {
addSpecMark(handlers, spec, type, spec.mark)
})
Object.entries(specialRules).forEach(([type, spec]) => {
addSpecialRule(handlers, spec, type)
})
Object.entries(ignoreRules).forEach(([type, spec]) => {
addIgnoreRule(handlers, spec, type)
})
handlers.html_inline = (state: MarkdownParseState, tok: Token) => {
try {
const model = htmlToJSON(tok.content, extensions)
if (model.content !== undefined) {
// unwrap content from wrapping paragraph
const shouldUnwrap =
model.content.length === 1 &&
model.content[0].type === MarkupNodeType.paragraph &&
state.top()?.type === MarkupNodeType.paragraph
const content = messageContent(shouldUnwrap ? model.content[0] : model)
for (const c of content) {
state.push(c)
}
}
} catch (err: any) {
console.error(err)
state.addText(tok.content)
}
}
handlers.html_block = (state: MarkdownParseState, tok: Token) => {
try {
const model = htmlToJSON(tok.content, extensions)
const content = messageContent(model)
for (const c of content) {
state.push(c)
}
} catch (err: any) {
console.error(err)
state.addText(tok.content)
}
}
addTextHandlers(handlers)
return handlers
}
function addTextHandlers (handlers: HandlersRecord): void {
handlers.text = (state, tok) => {
state.addText(tok.content)
}
handlers.inline = (state, tok) => {
state.parseTokens(tok.children)
}
handlers.softbreak = (state) => {
state.addText('\n')
}
}
function addSpecNode (handlers: HandlersRecord, type: string, spec: ParsingNodeRule): void {
handlers[type] = (state: MarkdownParseState, tok: Token) => state.addNode(spec.node, attrs(spec, tok, state))
}
function tokAttrGet (token: Token, name: string): string | undefined {
const attr = token.attrGet(name)
if (attr != null) {
return attr
}
// try iterate attrs
for (const [k, v] of token.attrs ?? []) {
if (k === name) {
return v
}
}
}
function tokToAttrs (token: Token, ...names: string[]): Record<string, string> {
const result: Record<string, string> = {}
for (const name of names) {
const attr = token.attrGet(name)
if (attr !== null) {
result[name] = attr
}
}
return result
}
function todoItemMetaAttrsGet (tok: Token): Record<string, string> {
const userid = tokAttrGet(tok, 'userid')
const todoid = tokAttrGet(tok, 'todoid')
const result: Record<string, string> = {}
if (userid !== undefined) {
result.userid = userid
}
if (todoid !== undefined) {
result.todoid = todoid
}
return result
}
// ::- A configuration of a Markdown parser. Such a parser uses
const tokensBlock: Record<string, ParsingBlockRule> = {
blockquote: { block: MarkupNodeType.blockquote },
paragraph: { block: MarkupNodeType.paragraph },
list_item: { block: MarkupNodeType.list_item },
task_item: { block: MarkupNodeType.taskItem, getAttrs: (tok) => ({ 'data-type': 'taskItem' }) },
bullet_list: { block: MarkupNodeType.bullet_list },
todo_list: { block: MarkupNodeType.todoList },
todo_item: {
block: MarkupNodeType.todoItem,
getAttrs: (tok) => ({
checked: tokAttrGet(tok, 'checked') === 'true',
...todoItemMetaAttrsGet(tok)
})
},
ordered_list: {
block: MarkupNodeType.ordered_list,
getAttrs: (tok: Token) => ({ order: tokAttrGet(tok, 'start') ?? '1' })
},
task_list: {
block: MarkupNodeType.taskList,
getAttrs: (tok: Token) => ({ order: tokAttrGet(tok, 'start') ?? '1', 'data-type': 'taskList' })
},
heading: {
block: MarkupNodeType.heading,
getAttrs: (tok: Token) => ({ level: Number(tok.tag.slice(1)) })
},
code_block: {
block: MarkupNodeType.code_block,
getAttrs: (tok: Token) => {
return { language: tok.info ?? '' }
},
noCloseToken: true
},
fence: {
block: MarkupNodeType.code_block,
getAttrs: (tok: Token) => {
return { language: tok.info ?? '' }
},
noCloseToken: true
},
sub: {
block: MarkupNodeType.code_block,
getAttrs: (tok: Token) => {
return { language: tok.info ?? '' }
},
noCloseToken: false
},
table: {
block: MarkupNodeType.table,
noCloseToken: false
},
th: {
block: MarkupNodeType.table_header,
getAttrs: (tok: Token) => {
return {
colspan: Number(tok.attrGet('colspan') ?? '1'),
rowspan: Number(tok.attrGet('rowspan') ?? '1')
}
},
wrapContent: true,
noCloseToken: false
},
tr: {
block: MarkupNodeType.table_row,
noCloseToken: false
},
td: {
block: MarkupNodeType.table_cell,
getAttrs: (tok: Token) => {
return {
colspan: Number(tok.attrGet('colspan') ?? '1'),
rowspan: Number(tok.attrGet('rowspan') ?? '1')
}
},
wrapContent: true,
noCloseToken: false
}
}
const tokensNode: Record<string, ParsingNodeRule> = {
hr: { node: MarkupNodeType.horizontal_rule },
image: {
node: MarkupNodeType.image,
getAttrs: (tok: Token, state) => {
const result = tokToAttrs(tok, 'src', 'title', 'alt', 'data')
if (tok.content !== '' && (result.alt === '' || result.alt == null)) {
result.alt = tok.content
}
if (result.src.startsWith(state.imageUrl)) {
const url = new URL(result.src)
result['data-type'] = 'image'
const file = url.searchParams.get('file')
if (file != null) {
result['file-id'] = file
}
const width = url.searchParams.get('width')
if (width != null) {
result.width = width
}
const height = url.searchParams.get('height')
if (height != null) {
result.height = height
}
}
return result
}
},
hardbreak: { node: MarkupNodeType.hard_break }
}
const tokensMark: Record<string, ParsingMarkRule> = {
em: { mark: MarkupMarkType.em },
bold: { mark: MarkupMarkType.bold },
strong: { mark: MarkupMarkType.bold },
s: { mark: MarkupMarkType.strike },
u: { mark: MarkupMarkType.underline },
code_inline: {
mark: MarkupMarkType.code,
noCloseToken: true
}
}
const specialRule: Record<string, ParsingSpecialRule> = {
link: {
type: (state, tok) => {
const href = tok.attrGet('href')
if ((href?.startsWith(state.refUrl) ?? false) || state.stack[state.stack.length - 1]?.type === 'reference') {
return { type: MarkupNodeType.reference, node: true }
}
return { type: MarkupMarkType.link, node: false, close: true }
},
getAttrs: (tok: Token, state) => {
const attrs = tokToAttrs(tok, 'href', 'title')
if (attrs.href !== undefined) {
try {
const url = new URL(attrs.href)
if (attrs.href.startsWith(state.refUrl) ?? false) {
return {
label: url.searchParams?.get('label') ?? '',
id: url.searchParams?.get('_id') ?? '',
objectclass: url.searchParams?.get('_class') ?? ''
}
}
} catch (err: any) {
// ignore
}
}
return attrs
}
}
}
const ignoreRule: Record<string, ParsingIgnoreRule> = {
thead: {},
tbody: {}
}
export const isInlineToken = (token?: Token): boolean => token?.type === 'inline'
export const isParagraphToken = (token?: Token): boolean => token?.type === 'paragraph_open'
export const isListItemToken = (token?: Token): boolean => token?.type === 'list_item_open'
export interface TaskListEnv {
tasklists: number
}
interface TaskListStateCore extends StateCore {
env: TaskListEnv
}
// The leading whitespace in a list item (token.content) is already trimmed off by markdown-it.
// The regex below checks for '[ ] ' or '[x] ' or '[X] ' at the start of the string token.content,
// where the space is either a normal space or a non-breaking space (character 160 = \u00A0).
const startsWithTodoMarkdown = (token: Token): boolean => /^\[[xX \u00A0]\][ \u00A0]/.test(token.content)
const isCheckedTodoItem = (token: Token): boolean => /^\[[xX]\][ \u00A0]/.test(token.content)
const isTodoListItemInline = (tokens: Token[], index: number): boolean =>
isInlineToken(tokens[index]) &&
isParagraphToken(tokens[index - 1]) &&
isListItemToken(tokens[index - 2]) &&
startsWithTodoMarkdown(tokens[index])
export class MarkdownParser {
tokenizer: MarkdownIt
tokenHandlers: Record<string, (state: MarkdownParseState, tok: Token) => void>
constructor (
readonly extensions: Extensions,
readonly refUrl: string,
readonly imageUrl: string
) {
this.tokenizer = MarkdownIt('default', {
html: true
})
this.tokenizer.core.ruler.after('inline', 'task_list', this.taskListRule)
this.tokenHandlers = tokenHandlers(tokensBlock, tokensNode, tokensMark, specialRule, ignoreRule, extensions)
}
parse (text: string): MarkupNode {
const state = new MarkdownParseState(this.tokenHandlers, this.refUrl, this.imageUrl)
let doc: MarkupNode
const tokens = this.tokenizer.parse(text, {})
state.parseTokens(tokens)
do {
doc = state.closeNode()
} while (state.stack.length > 0)
return doc
}
taskListRule: RuleCore = (state: TaskListStateCore): boolean => {
const tokens = state.tokens
interface TodoListItemDescriptor {
start?: number
end?: number
}
let todoListStartIdx: number | undefined
let todoListItems: TodoListItemDescriptor[] = []
let todoListItem: TodoListItemDescriptor | undefined
let isTodoList = false
for (let i = 0; i < tokens.length; i++) {
if (tokens[i].type === 'bullet_list_open') {
todoListStartIdx = i
isTodoList = true
}
if (tokens[i].type === 'list_item_open') {
todoListItem = {
start: i
}
}
if (tokens[i].type === 'inline') {
if (todoListItem === undefined || !isTodoListItemInline(tokens, i)) {
isTodoList = false
}
}
if (tokens[i].type === 'list_item_close' && todoListItem !== undefined) {
todoListItem.end = i
if (isTodoList) {
todoListItems.push(todoListItem)
}
todoListItem = undefined
}
if (tokens[i].type === 'bullet_list_close') {
if (isTodoList && todoListStartIdx !== undefined) {
// Transform tokens
tokens[todoListStartIdx].type = 'todo_list_open'
tokens[i].type = 'todo_list_close'
for (const item of todoListItems) {
if (item.start !== undefined && item.end !== undefined) {
tokens[item.start].type = 'todo_item_open'
tokens[item.end].type = 'todo_item_close'
const inline = tokens[item.start + 2]
if (tokens[item.start].attrs == null) {
tokens[item.start].attrs = []
}
if (isCheckedTodoItem(inline)) {
;(tokens[item.start].attrs as any).push(['checked', 'true'])
}
if (inline.children !== null) {
const newContent = inline.children[0].content.slice(4)
if (newContent.length > 0) {
inline.children[0].content = newContent
} else {
inline.children = inline.children.slice(1)
}
const metaTok = inline.children.find(
(tok) => tok.type === 'html_inline' && tok.content.startsWith('<!--') && tok.content.endsWith('-->')
)
if (metaTok !== undefined) {
const metaValues = metaTok.content.slice(5, -4).split(',')
for (const mv of metaValues) {
if (mv.startsWith('todoid')) {
;(tokens[item.start].attrs as any).push(['todoid', mv.slice(7)])
}
if (mv.startsWith('userid')) {
;(tokens[item.start].attrs as any).push(['userid', mv.slice(7)])
}
}
}
}
}
}
}
todoListStartIdx = undefined
todoListItems = []
isTodoList = false
}
}
return true
}
}
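
A minimal sketch of using MarkdownParser directly (parseMessageMarkdown in this package wraps the same flow); the import paths and URLs are placeholders:

import { defaultExtensions } from '../extensions'
import { MarkupNodeType } from '../markup/model'
import { MarkdownParser } from './parser'

const parser = new MarkdownParser(defaultExtensions, 'ref://', 'http://localhost/image')

// The custom task_list rule turns '[ ]' / '[x]' bullet items into todoList / todoItem nodes.
const markup = parser.parse('- [ ] write init script\n- [x] review PR')
console.log(markup.type === MarkupNodeType.doc) // true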

View File

@@ -0,0 +1,727 @@
import { generateHTML } from '@tiptap/html'
import { isInSet, markEq } from './marks'
import { messageContent, nodeAttrs } from './node'
import { MarkupMark, MarkupNode, MarkupNodeType } from '../markup/model'
import { defaultExtensions } from '../extensions'
type FirstDelim = (i: number, attrs?: Record<string, any>) => string
interface IState {
wrapBlock: (delim: string, firstDelim: string | null, node: MarkupNode, f: () => void) => void
flushClose: (size: number) => void
atBlank: () => void
ensureNewLine: () => void
write: (content: string) => void
closeBlock: (node: any) => void
text: (text: string, escape?: boolean) => void
render: (node: MarkupNode, parent: MarkupNode, index: number) => void
renderContent: (parent: MarkupNode) => void
renderInline: (parent: MarkupNode) => void
renderList: (node: MarkupNode, delim: string, firstDelim: FirstDelim) => void
esc: (str: string, startOfLine?: boolean) => string
quote: (str: string) => string
repeat: (str: string, n: number) => string
markString: (mark: MarkupMark, open: boolean, parent: MarkupNode, index: number) => string
refUrl: string
imageUrl: string
inAutolink?: boolean
renderAHref?: boolean
}
type NodeProcessor = (state: IState, node: MarkupNode, parent: MarkupNode, index: number) => void
interface InlineState {
active: MarkupMark[]
trailing: string
parent: MarkupNode
node?: MarkupNode
marks: MarkupMark[]
}
// *************************************************************
function backticksFor (side: boolean): string {
return side ? '`' : '`'
}
function isPlainURL (link: MarkupMark, parent: MarkupNode, index: number): boolean {
if (link.attrs.title !== undefined || !/^\w+:/.test(link.attrs.href)) return false
const content = parent.content?.[index]
if (content === undefined) {
return false
}
if (
content.type !== MarkupNodeType.text ||
content.text !== link.attrs.href ||
content.marks?.[content.marks.length - 1] !== link
) {
return false
}
return index === (parent.content?.length ?? 0) - 1 || !isInSet(link, parent.content?.[index + 1]?.marks ?? [])
}
const formatTodoItem: FirstDelim = (i, attrs) => {
const meta =
attrs?.todoid !== undefined && attrs?.userid !== undefined
? `<!-- todoid=${attrs?.todoid},userid=${attrs?.userid} -->`
: ''
return `* [${attrs?.checked === true ? 'x' : ' '}] ${meta}`
}
// *************************************************************
export const storeNodes: Record<string, NodeProcessor> = {
blockquote: (state, node) => {
state.wrapBlock('> ', null, node, () => {
state.renderContent(node)
})
},
codeBlock: (state, node) => {
state.write('```' + `${nodeAttrs(node).language ?? ''}` + '\n')
// TODO: Check for node.textContent
state.renderInline(node)
// state.text(node.text ?? '', false)
state.ensureNewLine()
state.write('```')
state.closeBlock(node)
},
heading: (state, node) => {
const attrs = nodeAttrs(node)
state.write(state.repeat('#', attrs.level !== undefined ? Number(attrs.level) : 1) + ' ')
state.renderInline(node)
state.closeBlock(node)
},
horizontalRule: (state, node) => {
state.write(`${nodeAttrs(node).markup ?? '---'}`)
state.closeBlock(node)
},
bulletList: (state, node) => {
state.renderList(node, ' ', () => `${nodeAttrs(node).bullet ?? '*'}` + ' ')
},
taskList: (state, node) => {
state.renderList(node, ' ', () => '* [ ]' + ' ')
},
todoList: (state, node) => {
state.renderList(node, ' ', formatTodoItem)
},
orderedList: (state, node) => {
let start = 1
if (nodeAttrs(node).order !== undefined) {
start = Number(nodeAttrs(node).order)
}
const maxW = String(start + messageContent(node).length - 1).length
const space = state.repeat(' ', maxW + 2)
state.renderList(node, space, (i: number) => {
const nStr = String(start + i)
return state.repeat(' ', maxW - nStr.length) + nStr + '. '
})
},
listItem: (state, node) => {
state.renderContent(node)
},
taskItem: (state, node) => {
state.renderContent(node)
},
todoItem: (state, node) => {
state.renderContent(node)
},
paragraph: (state, node) => {
state.renderInline(node)
state.closeBlock(node)
},
subLink: (state, node) => {
state.write('<sub>')
state.renderAHref = true
state.renderInline(node)
state.renderAHref = false
state.write('</sub>')
},
image: (state, node) => {
const attrs = nodeAttrs(node)
if (attrs['file-id'] != null) {
// Convert image to fileid format
state.write(
'![' +
state.esc(`${attrs.alt ?? ''}`) +
'](' +
(state.imageUrl +
`${attrs['file-id']}` +
(attrs.width != null ? '&width=' + state.esc(`${attrs.width}`) : '') +
(attrs.height != null ? '&height=' + state.esc(`${attrs.height}`) : '')) +
(attrs.title != null ? ' ' + state.quote(`${attrs.title}`) : '') +
')'
)
} else {
if (attrs.width != null || attrs.height != null) {
// state.write(`<img width="446" alt="{alt}" src="{src}">`)
state.write(
'<img' +
(attrs.width != null ? ` width="${state.esc(`${attrs.width}`)}"` : '') +
(attrs.height != null ? ` height="${state.esc(`${attrs.height}`)}"` : '') +
` src="${state.esc(`${attrs.src}`)}"` +
(attrs.alt != null ? ` alt="${state.esc(`${attrs.alt}`)}"` : '') +
(attrs.title != null ? '>' + state.quote(`${attrs.title}`) + '</img>' : '>')
)
} else {
state.write(
'![' +
state.esc(`${attrs.alt ?? ''}`) +
'](' +
state.esc(`${attrs.src}`) +
(attrs.title != null ? ' ' + state.quote(`${attrs.title}`) : '') +
')'
)
}
}
},
reference: (state, node) => {
const attrs = nodeAttrs(node)
let url = state.refUrl
if (!url.includes('?')) {
url += '?'
} else {
url += '&'
}
state.write(
'[' +
state.esc(`${attrs.label ?? ''}`) +
'](' +
`${url}${makeQuery({
_class: attrs.objectclass,
_id: attrs.id,
label: attrs.label
})}` +
(attrs.title !== undefined ? ' ' + state.quote(`${attrs.title}`) : '') +
')'
)
},
hardBreak: (state, node, parent, index) => {
const content = messageContent(parent)
for (let i = index + 1; i < content.length; i++) {
if (content[i].type !== node.type) {
state.write('\\\n')
return
}
}
},
text: (state, node) => {
// Check if text has a reference mark, in this case we need to remove [[]]
state.text(node.text ?? '')
},
table: (state, node) => {
const html = generateHTML(node, defaultExtensions)
state.write('<table><tbody>' + html + '</tbody></table>')
state.closeBlock(node)
}
}
interface MarkProcessor {
open: ((_state: IState, mark: MarkupMark, parent: MarkupNode, index: number) => string) | string
close: ((_state: IState, mark: MarkupMark, parent: MarkupNode, index: number) => string) | string
mixable: boolean
expelEnclosingWhitespace: boolean
escape: boolean
}
export const storeMarks: Record<string, MarkProcessor> = {
em: {
open: '*',
close: '*',
mixable: true,
expelEnclosingWhitespace: true,
escape: true
},
italic: {
open: '*',
close: '*',
mixable: true,
expelEnclosingWhitespace: true,
escape: true
},
bold: {
open: '**',
close: '**',
mixable: true,
expelEnclosingWhitespace: true,
escape: true
},
strong: {
open: '**',
close: '**',
mixable: true,
expelEnclosingWhitespace: true,
escape: true
},
strike: {
open: '~~',
close: '~~',
mixable: true,
expelEnclosingWhitespace: true,
escape: true
},
underline: {
open: '<ins>',
close: '</ins>',
mixable: true,
expelEnclosingWhitespace: true,
escape: true
},
link: {
open: (state, mark, parent, index) => {
if (state.renderAHref === true) {
return `<a href="${encodeURI(mark.attrs.href)}">`
} else {
state.inAutolink = isPlainURL(mark, parent, index)
return state.inAutolink ? '<' : '['
}
},
close: (state, mark, parent, index) => {
if (state.renderAHref === true) {
return '</a>'
} else {
const { inAutolink } = state
state.inAutolink = undefined
return inAutolink === true
? '>'
: '](' +
// eslint-disable-next-line
(mark.attrs.href as string).replace(/[\(\)"]/g, '\\$&') +
(mark.attrs.title !== undefined ? ` "${(mark.attrs.title as string).replace(/"/g, '\\"')}"` : '') +
')'
}
},
mixable: false,
expelEnclosingWhitespace: false,
escape: true
},
code: {
open: (state, mark, parent, index) => {
return backticksFor(false)
},
close: (state, mark, parent, index) => {
return backticksFor(true)
},
mixable: false,
expelEnclosingWhitespace: false,
escape: false
}
}
export interface StateOptions {
tightLists: boolean
refUrl: string
imageUrl: string
}
export class MarkdownState implements IState {
nodes: Record<string, NodeProcessor>
marks: Record<string, MarkProcessor>
delim: string
out: string
closed: boolean
closedNode?: MarkupNode
inTightList: boolean
options: StateOptions
refUrl: string
imageUrl: string
constructor (
nodes = storeNodes,
marks = storeMarks,
options: StateOptions = { tightLists: true, refUrl: 'ref://', imageUrl: 'http://' }
) {
this.nodes = nodes
this.marks = marks
this.delim = this.out = ''
this.closed = false
this.inTightList = false
this.refUrl = options.refUrl
this.imageUrl = options.imageUrl
this.options = options
}
flushClose (size: number): void {
if (this.closed) {
if (!this.atBlank()) this.out += '\n'
if (size > 1) {
this.addDelim(size)
}
this.closed = false
}
}
private addDelim (size: number): void {
let delimMin = this.delim
const trim = /\s+$/.exec(delimMin)
if (trim !== null) {
delimMin = delimMin.slice(0, delimMin.length - trim[0].length)
}
for (let i = 1; i < size; i++) {
this.out += delimMin + '\n'
}
}
wrapBlock (delim: string, firstDelim: string | null, node: MarkupNode, f: () => void): void {
const old = this.delim
this.write(firstDelim ?? delim)
this.delim += delim
f()
this.delim = old
this.closeBlock(node)
}
atBlank (): boolean {
return /(^|\n)$/.test(this.out)
}
// :: ()
// Ensure the current content ends with a newline.
ensureNewLine (): void {
if (!this.atBlank()) this.out += '\n'
}
// :: (?string)
// Prepare the state for writing output (closing closed paragraphs,
// adding delimiters, and so on), and then optionally add content
// (unescaped) to the output.
write (content: string): void {
this.flushClose(2)
if (this.delim !== undefined && this.atBlank()) this.out += this.delim
if (content.length > 0) this.out += content
}
// :: (Node)
// Close the block for the given node.
closeBlock (node: MarkupNode): void {
this.closedNode = node
this.closed = true
}
// :: (string, ?bool)
// Add the given text to the document. When escape is not `false`,
// it will be escaped.
text (text: string, escape = false): void {
const lines = text.split('\n')
for (let i = 0; i < lines.length; i++) {
const startOfLine = this.atBlank() || this.closed
this.write('')
this.out += escape ? this.esc(lines[i], startOfLine) : lines[i]
if (i !== lines.length - 1) this.out += '\n'
}
}
// :: (Node)
// Render the given node as a block.
render (node: MarkupNode, parent: MarkupNode, index: number): void {
if (this.nodes[node.type] === undefined) {
throw new Error('Token type `' + node.type + '` not supported by Markdown renderer')
}
this.nodes[node.type](this, node, parent, index)
}
// :: (Node)
// Render the contents of `parent` as block nodes.
renderContent (parent: MarkupNode): void {
messageContent(parent).forEach((node: MarkupNode, i: number) => {
this.render(node, parent, i)
})
}
reorderMixableMark (state: InlineState, mark: MarkupMark, i: number, len: number): void {
for (let j = 0; j < state.active.length; j++) {
const other = state.active[j]
if (!this.marks[other.type].mixable || this.checkSwitchMarks(i, j, state, mark, other, len)) {
break
}
}
}
reorderMixableMarks (state: InlineState, len: number): void {
// Try to reorder 'mixable' marks, such as em and strong, which
// in Markdown may be opened and closed in different order, so
// that order of the marks for the token matches the order in
// active.
for (let i = 0; i < len; i++) {
const mark = state.marks[i]
if (!this.marks[mark.type].mixable) break
this.reorderMixableMark(state, mark, i, len)
}
}
private checkSwitchMarks (
i: number,
j: number,
state: InlineState,
mark: MarkupMark,
other: MarkupMark,
len: number
): boolean {
if (!markEq(mark, other) || i === j) {
return false
}
this.switchMarks(i, j, state, mark, len)
return true
}
private switchMarks (i: number, j: number, state: InlineState, mark: MarkupMark, len: number): void {
if (i > j) {
state.marks = state.marks
.slice(0, j)
.concat(mark)
.concat(state.marks.slice(j, i))
.concat(state.marks.slice(i + 1, len))
}
if (j > i) {
state.marks = state.marks
.slice(0, i)
.concat(state.marks.slice(i + 1, j))
.concat(mark)
.concat(state.marks.slice(j, len))
}
}
renderNodeInline (state: InlineState, index: number): void {
state.marks = state.node?.marks ?? []
this.updateHardBreakMarks(state, index)
const leading = this.adjustLeading(state)
const inner: MarkupMark | undefined = state.marks.length > 0 ? state.marks[state.marks.length - 1] : undefined
const noEsc = inner !== undefined && !(this.marks[inner.type]?.escape ?? false)
const len = state.marks.length - (noEsc ? 1 : 0)
this.reorderMixableMarks(state, len)
// Find the prefix of the mark set that didn't change
this.checkCloseMarks(state, len, index)
// Output any previously expelled trailing whitespace outside the marks
if (leading !== '') this.text(leading)
// Open the marks that need to be opened
this.checkOpenMarks(state, len, index, inner, noEsc)
}
private checkOpenMarks (
state: InlineState,
len: number,
index: number,
inner: MarkupMark | undefined,
noEsc: boolean
): void {
if (state.node !== undefined) {
this.updateActiveMarks(state, len, index)
// Render the node. Special case code marks, since their content
// may not be escaped.
if (this.isNoEscapeRequire(state.node, inner, noEsc, state)) {
this.renderMarkText(inner as MarkupMark, state, index)
} else {
this.render(state.node, state.parent, index)
}
}
}
private isNoEscapeRequire (
node: MarkupNode,
inner: MarkupMark | undefined,
noEsc: boolean,
state: InlineState
): boolean {
return inner !== undefined && noEsc && node.type === MarkupNodeType.text
}
private renderMarkText (inner: MarkupMark, state: InlineState, index: number): void {
this.text(
this.markString(inner, true, state.parent, index) +
(state.node?.text as string) +
this.markString(inner, false, state.parent, index + 1),
false
)
}
private updateActiveMarks (state: InlineState, len: number, index: number): void {
while (state.active.length < len) {
const add = state.marks[state.active.length]
state.active.push(add)
this.text(this.markString(add, true, state.parent, index), false)
}
}
private checkCloseMarks (state: InlineState, len: number, index: number): void {
let keep = 0
while (keep < Math.min(state.active.length, len) && markEq(state.marks[keep], state.active[keep])) {
++keep
}
// Close the marks that need to be closed
while (keep < state.active.length) {
const mark = state.active.pop()
if (mark !== undefined) {
this.text(this.markString(mark, false, state.parent, index), false)
}
}
}
private adjustLeading (state: InlineState): string {
let leading = state.trailing
state.trailing = ''
// If whitespace has to be expelled from the node, adjust
// leading and trailing accordingly.
const node = state?.node
if (this.isText(node) && this.isMarksHasExpelEnclosingWhitespace(state)) {
const match = /^(\s*)(.*?)(\s*)$/m.exec(node?.text ?? '')
if (match !== null) {
const [leadMatch, innerMatch, trailMatch] = [match[1], match[2], match[3]]
leading += leadMatch
state.trailing = trailMatch
this.adjustLeadingTextNode(leadMatch, trailMatch, state, innerMatch, node as MarkupNode)
}
}
return leading
}
private isMarksHasExpelEnclosingWhitespace (state: InlineState): boolean {
return state.marks.some((mark) => this.marks[mark.type]?.expelEnclosingWhitespace)
}
private adjustLeadingTextNode (
lead: string,
trail: string,
state: InlineState,
inner: string,
node: MarkupNode
): void {
if (lead !== '' || trail !== '') {
state.node = inner !== undefined ? { ...node, text: inner } : undefined
if (state.node === undefined) {
state.marks = state.active
}
}
}
private updateHardBreakMarks (state: InlineState, index: number): void {
if (state.node !== undefined && state.node.type === MarkupNodeType.hard_break) {
state.marks = this.filterHardBreakMarks(state.marks, index, state)
}
}
private filterHardBreakMarks (marks: MarkupMark[], index: number, state: InlineState): MarkupMark[] {
const content = state.parent.content ?? []
const next = content[index + 1]
if (!this.isHardbreakText(next)) {
return []
}
return marks.filter((m) => isInSet(m, next.marks ?? []))
}
private isHardbreakText (next?: MarkupNode): boolean {
return (
next !== undefined && (next.type !== MarkupNodeType.text || (next.text !== undefined && /\S/.test(next.text)))
)
}
private isText (node?: MarkupNode): boolean {
return node !== undefined && node.type === MarkupNodeType.text && node.text !== undefined
}
// :: (Node)
// Render the contents of `parent` as inline content.
renderInline (parent: MarkupNode): void {
const state: InlineState = { active: [], trailing: '', parent, marks: [] }
messageContent(parent).forEach((nde, index) => {
state.node = nde
this.renderNodeInline(state, index)
})
state.node = undefined
this.renderNodeInline(state, 0)
}
// :: (Node, string, (number) → string)
// Render a node's content as a list. `delim` should be the extra
// indentation added to all lines except the first in an item,
// `firstDelim` is a function going from an item index to a
// delimiter for the first line of the item.
renderList (node: MarkupNode, delim: string, firstDelim: FirstDelim): void {
this.flushListClose(node)
const isTight: boolean =
typeof node.attrs?.tight !== 'undefined' ? node.attrs.tight === 'true' : this.options.tightLists
const prevTight = this.inTightList
this.inTightList = isTight
messageContent(node).forEach((child, i) => {
this.renderListItem(node, child, i, isTight, delim, firstDelim)
})
this.inTightList = prevTight
}
renderListItem (
node: MarkupNode,
child: MarkupNode,
i: number,
isTight: boolean,
delim: string,
firstDelim: FirstDelim
): void {
if (i > 0 && isTight) this.flushClose(1)
this.wrapBlock(delim, firstDelim(i, node.content?.[i].attrs), node, () => {
this.render(child, node, i)
})
}
private flushListClose (node: MarkupNode): void {
if (this.closed && this.closedNode?.type === node.type) {
this.flushClose(3)
} else if (this.inTightList) {
this.flushClose(1)
}
}
// :: (string, ?bool) → string
// Escape the given string so that it can safely appear in Markdown
// content. If `startOfLine` is true, also escape characters that
// has special meaning only at the start of the line.
esc (str: string, startOfLine = false): string {
if (str == null) {
return ''
}
str = str.replace(/[`*\\~\[\]]/g, '\\$&') // eslint-disable-line
if (startOfLine) {
str = str.replace(/^[:#\-*+]/, '\\$&').replace(/^(\d+)\./, '$1\\.')
}
return str
}
quote (str: string): string {
const wrap = !(str?.includes('"') ?? false) ? '""' : !(str?.includes("'") ?? false) ? "''" : '()'
return wrap[0] + str + wrap[1]
}
// :: (string, number) → string
// Repeat the given string `n` times.
repeat (str: string, n: number): string {
let out = ''
for (let i = 0; i < n; i++) out += str
return out
}
// : (Mark, bool, string?) → string
// Get the markdown string for a given opening or closing mark.
markString (mark: MarkupMark, open: boolean, parent: MarkupNode, index: number): string {
const info = this.marks[mark.type]
const value = open ? info.open : info.close
return typeof value === 'string' ? value : value(this, mark, parent, index) ?? ''
}
}
function makeQuery (obj: Record<string, string | number | boolean | undefined>): string {
return Object.keys(obj)
.filter((it) => obj[it] != null)
.map(function (k) {
return encodeURIComponent(k) + '=' + encodeURIComponent(obj[k] as string | number | boolean)
})
.join('&')
}

View File

@@ -18,6 +18,7 @@ import { Node, Schema } from 'prosemirror-model'
import { prosemirrorJSONToYDoc, yDocToProsemirrorJSON } from 'y-prosemirror'
import { Doc, applyUpdate, encodeStateAsUpdate } from 'yjs'
import { defaultExtensions } from './extensions'
import { MarkupNode } from './markup/model'
/**
* Get ProseMirror node from Y.Doc content
@@ -114,3 +115,20 @@ export function updateYDocContent (
console.error(err)
}
}
/**
* Create Y.Doc
*
* @public
*/
export function YDocFromContent (content: MarkupNode, field: string, schema?: Schema, extensions?: Extensions): Doc {
schema ??= getSchema(extensions ?? defaultExtensions)
const res = new Doc({ gc: false })
const yDoc = prosemirrorJSONToYDoc(schema, content, field)
const update = encodeStateAsUpdate(yDoc)
applyUpdate(res, update)
return res
}
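
A hedged sketch that combines the markdown parser with the new helper; the import paths, field name, and image URL are assumptions:

import { parseMessageMarkdown } from './markdown'
import { YDocFromContent } from './ydoc'

// Parse markdown into a MarkupNode tree and materialize it as a collaborative Y.Doc
// using the default schema derived from defaultExtensions.
const content = parseMessageMarkdown('# Init script', 'http://localhost/image')
const ydoc = YDocFromContent(content, 'content')
// ydoc can now be stored or synced like any other collaborative document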

View File

@@ -95,6 +95,10 @@ export function serveAccount (
if (initWS !== undefined) {
setMetadata(toolPlugin.metadata.InitWorkspace, initWS)
}
const initScriptUrl = process.env.INIT_SCRIPT_URL
if (initScriptUrl !== undefined) {
setMetadata(toolPlugin.metadata.InitScriptURL, initScriptUrl)
}
setMetadata(toolPlugin.metadata.Endpoint, endpointUri)
setMetadata(toolPlugin.metadata.Transactor, transactorUri)
setMetadata(toolPlugin.metadata.UserAgent, 'AccountService')

View File

@@ -51,6 +51,9 @@
"@hcengineering/server-tool": "^0.6.0",
"@hcengineering/server-token": "^0.6.11",
"@hcengineering/analytics": "^0.6.0",
"@hcengineering/server-storage": "^0.6.0",
"@hcengineering/server-core": "^0.6.1",
"@hcengineering/server-pipeline": "^0.6.0",
"@hcengineering/model-all": "^0.6.0", "@hcengineering/model-all": "^0.6.0",
"node-fetch": "^2.6.6" "node-fetch": "^2.6.6"
} }

View File

@@ -24,6 +24,7 @@ import contact, {
Person,
PersonAccount
} from '@hcengineering/contact'
import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
import core, {
AccountRole,
BaseWorkspaceInfo,
@@ -43,16 +44,32 @@ import core, {
Version,
versionToString,
WorkspaceId,
WorkspaceIdWithUrl,
type Branding
} from '@hcengineering/core'
import { consoleModelLogger, MigrateOperation, ModelLogger } from '@hcengineering/model'
import platform, { getMetadata, PlatformError, Severity, Status, translate } from '@hcengineering/platform'
import { decodeToken, generateToken } from '@hcengineering/server-token'
import toolPlugin, { connect, initializeWorkspace, initModel, upgradeModel } from '@hcengineering/server-tool'
import toolPlugin, {
connect,
initializeWorkspace,
initModel,
updateModel,
prepareTools,
upgradeModel
} from '@hcengineering/server-tool'
import { pbkdf2Sync, randomBytes } from 'crypto'
import { Binary, Db, Filter, ObjectId, type MongoClient } from 'mongodb'
import fetch from 'node-fetch'
import { type StorageAdapter } from '../../core/types'
import {
DummyFullTextAdapter,
Pipeline,
PipelineFactory,
SessionContextImpl,
StorageConfiguration,
type StorageAdapter
} from '@hcengineering/server-core'
import { createIndexStages, createServerPipeline } from '@hcengineering/server-pipeline'
import { accountPlugin } from './plugin'
const WORKSPACE_COLLECTION = 'workspace'
@@ -940,17 +957,72 @@ export async function createWorkspace (
}
const model: Tx[] = []
try {
const wsUrl: WorkspaceIdWithUrl = {
name: workspaceInfo.workspace,
productId: workspaceInfo.productId,
workspaceName: workspaceInfo.workspaceName ?? '',
workspaceUrl: workspaceInfo.workspaceUrl ?? ''
}
const wsId = getWorkspaceId(workspaceInfo.workspace, productId)
await childLogger.withLog('init-workspace', {}, async (ctx) => {
await initModel(ctx, getTransactor(), wsId, txes, migrationOperation, ctxModellogger, async (value) => {
await initModel(ctx, wsId, txes, ctxModellogger, async (value) => {
await updateInfo({ createProgress: 10 + Math.round((Math.min(value, 100) / 100) * 20) })
await updateInfo({ createProgress: 10 + Math.round((Math.min(value, 100) / 100) * 10) })
})
})
await initializeWorkspace(ctx, branding, getTransactor(), wsId, ctxModellogger, async (value) => {
const { mongodbUri } = prepareTools([])
await updateInfo({ createProgress: 30 + Math.round((Math.min(value, 100) / 100) * 65) })
})
const storageConfig: StorageConfiguration = storageConfigFromEnv()
const storageAdapter = buildStorageFromConfig(storageConfig, mongodbUri)
try {
const factory: PipelineFactory = createServerPipeline(
ctx,
mongodbUri,
{
externalStorage: storageAdapter,
fullTextUrl: 'http://localost:9200',
indexParallel: 0,
indexProcessing: 0,
rekoniUrl: '',
usePassedCtx: true
},
{
fulltextAdapter: {
factory: async () => new DummyFullTextAdapter(),
url: '',
stages: (adapter, storage, storageAdapter, contentAdapter) =>
createIndexStages(
ctx.newChild('stages', {}),
wsUrl,
branding,
adapter,
storage,
storageAdapter,
contentAdapter,
0,
0
)
}
}
)
const pipeline = await factory(ctx, wsUrl, true, () => {}, null)
const client = new TxOperations(wrapPipeline(ctx, pipeline, wsUrl), core.account.System)
await updateModel(ctx, wsId, migrationOperation, client, ctxModellogger, async (value) => {
await updateInfo({ createProgress: 20 + Math.round((Math.min(value, 100) / 100) * 10) })
})
await initializeWorkspace(ctx, branding, wsUrl, storageAdapter, client, ctxModellogger, async (value) => {
await updateInfo({ createProgress: 30 + Math.round((Math.min(value, 100) / 100) * 70) })
})
await pipeline.close()
} finally {
await storageAdapter.close()
}
} catch (err: any) { } catch (err: any) {
Analytics.handleError(err) Analytics.handleError(err)
return { workspaceInfo, err, client: null as any } return { workspaceInfo, err, client: null as any }
@@ -969,6 +1041,47 @@ export async function createWorkspace (
   })
 }

+function wrapPipeline (ctx: MeasureContext, pipeline: Pipeline, wsUrl: WorkspaceIdWithUrl): Client {
+  const sctx = new SessionContextImpl(
+    ctx,
+    systemAccountEmail,
+    'backup',
+    true,
+    { targets: {}, txes: [] },
+    wsUrl,
+    null,
+    false
+  )
+  return {
+    findAll: async (_class, query, options) => {
+      return await pipeline.findAll(sctx, _class, query, options)
+    },
+    findOne: async (_class, query, options) => {
+      return (await pipeline.findAll(sctx, _class, query, { ...options, limit: 1 })).shift()
+    },
+    close: async () => {
+      await pipeline.close()
+    },
+    getHierarchy: () => {
+      return pipeline.storage.hierarchy
+    },
+    getModel: () => {
+      return pipeline.storage.modelDb
+    },
+    searchFulltext: async (query, options) => {
+      return {
+        docs: [],
+        total: 0
+      }
+    },
+    tx: async (tx) => {
+      return await pipeline.tx(sctx, tx)
+    },
+    notify: (...tx) => {}
+  }
+}
+
 /**
  * @public
  */
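Taken together, the hunks above move workspace creation off the external transactor: createWorkspace builds an in-process pipeline with a dummy full-text adapter, wraps it in a minimal Client via wrapPipeline, and drives initModel, updateModel and initializeWorkspace through that client while reporting progress in three bands. The sketch below only mirrors the createProgress arithmetic shown above; the helper name and Phase type are illustrative, not part of the commit.

// Illustrative only: initModel reports 10-20%, updateModel 20-30%,
// initializeWorkspace 30-100%, matching the updateInfo calls above.
type Phase = 'initModel' | 'updateModel' | 'initializeWorkspace'

function phaseProgress (phase: Phase, value: number): number {
  const v = Math.min(value, 100) / 100
  switch (phase) {
    case 'initModel':
      return 10 + Math.round(v * 10)
    case 'updateModel':
      return 20 + Math.round(v * 10)
    case 'initializeWorkspace':
      return 30 + Math.round(v * 70)
  }
}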


@@ -36,7 +36,8 @@
     "@types/ws": "^8.5.3",
     "jest": "^29.7.0",
     "ts-jest": "^29.1.1",
-    "@types/jest": "^29.5.5"
+    "@types/jest": "^29.5.5",
+    "@types/js-yaml": "^4.0.9"
   },
   "dependencies": {
     "mongodb": "^6.8.0",
@@ -54,7 +55,10 @@
     "@hcengineering/server": "^0.6.4",
     "@hcengineering/server-storage": "^0.6.0",
     "@hcengineering/mongo": "^0.6.1",
+    "@hcengineering/collaboration": "^0.6.0",
     "@hcengineering/minio": "^0.6.0",
-    "fast-equals": "^5.0.1"
+    "fast-equals": "^5.0.1",
+    "@hcengineering/text": "^0.6.5",
+    "js-yaml": "^4.1.0"
   }
 }


@@ -31,32 +31,37 @@ import core, {
   systemAccountEmail,
   toWorkspaceString,
   Tx,
+  TxOperations,
   WorkspaceId,
+  WorkspaceIdWithUrl,
   type Doc,
   type TxCUD
 } from '@hcengineering/core'
 import { consoleModelLogger, MigrateOperation, ModelLogger } from '@hcengineering/model'
 import { createMongoTxAdapter, DBCollectionHelper, getMongoClient, getWorkspaceDB } from '@hcengineering/mongo'
-import { DomainIndexHelperImpl, StorageAdapter, StorageConfiguration } from '@hcengineering/server-core'
+import {
+  AggregatorStorageAdapter,
+  DomainIndexHelperImpl,
+  StorageAdapter,
+  StorageConfiguration
+} from '@hcengineering/server-core'
 import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
 import { Db, Document } from 'mongodb'
 import { connect } from './connect'
-import { createWorkspaceData, InitScript } from './initializer'
+import { InitScript, WorkspaceInitializer } from './initializer'
 import toolPlugin from './plugin'
 import { MigrateClientImpl } from './upgrade'
 import { getMetadata } from '@hcengineering/platform'
 import { generateToken } from '@hcengineering/server-token'
 import fs from 'fs'
+import * as yaml from 'js-yaml'
 import path from 'path'

 export * from './connect'
 export * from './plugin'
 export { toolPlugin as default }

-export const CONFIG_DB = '%config'
-const scriptsCol = 'initScripts'
-
 export class FileModelLogger implements ModelLogger {
   handle: fs.WriteStream
   constructor (readonly file: string) {
@@ -102,10 +107,8 @@ export function prepareTools (rawTxes: Tx[]): {
  */
 export async function initModel (
   ctx: MeasureContext,
-  transactorUrl: string,
   workspaceId: WorkspaceId,
   rawTxes: Tx[],
-  migrateOperations: [string, MigrateOperation][],
   logger: ModelLogger = consoleModelLogger,
   progress: (value: number) => Promise<void>
 ): Promise<void> {
@@ -116,7 +119,6 @@ export async function initModel (
   const _client = getMongoClient(mongodbUri)
   const client = await _client.getClient()
-  let connection: (CoreClient & BackupClient) | undefined
   const storageConfig: StorageConfiguration = storageConfigFromEnv()
   const storageAdapter = buildStorageFromConfig(storageConfig, mongodbUri)
   try {
@@ -126,67 +128,71 @@ export async function initModel (
     const result = await db.collection(DOMAIN_TX).insertMany(txes as Document[])
     logger.log('model transactions inserted.', { count: result.insertedCount })
-    await progress(10)
-    logger.log('creating data...', { transactorUrl })
-    const { model } = await fetchModelFromMongo(ctx, mongodbUri, workspaceId)
-    await progress(20)
+    await progress(30)
+    logger.log('creating data...', { workspaceId })
+    await progress(60)
     logger.log('create minio bucket', { workspaceId })
     await storageAdapter.make(ctx, workspaceId)
+    await progress(100)
-    logger.log('connecting to transactor', { workspaceId, transactorUrl })
-    connection = (await connect(
-      transactorUrl,
-      workspaceId,
-      undefined,
-      {
-        model: 'upgrade',
-        admin: 'true'
-      },
-      model
-    )) as unknown as CoreClient & BackupClient
-    const states = await connection.findAll<MigrationState>(core.class.MigrationState, {})
-    const sts = Array.from(groupByArray(states, (it) => it.plugin).entries())
-    const migrateState = new Map(sts.map((it) => [it[0], new Set(it[1].map((q) => q.state))]))
-    try {
-      let i = 0
-      for (const op of migrateOperations) {
-        logger.log('Migrate', { name: op[0] })
-        await op[1].upgrade(migrateState, async () => connection as any, logger)
-        i++
-        await progress(20 + (((100 / migrateOperations.length) * i) / 100) * 10)
-      }
-      await progress(30)
-      // Create update indexes
-      await createUpdateIndexes(
-        ctx,
-        connection,
-        db,
-        logger,
-        async (value) => {
-          await progress(30 + (Math.min(value, 100) / 100) * 70)
-        },
-        workspaceId
-      )
-      await progress(100)
-    } catch (e: any) {
-      logger.error('error', { error: e })
-      throw e
-    }
   } catch (err: any) {
     ctx.error('Failed to create workspace', { error: err })
     throw err
   } finally {
     await storageAdapter.close()
-    await connection?.sendForceClose()
-    await connection?.close()
     _client.close()
   }
 }
+
+export async function updateModel (
+  ctx: MeasureContext,
+  workspaceId: WorkspaceId,
+  migrateOperations: [string, MigrateOperation][],
+  connection: TxOperations,
+  logger: ModelLogger = consoleModelLogger,
+  progress: (value: number) => Promise<void>
+): Promise<void> {
+  logger.log('connecting to transactor', { workspaceId })
+  const states = await connection.findAll<MigrationState>(core.class.MigrationState, {})
+  const sts = Array.from(groupByArray(states, (it) => it.plugin).entries())
+  const migrateState = new Map(sts.map((it) => [it[0], new Set(it[1].map((q) => q.state))]))
+
+  const { mongodbUri } = prepareTools([])
+  const _client = getMongoClient(mongodbUri)
+  const client = await _client.getClient()
+  try {
+    const db = getWorkspaceDB(client, workspaceId)
+    let i = 0
+    for (const op of migrateOperations) {
+      logger.log('Migrate', { name: op[0] })
+      await op[1].upgrade(migrateState, async () => connection as any, logger)
+      i++
+      await progress((((100 / migrateOperations.length) * i) / 100) * 30)
+    }
+
+    // Create update indexes
+    await createUpdateIndexes(
+      ctx,
+      connection,
+      db,
+      logger,
+      async (value) => {
+        await progress(30 + (Math.min(value, 100) / 100) * 70)
+      },
+      workspaceId
+    )
+    await progress(100)
+  } catch (e: any) {
+    logger.error('error', { error: e })
+    throw e
+  } finally {
+    _client.close()
+  }
+}
@@ -197,24 +203,20 @@ export async function initModel (
 export async function initializeWorkspace (
   ctx: MeasureContext,
   branding: Branding | null,
-  transactorUrl: string,
-  workspaceId: WorkspaceId,
+  wsUrl: WorkspaceIdWithUrl,
+  storageAdapter: AggregatorStorageAdapter,
+  client: TxOperations,
   logger: ModelLogger = consoleModelLogger,
   progress: (value: number) => Promise<void>
 ): Promise<void> {
   const initWS = branding?.initWorkspace ?? getMetadata(toolPlugin.metadata.InitWorkspace)
-  if (initWS === undefined) return
-  const { mongodbUri } = prepareTools([])
-  const _client = getMongoClient(mongodbUri)
-  const client = await _client.getClient()
-  let connection: (CoreClient & BackupClient) | undefined
-  const storageConfig: StorageConfiguration = storageConfigFromEnv()
-  const storageAdapter = buildStorageFromConfig(storageConfig, mongodbUri)
+  const sriptUrl = getMetadata(toolPlugin.metadata.InitScriptURL)
+  if (initWS === undefined || sriptUrl === undefined) return
   try {
-    const db = client.db(CONFIG_DB)
-    const scripts = await db.collection<InitScript>(scriptsCol).find({}).toArray()
+    // `https://raw.githubusercontent.com/hcengineering/init/main/script.yaml`
+    const req = await fetch(sriptUrl)
+    const text = await req.text()
+    const scripts = yaml.load(text) as any as InitScript[]
     let script: InitScript | undefined
     if (initWS !== undefined) {
       script = scripts.find((it) => it.name === initWS)
@@ -225,24 +227,12 @@ export async function initializeWorkspace (
     if (script === undefined) {
       return
     }
-    try {
-      connection = (await connect(transactorUrl, workspaceId, undefined, {
-        model: 'upgrade',
-        admin: 'true'
-      })) as unknown as CoreClient & BackupClient
-      await createWorkspaceData(ctx, connection, storageAdapter, workspaceId, script, logger, progress)
-    } catch (e: any) {
-      logger.error('error', { error: e })
-      throw e
-    }
+
+    const initializer = new WorkspaceInitializer(ctx, storageAdapter, wsUrl, client)
+    await initializer.processScript(script, logger, progress)
   } catch (err: any) {
     ctx.error('Failed to create workspace', { error: err })
     throw err
-  } finally {
-    await storageAdapter.close()
-    await connection?.sendForceClose()
-    await connection?.close()
-    _client.close()
   }
 }
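For callers outside the account service, splitting the old initModel into initModel, updateModel and initializeWorkspace implies a calling order like the sketch below. It is only an outline under assumptions: the function name provisionWorkspace and the already-prepared ctx, wsId, wsUrl, branding, txes, migrateOperations, storageAdapter and client are placeholders for values a real caller supplies (the account service above obtains client from createServerPipeline and wrapPipeline).

// Sketch, not the project's API surface: shows only the expected ordering.
async function provisionWorkspace (
  ctx: MeasureContext,
  wsId: WorkspaceId,
  wsUrl: WorkspaceIdWithUrl,
  branding: Branding | null,
  txes: Tx[],
  migrateOperations: [string, MigrateOperation][],
  storageAdapter: AggregatorStorageAdapter,
  client: TxOperations
): Promise<void> {
  // 1. Seed model transactions and the workspace bucket (no transactor connection needed any more).
  await initModel(ctx, wsId, txes, consoleModelLogger, async () => {})
  // 2. Run migrate operations and (re)build indexes through the provided client.
  await updateModel(ctx, wsId, migrateOperations, client, consoleModelLogger, async () => {})
  // 3. Apply the init script fetched from InitScriptURL, if one is configured.
  await initializeWorkspace(ctx, branding, wsUrl, storageAdapter, client, consoleModelLogger, async () => {})
}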


@@ -1,19 +1,22 @@
+import { saveCollaborativeDoc } from '@hcengineering/collaboration'
 import core, {
   AttachedDoc,
   Class,
-  Client,
+  CollaborativeDoc,
   Data,
   Doc,
+  generateId,
   MeasureContext,
   Mixin,
   Ref,
   Space,
   TxOperations,
-  WorkspaceId
+  WorkspaceIdWithUrl
 } from '@hcengineering/core'
 import { ModelLogger } from '@hcengineering/model'
 import { makeRank } from '@hcengineering/rank'
 import { AggregatorStorageAdapter } from '@hcengineering/server-core'
+import { parseMessageMarkdown, YDocFromContent } from '@hcengineering/text'
 import { v4 as uuid } from 'uuid'

 const fieldRegexp = /\${\S+?}/
@@ -25,25 +28,43 @@ export interface InitScript {
   steps: InitStep<Doc>[]
 }

-export type InitStep<T extends Doc> = CreateStep<T> | MixinStep<T, T> | UpdateStep<T> | FindStep<T> | UploadStep
+export type InitStep<T extends Doc> =
+  | CreateStep<T>
+  | DefaultStep<T>
+  | MixinStep<T, T>
+  | UpdateStep<T>
+  | FindStep<T>
+  | UploadStep

 export interface CreateStep<T extends Doc> {
   type: 'create'
   _class: Ref<Class<T>>
   data: Props<T>
+  markdownFields?: string[]
+  collabFields?: string[]
   resultVariable?: string
 }

+export interface DefaultStep<T extends Doc> {
+  type: 'default'
+  _class: Ref<Class<T>>
+  data: Props<T>
+}
+
 export interface MixinStep<T extends Doc, M extends T> {
   type: 'mixin'
   _class: Ref<Class<T>>
   mixin: Ref<Mixin<M>>
+  markdownFields?: string[]
+  collabFields?: string[]
   data: Props<T>
 }

 export interface UpdateStep<T extends Doc> {
   type: 'update'
   _class: Ref<Class<T>>
+  markdownFields?: string[]
+  collabFields?: string[]
   data: Props<T>
 }
@@ -51,7 +72,7 @@ export interface FindStep<T extends Doc> {
   type: 'find'
   _class: Ref<Class<T>>
   query: Partial<T>
-  resultVariable?: string
+  resultVariable: string
 }

 export interface UploadStep {
@@ -64,148 +85,213 @@ export interface UploadStep {
 export type Props<T extends Doc> = Data<T> & Partial<Doc> & { space: Ref<Space> }

-const nextRank = '#nextRank'
-const now = '#now'
-
-export async function createWorkspaceData (
-  ctx: MeasureContext,
-  connection: Client,
-  storageAdapter: AggregatorStorageAdapter,
-  workspaceId: WorkspaceId,
-  script: InitScript,
-  logger: ModelLogger,
-  progress: (value: number) => Promise<void>
-): Promise<void> {
-  const client = new TxOperations(connection, core.account.System)
-  const vars: Record<string, any> = {}
-  for (let index = 0; index < script.steps.length; index++) {
-    const step = script.steps[index]
-    if (step.type === 'create') {
-      await processCreate(client, step, vars)
-    } else if (step.type === 'update') {
-      await processUpdate(client, step, vars)
-    } else if (step.type === 'mixin') {
-      await processMixin(client, step, vars)
-    } else if (step.type === 'find') {
-      await processFind(client, step, vars)
-    } else if (step.type === 'upload') {
-      await processUpload(ctx, storageAdapter, workspaceId, step, vars, logger)
-    }
-    await progress(Math.round(((index + 1) * 100) / script.steps.length))
-  }
-}
-
-async function processUpload (
-  ctx: MeasureContext,
-  storageAdapter: AggregatorStorageAdapter,
-  workspaceId: WorkspaceId,
-  step: UploadStep,
-  vars: Record<string, any>,
-  logger: ModelLogger
-): Promise<void> {
-  try {
-    const id = uuid()
-    const resp = await fetch(step.fromUrl)
-    const buffer = Buffer.from(await resp.arrayBuffer())
-    await storageAdapter.put(ctx, workspaceId, id, buffer, step.contentType, step.size)
-    if (step.resultVariable !== undefined) {
-      vars[step.resultVariable] = id
-    }
-  } catch (error) {
-    logger.error('Upload failed', error)
-  }
-}
-
-async function processFind<T extends Doc> (
-  client: TxOperations,
-  step: FindStep<T>,
-  vars: Record<string, any>
-): Promise<void> {
-  const query = fillProps(step.query, vars)
-  const res = await client.findOne(step._class, { ...(query as any) })
-  if (res === undefined) {
-    throw new Error(`Document not found: ${JSON.stringify(query)}`)
-  }
-  if (step.resultVariable !== undefined) {
-    vars[step.resultVariable] = res
-  }
-}
-
-async function processMixin<T extends Doc> (
-  client: TxOperations,
-  step: MixinStep<T, T>,
-  vars: Record<string, any>
-): Promise<void> {
-  const data = fillProps(step.data, vars)
-  const { _id, space, ...props } = data
-  if (_id === undefined || space === undefined) {
-    throw new Error('Mixin step must have _id and space')
-  }
-  await client.createMixin(_id, step._class, space, step.mixin, props)
-}
-
-async function processUpdate<T extends Doc> (
-  client: TxOperations,
-  step: UpdateStep<T>,
-  vars: Record<string, any>
-): Promise<void> {
-  const data = fillProps(step.data, vars)
-  const { _id, space, ...props } = data
-  if (_id === undefined || space === undefined) {
-    throw new Error('Update step must have _id and space')
-  }
-  await client.updateDoc(step._class, space, _id as Ref<Doc>, props)
-}
-
-async function processCreate<T extends Doc> (
-  client: TxOperations,
-  step: CreateStep<T>,
-  vars: Record<string, any>
-): Promise<void> {
-  const data = fillProps(step.data, vars)
-  const res = await create(client, step._class, data)
-  if (step.resultVariable !== undefined) {
-    vars[step.resultVariable] = res
-  }
-}
-
-async function create<T extends Doc> (client: TxOperations, _class: Ref<Class<T>>, data: Props<T>): Promise<Ref<T>> {
-  const hierarchy = client.getHierarchy()
-  if (hierarchy.isDerived(_class, core.class.AttachedDoc)) {
-    const { space, attachedTo, attachedToClass, collection, ...props } = data as unknown as Props<AttachedDoc>
-    if (attachedTo === undefined || space === undefined || attachedToClass === undefined || collection === undefined) {
-      throw new Error('Add collection step must have attachedTo, attachedToClass, collection and space')
-    }
-    return (await client.addCollection(
-      _class,
-      space,
-      attachedTo,
-      attachedToClass,
-      collection,
-      props
-    )) as unknown as Ref<T>
-  } else {
-    const { space, ...props } = data
-    if (space === undefined) {
-      throw new Error('Create step must have space')
-    }
-    return await client.createDoc<T>(_class, space, props as Data<T>)
-  }
-}
-
-function fillProps<T extends Doc, P extends Partial<T> | Props<T>> (data: P, vars: Record<string, any>): P {
-  for (const key in data) {
-    let value = (data as any)[key]
-    if (typeof value === 'object') {
-      ;(data as any)[key] = fillProps(value, vars)
-    } else if (typeof value === 'string') {
-      if (value === nextRank) {
-        const rank = makeRank(vars[nextRank], undefined)
-        ;(data as any)[key] = rank
-        vars[nextRank] = rank
-      } else if (value === now) {
-        ;(data as any)[key] = new Date().getTime()
-      } else {
-        while (true) {
-          const matched = fieldRegexp.exec(value)
+export class WorkspaceInitializer {
+  private readonly imageUrl = 'image://'
+  private readonly nextRank = '#nextRank'
+  private readonly now = '#now'
+
+  constructor (
+    private readonly ctx: MeasureContext,
+    private readonly storageAdapter: AggregatorStorageAdapter,
+    private readonly wsUrl: WorkspaceIdWithUrl,
+    private readonly client: TxOperations
+  ) {}
+
+  async processScript (
+    script: InitScript,
+    logger: ModelLogger,
+    progress: (value: number) => Promise<void>
+  ): Promise<void> {
+    const vars: Record<string, any> = {}
+    const defaults = new Map<Ref<Class<Doc>>, Props<Doc>>()
+    for (let index = 0; index < script.steps.length; index++) {
+      try {
+        const step = script.steps[index]
+        if (step.type === 'default') {
+          await this.processDefault(step, defaults)
+        } else if (step.type === 'create') {
+          await this.processCreate(step, vars, defaults)
+        } else if (step.type === 'update') {
+          await this.processUpdate(step, vars)
+        } else if (step.type === 'mixin') {
+          await this.processMixin(step, vars)
+        } else if (step.type === 'find') {
+          await this.processFind(step, vars)
+        } else if (step.type === 'upload') {
+          await this.processUpload(step, vars, logger)
+        }
+
+        await progress(Math.round(((index + 1) * 100) / script.steps.length))
+      } catch (error) {
+        logger.error(`Error in script on step ${index}`, error)
+        throw error
+      }
+    }
+  }
+
+  private async processDefault<T extends Doc>(
+    step: DefaultStep<T>,
+    defaults: Map<Ref<Class<T>>, Props<T>>
+  ): Promise<void> {
+    const obj = defaults.get(step._class) ?? {}
+    defaults.set(step._class, { ...obj, ...step.data })
+  }
+
+  private async processUpload (step: UploadStep, vars: Record<string, any>, logger: ModelLogger): Promise<void> {
+    try {
+      const id = uuid()
+      const resp = await fetch(step.fromUrl)
+      const buffer = Buffer.from(await resp.arrayBuffer())
+      await this.storageAdapter.put(this.ctx, this.wsUrl, id, buffer, step.contentType, step.size)
+      if (step.resultVariable !== undefined) {
+        vars[step.resultVariable] = id
+      }
+    } catch (error) {
+      logger.error('Upload failed', error)
+    }
+  }
+
+  private async processFind<T extends Doc>(step: FindStep<T>, vars: Record<string, any>): Promise<void> {
+    const query = this.fillProps(step.query, vars)
+    const res = await this.client.findOne(step._class, { ...(query as any) })
+    if (res === undefined) {
+      throw new Error(`Document not found: ${JSON.stringify(query)}`)
+    }
+    if (step.resultVariable !== undefined) {
+      vars[step.resultVariable] = res
+    }
+  }
+
+  private async processMixin<T extends Doc>(step: MixinStep<T, T>, vars: Record<string, any>): Promise<void> {
+    const data = await this.fillPropsWithMarkdown(step.data, vars, step.markdownFields)
+    const { _id, space, ...props } = data
+    if (_id === undefined || space === undefined) {
+      throw new Error('Mixin step must have _id and space')
+    }
+    await this.client.createMixin(_id, step._class, space, step.mixin, props)
+  }
+
+  private async processUpdate<T extends Doc>(step: UpdateStep<T>, vars: Record<string, any>): Promise<void> {
+    const data = await this.fillPropsWithMarkdown(step.data, vars, step.markdownFields)
+    const { _id, space, ...props } = data
+    if (_id === undefined || space === undefined) {
+      throw new Error('Update step must have _id and space')
+    }
+    await this.client.updateDoc(step._class, space, _id as Ref<Doc>, props)
+  }
+
+  private async processCreate<T extends Doc>(
+    step: CreateStep<T>,
+    vars: Record<string, any>,
+    defaults: Map<Ref<Class<T>>, Props<T>>
+  ): Promise<void> {
+    const _id = generateId<T>()
+    if (step.resultVariable !== undefined) {
+      vars[step.resultVariable] = _id
+    }
+    const data = await this.fillPropsWithMarkdown(
+      { ...(defaults.get(step._class) ?? {}), ...step.data },
+      vars,
+      step.markdownFields
+    )
+
+    if (step.collabFields !== undefined) {
+      for (const field of step.collabFields) {
+        if ((data as any)[field] !== undefined) {
+          const res = await this.createCollab((data as any)[field], field, _id)
+          ;(data as any)[field] = res
+        }
+      }
+    }
+
+    await this.create(step._class, data, _id)
+  }
+
+  private parseMarkdown (text: string): string {
+    const json = parseMessageMarkdown(text ?? '', this.imageUrl)
+    return JSON.stringify(json)
+  }
+
+  private async create<T extends Doc>(_class: Ref<Class<T>>, data: Props<T>, _id?: Ref<T>): Promise<Ref<T>> {
+    const hierarchy = this.client.getHierarchy()
+    if (hierarchy.isDerived(_class, core.class.AttachedDoc)) {
+      const { space, attachedTo, attachedToClass, collection, ...props } = data as unknown as Props<AttachedDoc>
+      if (
+        attachedTo === undefined ||
+        space === undefined ||
+        attachedToClass === undefined ||
+        collection === undefined
+      ) {
+        throw new Error('Add collection step must have attachedTo, attachedToClass, collection and space')
+      }
+      return (await this.client.addCollection(_class, space, attachedTo, attachedToClass, collection, props),
+      _id) as unknown as Ref<T>
+    } else {
+      const { space, ...props } = data
+      if (space === undefined) {
+        throw new Error('Create step must have space')
+      }
+      return await this.client.createDoc<T>(_class, space, props as Data<T>, _id)
+    }
+  }
+
+  private async fillPropsWithMarkdown<T extends Doc, P extends Partial<T> | Props<T>>(
+    data: P,
+    vars: Record<string, any>,
+    markdownFields?: string[]
+  ): Promise<P> {
+    data = await this.fillProps(data, vars)
+    if (markdownFields !== undefined) {
+      for (const field of markdownFields) {
+        if ((data as any)[field] !== undefined) {
+          try {
+            const res = this.parseMarkdown((data as any)[field])
+            ;(data as any)[field] = res
+          } catch (error) {
+            console.log(error)
+          }
+        }
+      }
+    }
+    return data
+  }
+
+  private async createCollab (data: string, field: string, _id: Ref<Doc>): Promise<string> {
+    const json = parseMessageMarkdown(data ?? '', this.imageUrl)
+    const id = `${_id}%${field}`
+    const collabId = `${id}:HEAD:0` as CollaborativeDoc
+    const yDoc = YDocFromContent(json, field)
+
+    await saveCollaborativeDoc(this.storageAdapter, this.wsUrl, collabId, yDoc, this.ctx)
+
+    return collabId
+  }
+
+  private async fillProps<T extends Doc, P extends Partial<T> | Props<T>>(
+    data: P,
+    vars: Record<string, any>
+  ): Promise<P> {
+    for (const key in data) {
+      const value = (data as any)[key]
+      ;(data as any)[key] = await this.fillValue(value, vars)
+    }
+    return data
+  }
+
+  private async fillValue (value: any, vars: Record<string, any>): Promise<any> {
+    if (typeof value === 'object') {
+      if (Array.isArray(value)) {
+        return await Promise.all(value.map(async (v) => await this.fillValue(v, vars)))
+      } else {
+        return await this.fillProps(value, vars)
+      }
+    } else if (typeof value === 'string') {
+      if (value === this.nextRank) {
+        const rank = makeRank(vars[this.nextRank], undefined)
+        vars[this.nextRank] = rank
+        return rank
+      } else if (value === this.now) {
+        return new Date().getTime()
+      } else {
+        while (true) {
+          const matched = fieldRegexp.exec(value)
@@ -216,9 +302,8 @@ function fillProps<T extends Doc, P extends Partial<T> | Props<T>> (data: P, var
           fieldRegexp.lastIndex = 0
         }
       }
-      ;(data as any)[key] = value
+      return value
     }
   }
-  return data
 }
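The script consumed by initializeWorkspace is a list of InitScript entries; with the js-yaml loading added above, a YAML document of the same shape is expected at InitScriptURL. Below is a sketch of such a script expressed with the TypeScript types from this file for readability; the class and space identifiers ('tracker:class:Project', 'tracker:class:Issue', 'core:space:Space') and the field names are illustrative assumptions, not part of this commit.

// Illustrative init script: a 'default' step, two 'create' steps (one using
// markdownFields and the '#now' marker) and a follow-up 'find' step.
const exampleScripts: InitScript[] = [
  {
    name: 'default-workspace',
    steps: [
      {
        // Defaults are merged into every later 'create' of this class.
        type: 'default',
        _class: 'tracker:class:Issue' as Ref<Class<Doc>>, // assumed class id
        data: { modifiedOn: '#now' } as any
      },
      {
        type: 'create',
        _class: 'tracker:class:Project' as Ref<Class<Doc>>, // assumed class id
        data: { name: 'Sample project', space: 'core:space:Space' } as any, // assumed space id
        resultVariable: 'project'
      },
      {
        type: 'create',
        _class: 'tracker:class:Issue' as Ref<Class<Doc>>, // assumed class id
        // String values may also reference earlier results via ${...}
        // placeholders, resolved by the fieldRegexp interpolation above.
        data: { title: 'Welcome', description: '# Welcome\n\nFirst issue.', space: 'core:space:Space' } as any,
        markdownFields: ['description'],
        resultVariable: 'welcomeIssue'
      },
      {
        type: 'find',
        _class: 'tracker:class:Issue' as Ref<Class<Doc>>, // assumed class id
        query: { title: 'Welcome' } as any,
        resultVariable: 'foundIssue'
      }
    ]
  }
]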


@@ -13,6 +13,7 @@ const toolPlugin = plugin(toolId, {
     Endpoint: '' as Metadata<string>,
     Transactor: '' as Metadata<string>,
     InitWorkspace: '' as Metadata<string>,
+    InitScriptURL: '' as Metadata<string>,
     UserAgent: '' as Metadata<string>
   }
 })
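The new InitScriptURL metadata works alongside InitWorkspace: both must be set before workspace creation runs, otherwise initializeWorkspace returns early. A minimal wiring sketch follows; the environment variable names (INIT_WORKSPACE, INIT_SCRIPT_URL) are assumptions for illustration and are not defined by this commit.

import { setMetadata } from '@hcengineering/platform'
import toolPlugin from '@hcengineering/server-tool'

// Hypothetical startup wiring: env var names are assumed, not part of the commit.
const initWS = process.env.INIT_WORKSPACE
if (initWS !== undefined) {
  setMetadata(toolPlugin.metadata.InitWorkspace, initWS)
}
// e.g. the raw.githubusercontent.com script.yaml referenced in the comment above
const initScriptUrl = process.env.INIT_SCRIPT_URL
if (initScriptUrl !== undefined) {
  setMetadata(toolPlugin.metadata.InitScriptURL, initScriptUrl)
}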