Paste HTML as Markdown in documentation editor (#11773)

Convert HTML clipboard data to Markdown when pasting into documentation editor:

https://github.com/user-attachments/assets/e87ba419-7465-48a7-bb08-101a76012a72

# Important Notes
- The best HTML-to-Markdown converter available seems to be Lexical. It's a large dependency, but this implementation lazily loads it only when rich pasting functionality is needed. In the future, we could cut it down a lot; we only need (parts of) the node definitions, the transformers, and (parts of) the core.
- Not all formatting in the example webpage is supported yet; some issues added to #11772.
- Also: Fix undo/redo in the documentation editor (prevent the graph editor from also handling those key events).
This commit is contained in:
Kaz Wesley 2024-12-06 09:00:24 -08:00 committed by GitHub
parent 6aa4c9f35e
commit 640d66294e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
13 changed files with 495 additions and 4 deletions

View File

@ -45,6 +45,9 @@
"@aws-amplify/core": "5.8.5",
"@hookform/resolvers": "^3.4.0",
"@internationalized/date": "^3.5.5",
"@lexical/html": "^0.21.0",
"@lexical/link": "^0.21.0",
"@lexical/markdown": "^0.21.0",
"@monaco-editor/react": "4.6.0",
"@sentry/react": "^7.74.0",
"@react-aria/interactions": "^3.22.3",
@ -61,6 +64,7 @@
"idb-keyval": "^6.2.1",
"input-otp": "1.2.4",
"is-network-error": "^1.0.1",
"lexical": "^0.21.0",
"monaco-editor": "0.48.0",
"qrcode.react": "3.1.0",
"react": "^18.3.1",

View File

@ -4,11 +4,13 @@ import { useDocumentationImages } from '@/components/DocumentationEditor/images'
import { transformPastedText } from '@/components/DocumentationEditor/textPaste'
import FullscreenButton from '@/components/FullscreenButton.vue'
import MarkdownEditor from '@/components/MarkdownEditor.vue'
import { htmlToMarkdown } from '@/components/MarkdownEditor/htmlToMarkdown'
import WithFullscreenMode from '@/components/WithFullscreenMode.vue'
import { useGraphStore } from '@/stores/graph'
import { useProjectStore } from '@/stores/project'
import { useProjectFiles } from '@/stores/projectFiles'
import { ComponentInstance, ref, toRef, watch } from 'vue'
import { normalizeMarkdown } from 'ydoc-shared/ast/documentation'
import * as Y from 'yjs'
const { yText } = defineProps<{
@ -41,13 +43,20 @@ function handlePaste(raw: boolean) {
window.navigator.clipboard.read().then(async (items) => {
if (!markdownEditor.value) return
for (const item of items) {
if (tryUploadPastedImage(item)) return
if (tryUploadPastedImage(item)) continue
const htmlType = item.types.find((type) => type === 'text/html')
if (htmlType) {
const blob = await item.getType(htmlType)
const html = await blob.text()
const markdown = normalizeMarkdown(await htmlToMarkdown(html))
markdownEditor.value.putText(markdown)
continue
}
const textType = item.types.find((type) => type === 'text/plain')
if (textType) {
const blob = await item.getType(textType)
const rawText = await blob.text()
markdownEditor.value.putText(raw ? rawText : transformPastedText(rawText))
return
}
}
})

View File

@ -252,7 +252,7 @@ useEvent(
'keydown',
(event) =>
interactionBindingsHandler(event) ||
(!keyboardBusyExceptIn(documentationEditorArea.value) && undoBindingsHandler(event)) ||
(!keyboardBusy() && undoBindingsHandler(event)) ||
(!keyboardBusy() && graphBindingsHandler(event)) ||
(!keyboardBusyExceptIn(codeEditorArea.value) && codeEditorHandler(event)) ||
(!keyboardBusyExceptIn(documentationEditorArea.value) && documentationEditorHandler(event)) ||

View File

@ -0,0 +1,125 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
exports[`htmlToMarkdown: google_docs1.html.bin 1`] = `
"# Title
Subtitle
# Heading1
## Heading2
### Heading3
#### Heading4
##### Heading5
###### *Heading6*
**Bold**, *italic*, and *underlined*.
Unordered list:
- Item
- Another
Numbered list:
1. First
2. Second
Link: [Text](http://example.com)
![](https://lh7-rt.googleusercontent.com/docsz/AD_4nXeCL-N-bkbZ3JKJqp86wjPGiXe0hSlYQVnr6nqfI4k37Mrf0USAOhbXyIyXzdIVhuD60vPN4BHHWf64F5b80nonbX63keL0BJRe2KsBQMInXS_Sn6Elj-SdB34CUc3d1xOllHf7WA?key=oYRu0j12MJwt-XgjX8Oamy3V)"
`;
exports[`htmlToMarkdown: preppin_data_2024w1.html.bin 1`] = `
"### 2024: Week 1 - Prep Air's Flow Card
[January 03, 2024](https://preppindata.blogspot.com/2024/01/2024-week-1-prep-airs-flow-card.html "permanent link")
Created by: Carl Allchin
Welcome to a New Year of Preppin' Data challenges. For anyone new to the challenges then let us give you an overview how the weekly challenge works. 
- Each Wednesday the Preppin' crew (Jenny, myself or a guest contributor) drop a data set(s) that requires some reshaping and/or cleaning to get it ready for analysis. 
- You can use any tool or language you want to do the reshaping (we build the challenges in Tableau Prep but love seeing different tools being learnt / tried).
- Share your solution on LinkedIn, Twitter/X, GitHub or the [Tableau Forums](https://community.tableau.com/s/group/0F94T000000gQqoSAE/preppindata)
- Fill out [our tracker](https://docs.google.com/forms/d/e/1FAIpQLSdZzudRXwUvjhWwNawwz1kGXcYeQ9gBnAhCOvlA7qEDknGu4A/viewform) so you can monitor your progress and involvement
- The following Tuesday we will post a written solution in Tableau Prep (thanks Tom) and a video walkthrough too (thanks Jenny)
As with each January for the last few years, we'll set a number of challenges aimed at beginners. This is a great way to learn a number of fundamental data preparation skills or a chance to learn a new tool — New Year's resolution anyone? So on to this year's challenges!
### Context
At Preppin' Data we use a number of (mock) companies to look at the challenges they have with their data. For January, we're going to focus on our own airline, Prep Air. The airline has introduced a new loyalty card called the Flow Card. We need to clean up a number of data sets to determine how well the card is doing. 
The first task is setting some context for later weeks by understanding how popular the Flow Card is. Our stakeholder would like two data sets about our passengers. One data set for card users and one data set for those who don't use the card. 
### Input
There is one input file. You can [download it from here](https://drive.google.com/file/d/1STVYZvXzfGMuEq9Yq3yYOmCDCFq4iB0Z/view?usp=share_link).
![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEhyWUGjqNfzjbejKFop5EhJ5AvmFduIULRfKCy8CVzOyaqJQHQhyphenhyphen6Vgna7P_Ozfl0WvIZgd-JhihJVQJWRbkSeu9BbI8bjJ2_bFJg-OaOA8VE9yV8UWaFYL1xiAxGKGwIyGW9ebPqsy31xiw7SCQ610Y__4KiqoZdbhpqZssBrmbFLXpQUY_7nfbE1gHN9v/w640-h160/Screenshot%202023-12-12%20at%2020.37.44.png)
### Requirements
- Input the data
- Split the Flight Details field to form:
- Date 
- Flight Number
- From
- To
- Class
- Price
- Convert the following data fields to the correct data types:
- Date to a date format
- Price to a decimal value
- Change the Flow Card field to Yes / No values instead of 1 / 0
- Create two tables, one for Flow Card holders and one for non-Flow Card holders
- Output the data sets
### Output
Both outputs have the same data structure:
9 data fields:
- Date
- Flight Number
- From
- To
- Class
- Price
- Flow Card?
- Bags Checked
- Meal Type
Output 1
![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEjZvuMPD03wpttqY9OsQPWCC0MpBio6CDLUEfN_D9UW25P0vAYqTHITYkuuHxu5mSNjBnCpjg8iksGvW0u8ee2JOcfjxocZ6ob2CwMDtQw18BdYCv9ZJwDwdKXi2M6uMvv5b3jEYB2egkI1D7QEMVVn7xPBE9dZLizrFRx30g7bblg0f6e4OFjM1uGZAxEm/w640-h118/Screenshot%202023-12-12%20at%2021.09.34.png)
Rows: 1883 (1884 including the header)
Output 2
![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEi1WTdeAiUOCwQncp073UYOxBTo_LCbL8L_AiEOr_BBzfkRCkiboiK9r5pF2lMCCN5p9iddBCgbpz5bsvgod2W9jmBLZepuk6uWOQaFMz6mhKTVLtgxfVdjWWM8E2BFGeNzALsz81YZ4FdswF4_KshBS2JHsXUVIEt1qlS4obFDTnA368aVxLORbCuqsvjG/w640-h102/Screenshot%202023-12-12%20at%2021.10.25.png)
Rows: 1895 (1896 including the header)
You can download the [output from here](https://drive.google.com/drive/folders/1-lt4XWZSn92gCBUiMT6ZDAoEzGyUZGJm?usp=share_link).
After you finish the challenge make sure to fill in the [participation tracker](https://docs.google.com/forms/d/e/1FAIpQLSdZzudRXwUvjhWwNawwz1kGXcYeQ9gBnAhCOvlA7qEDknGu4A/viewform), then share your solution on Twitter using [#PreppinData](https://twitter.com/search?q=%23preppindata&src=typed_query) and tagging [@Datajedininja](https://twitter.com/Datajedininja), [@JennyMartinDS14](https://twitter.com/JennyMartinDS14) & [@TomProwse1](https://twitter.com/TomProwse1)
You can also post your solution on the [Tableau Forum](https://community.tableau.com/s/group/0F94T000000gQqoSAE/preppindata) where we have a Preppin' Data community page. Post your solutions and ask questions if you need any help! "
`;

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,11 @@
import { htmlToMarkdown } from '@/components/MarkdownEditor/htmlToMarkdown'
import * as fs from 'node:fs'
import { expect, test } from 'vitest'
// Snapshot tests: each fixture is a captured HTML clipboard payload; the
// expected Markdown conversion lives in the adjacent snapshot file.
test.each(['google_docs1.html.bin', 'preppin_data_2024w1.html.bin'])(
  'htmlToMarkdown: %s',
  async (fixture) => {
    const html = fs.readFileSync(`${__dirname}/fixtures/${fixture}`, 'utf8')
    await expect(htmlToMarkdown(html)).resolves.toMatchSnapshot()
  },
)

View File

@ -0,0 +1,21 @@
import { IMAGE } from '@/components/MarkdownEditor/htmlToMarkdown/imageNode'
import { LINK } from '@/components/MarkdownEditor/htmlToMarkdown/linkNode'
import { $generateNodesFromDOM } from '@lexical/html'
import { $convertToMarkdownString, TRANSFORMERS } from '@lexical/markdown'
import { $insertNodes, createEditor } from 'lexical'
const domParser = new DOMParser()
const transformers = [IMAGE, LINK, ...TRANSFORMERS]
/** Convert the given HTML text to a Markdown approximation. */
export function htmlToMarkdownImpl(html: string): string {
const dom = domParser.parseFromString(html, 'text/html')
const editor = createEditor({
nodes: transformers.flatMap((transformer) =>
'dependencies' in transformer ? transformer.dependencies : [],
),
})
editor.update(() => $insertNodes($generateNodesFromDOM(editor, dom)))
return editor.read(() => $convertToMarkdownString(transformers))
}

View File

@ -0,0 +1,107 @@
import { type TextMatchTransformer } from '@lexical/markdown'
import {
type DOMConversionMap,
type DOMConversionOutput,
type NodeKey,
$applyNodeReplacement,
DecoratorNode,
} from 'lexical'
interface ImagePayload {
altText: string
key?: NodeKey | undefined
src: string
}
/** DOM-import handler: map an `<img>` element to an {@link ImageNode}; reject other nodes. */
function $convertImageElement(domNode: Node): null | DOMConversionOutput {
  if (!(domNode instanceof HTMLImageElement)) return null
  return { node: $createImageNode({ altText: domNode.alt, src: domNode.src }) }
}
/** Shared stub for Lexical API members this export-only node never exercises. */
function notImplemented(): never {
  const message = 'not implemented'
  throw new Error(message)
}
/** TODO: Add docs */
/**
 * Minimal Lexical image node: stores only `src` and alt text, which is all
 * that is needed to import `<img>` elements from pasted HTML and re-serialize
 * them to Markdown. Editor-rendering APIs are stubbed out via `notImplemented`.
 */
export class ImageNode extends DecoratorNode<void> {
  // Double-underscore prefix follows Lexical's convention for node state fields.
  __src: string
  __altText: string

  /** Lexical node-type discriminator. */
  static override getType(): string {
    return 'image'
  }

  /** Copy a node, preserving its key. */
  static override clone(node: ImageNode): ImageNode {
    return new ImageNode(node.__src, node.__altText, node.__key)
  }

  /** Register the `<img>` DOM importer used by `$generateNodesFromDOM`. */
  static override importDOM(): DOMConversionMap | null {
    return {
      img: (_node: Node) => ({
        conversion: $convertImageElement,
        priority: 0,
      }),
    }
  }

  /**
   * @param src - the image URL
   * @param altText - alternative text for the image (may be empty)
   * @param key - optional Lexical node key; assigned automatically if omitted
   */
  constructor(src: string, altText: string, key?: NodeKey) {
    super(key)
    this.__src = src
    this.__altText = altText
  }

  /** Serialize to Lexical's JSON node format. */
  override exportJSON() {
    return {
      altText: this.getAltText(),
      src: this.getSrc(),
      type: 'image',
      version: 1,
    }
  }

  /** The image URL. */
  getSrc(): string {
    return this.__src
  }

  /** The image's alternative text. */
  getAltText(): string {
    return this.__altText
  }

  /** Not used, but required by API */
  override decorate() {}
  static override importJSON = notImplemented
  override exportDOM = notImplemented
  override createDOM = notImplemented
  override updateDOM = notImplemented
}
/** Type predicate for {@link ImageNode} */
export function $isImageNode(node: unknown): node is ImageNode {
  if (node instanceof ImageNode) return true
  return false
}
/** Construct an {@link ImageNode}, honoring any node replacement registered with the editor. */
function $createImageNode(payload: ImagePayload): ImageNode {
  const { src, altText, key } = payload
  return $applyNodeReplacement(new ImageNode(src, altText, key))
}
/**
 * Markdown export for {@link ImageNode}: emits `![alt](src)`.
 * The match regex never fires, so this transformer is export-only.
 */
export const IMAGE: TextMatchTransformer = {
  dependencies: [ImageNode],
  export: (node) =>
    $isImageNode(node) ? `![${node.getAltText()}](${node.getSrc()})` : null,
  regExp: /$^/,
  type: 'text-match',
}

View File

@ -0,0 +1,10 @@
/**
* Convert the given HTML text to a Markdown approximation. The supporting libraries will be loaded the first time this
* is called.
*/
/**
 * Convert the given HTML text to a Markdown approximation.
 *
 * The Lexical-based implementation is loaded lazily on first use, so the
 * (large) supporting libraries are only fetched when rich pasting happens.
 */
export async function htmlToMarkdown(html: string): Promise<string> {
  const impl = await import('@/components/MarkdownEditor/htmlToMarkdown/htmlToMarkdownImpl')
  return impl.htmlToMarkdownImpl(html)
}

View File

@ -0,0 +1,32 @@
import { $isImageNode, IMAGE } from '@/components/MarkdownEditor/htmlToMarkdown/imageNode'
import { $isLinkNode, LinkNode } from '@lexical/link'
import { type Transformer } from '@lexical/markdown'
import { $isTextNode } from 'lexical'
export { LinkNode } from '@lexical/link'
/**
 * Markdown export for Lexical link nodes, including the common HTML-import
 * case of a link wrapping a single image.
 */
export const LINK: Transformer = {
  dependencies: [LinkNode],
  export: (node, exportChildren, exportFormat) => {
    if (!$isLinkNode(node)) return null
    const url = node.getURL()
    const text = node.getTextContent()
    const title = node.getTitle()
    const linkContent = title ? `[${text}](${url} "${title}")` : `[${text}](${url})`
    // Only inspect the child when there is exactly one: markdown cannot
    // represent nested styles for links with multiple children.
    const onlyChild = node.getChildrenSize() === 1 ? node.getFirstChild() : null
    if ($isTextNode(onlyChild)) {
      // Single text child: carry its text formatting through to the link syntax.
      return exportFormat(onlyChild, linkContent)
    }
    if ($isImageNode(onlyChild)) {
      // Images sometimes happen to be inside links (when importing nodes from HTML).
      // The link is not important for us (this type of layout is not supported in markdown),
      // but we want to display the image.
      return IMAGE.export!(onlyChild, exportChildren, exportFormat)
    }
    return linkContent
  },
  regExp: /$^/,
  type: 'text-match',
}

View File

@ -102,7 +102,7 @@ function toRawMarkdown(elements: undefined | TextToken<ConcreteRefs>[]) {
* Convert the Markdown input to a format with rendered-style linebreaks: Hard-wrapped lines within a paragraph will be
* joined, and only a single linebreak character is used to separate paragraphs.
*/
function normalizeMarkdown(rawMarkdown: string): string {
export function normalizeMarkdown(rawMarkdown: string): string {
let normalized = ''
let prevTo = 0
let prevName: string | undefined = undefined

View File

@ -157,6 +157,15 @@ importers:
'@internationalized/date':
specifier: ^3.5.5
version: 3.5.5
'@lexical/html':
specifier: ^0.21.0
version: 0.21.0
'@lexical/link':
specifier: ^0.21.0
version: 0.21.0
'@lexical/markdown':
specifier: ^0.21.0
version: 0.21.0
'@lezer/common':
specifier: ^1.1.0
version: 1.2.1
@ -247,6 +256,9 @@ importers:
isomorphic-ws:
specifier: ^5.0.0
version: 5.0.0(ws@8.18.0)
lexical:
specifier: ^0.21.0
version: 0.21.0
lib0:
specifier: ^0.2.85
version: 0.2.94
@ -1959,6 +1971,39 @@ packages:
'@json-schema-tools/traverse@1.10.4':
resolution: {integrity: sha512-9e42zjhLIxzBONroNC4SGsTqdB877tzwH2S6lqgTav9K24kWJR9vNieeMVSuyqnY8FlclH21D8wsm/tuD9WA9Q==}
'@lexical/clipboard@0.21.0':
resolution: {integrity: sha512-3lNMlMeUob9fcnRXGVieV/lmPbmet/SVWckNTOwzfKrZ/YW5HiiyJrWviLRVf50dGXTbmBGt7K/2pfPYvWCHFA==}
'@lexical/code@0.21.0':
resolution: {integrity: sha512-E0DNSFu4I+LMn3ft+UT0Dbntc8ZKjIA0BJj6BDewm0qh3bir40YUf5DkI2lpiFNRF2OpcmmcIxakREeU6avqTA==}
'@lexical/html@0.21.0':
resolution: {integrity: sha512-UGahVsGz8OD7Ya39qwquE+JPStTxCw/uaQrnUNorCM7owtPidO2H+tsilAB3A1GK3ksFGdHeEjBjG0Gf7gOg+Q==}
'@lexical/link@0.21.0':
resolution: {integrity: sha512-/coktIyRXg8rXz/7uxXsSEfSQYxPIx8CmignAXWYhcyYtCWA0fD2mhEhWwVvHH9ofNzvidclRPYKUnrmUm3z3Q==}
'@lexical/list@0.21.0':
resolution: {integrity: sha512-WItGlwwNJCS8b6SO1QPKzArShmD+OXQkLbhBcAh+EfpnkvmCW5T5LqY+OfIRmEN1dhDOnwqCY7mXkivWO8o5tw==}
'@lexical/markdown@0.21.0':
resolution: {integrity: sha512-XCQCyW5ujK0xR6evV8sF0hv/MRUA//kIrB2JiyF12tLQyjLRNEXO+0IKastWnMKSaDdJMKjzgd+4PiummYs7uA==}
'@lexical/rich-text@0.21.0':
resolution: {integrity: sha512-+pvEKUneEkGfWOSTl9jU58N9knePilMLxxOtppCAcgnaCdilOh3n5YyRppXhvmprUe0JaTseCMoik2LP51G/JA==}
'@lexical/selection@0.21.0':
resolution: {integrity: sha512-4u53bc8zlPPF0rnHjsGQExQ1St8NafsDd70/t1FMw7yvoMtUsKdH7+ap00esLkJOMv45unJD7UOzKRqU1X0sEA==}
'@lexical/table@0.21.0':
resolution: {integrity: sha512-JhylAWcf4qKD4FmxMUt3YzH5zg2+baBr4+/haLZL7178hMvUzJwGIiWk+3hD3phzmW3WrP49uFXzM7DMSCkE8w==}
'@lexical/text@0.21.0':
resolution: {integrity: sha512-ceB4fhYejCoR8ID4uIs0sO/VyQoayRjrRWTIEMvOcQtwUkcyciKRhY0A7f2wVeq/MFStd+ajLLjy4WKYK5zUnA==}
'@lexical/utils@0.21.0':
resolution: {integrity: sha512-YzsNOAiLkCy6R3DuP18gtseDrzgx+30lFyqRvp5M7mckeYgQElwdfG5biNFDLv7BM9GjSzgU5Cunjycsx6Sjqg==}
'@lezer/common@1.2.1':
resolution: {integrity: sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ==}
@ -5813,6 +5858,9 @@ packages:
resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==}
engines: {node: '>= 0.8.0'}
lexical@0.21.0:
resolution: {integrity: sha512-Dxc5SCG4kB+wF+Rh55ism3SuecOKeOtCtGHFGKd6pj2QKVojtjkxGTQPMt7//2z5rMSue4R+hmRM0pCEZflupA==}
lib0@0.2.94:
resolution: {integrity: sha512-hZ3p54jL4Wpu7IOg26uC7dnEWiMyNlUrb9KoG7+xYs45WkQwpVvKFndVq2+pqLYKe1u8Fp3+zAfZHVvTK34PvQ==}
engines: {node: '>=16'}
@ -6694,6 +6742,10 @@ packages:
resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
prismjs@1.29.0:
resolution: {integrity: sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==}
engines: {node: '>=6'}
process-nextick-args@2.0.1:
resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==}
@ -9503,6 +9555,74 @@ snapshots:
'@json-schema-tools/traverse@1.10.4': {}
'@lexical/clipboard@0.21.0':
dependencies:
'@lexical/html': 0.21.0
'@lexical/list': 0.21.0
'@lexical/selection': 0.21.0
'@lexical/utils': 0.21.0
lexical: 0.21.0
'@lexical/code@0.21.0':
dependencies:
'@lexical/utils': 0.21.0
lexical: 0.21.0
prismjs: 1.29.0
'@lexical/html@0.21.0':
dependencies:
'@lexical/selection': 0.21.0
'@lexical/utils': 0.21.0
lexical: 0.21.0
'@lexical/link@0.21.0':
dependencies:
'@lexical/utils': 0.21.0
lexical: 0.21.0
'@lexical/list@0.21.0':
dependencies:
'@lexical/utils': 0.21.0
lexical: 0.21.0
'@lexical/markdown@0.21.0':
dependencies:
'@lexical/code': 0.21.0
'@lexical/link': 0.21.0
'@lexical/list': 0.21.0
'@lexical/rich-text': 0.21.0
'@lexical/text': 0.21.0
'@lexical/utils': 0.21.0
lexical: 0.21.0
'@lexical/rich-text@0.21.0':
dependencies:
'@lexical/clipboard': 0.21.0
'@lexical/selection': 0.21.0
'@lexical/utils': 0.21.0
lexical: 0.21.0
'@lexical/selection@0.21.0':
dependencies:
lexical: 0.21.0
'@lexical/table@0.21.0':
dependencies:
'@lexical/clipboard': 0.21.0
'@lexical/utils': 0.21.0
lexical: 0.21.0
'@lexical/text@0.21.0':
dependencies:
lexical: 0.21.0
'@lexical/utils@0.21.0':
dependencies:
'@lexical/list': 0.21.0
'@lexical/selection': 0.21.0
'@lexical/table': 0.21.0
lexical: 0.21.0
'@lezer/common@1.2.1': {}
'@lezer/css@1.1.9':
@ -14497,6 +14617,8 @@ snapshots:
prelude-ls: 1.2.1
type-check: 0.4.0
lexical@0.21.0: {}
lib0@0.2.94:
dependencies:
isomorphic.js: 0.2.5
@ -15256,6 +15378,8 @@ snapshots:
ansi-styles: 5.2.0
react-is: 18.3.1
prismjs@1.29.0: {}
process-nextick-args@2.0.1: {}
process@0.11.10: {}