Mirror of https://github.com/enso-org/enso.git, synced 2024-11-22 03:32:23 +03:00
Pasting image in Documentation Editor (#11547)
Fixes #10059

https://github.com/user-attachments/assets/a528e26a-b388-4a2a-9bf4-3ccc734373f6

# Important Notes

* I moved the logic for managing the project's files into a single composable, `projectFiles`.
This commit is contained in: parent 66df53a2e4, commit 10e1b76f57
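The note above points at the new `projectFiles` composable; below is a minimal sketch of how a consumer might use the API added in `app/gui/src/project-view/stores/projectFiles.ts` (the `images` directory and file name are illustrative, not mandated by the commit):

```ts
import { useProjectFiles } from '@/stores/projectFiles'
import { useProjectStore } from '@/stores/project'

// Sketch only: mirrors how the documentation editor component in this commit
// consumes the composable to store a pasted image in the project.
const projectFiles = useProjectFiles(useProjectStore())

async function saveBlob(blob: Blob) {
  const rootId = await projectFiles.projectRootId
  if (!rootId) return
  const dir = { rootId, segments: ['images'] } // illustrative target directory
  await projectFiles.ensureDirExists(dir)
  const name = await projectFiles.pickUniqueName(dir, 'image.png')
  if (!name.ok) return
  await projectFiles.writeFileBinary({ rootId, segments: ['images', name.value] }, blob)
}
```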
@@ -24,6 +24,8 @@
component.][11452]
- [New documentation editor provides improved Markdown editing experience, and
paves the way for new documentation features.][11469]
- [You can now add images to the documentation panel][11547] by pasting them from
the clipboard or by dragging and dropping image files.
- ["Write" button in component menu allows evaluating it separately from the
rest of the workflow][11523].
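Concretely, pasting or dropping an image stores a copy under the project's `images/` directory and inserts a Markdown link such as `![Image](/images/image.png)` into the documentation source (see `uploadImage` in the documentation editor component changes below).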
@@ -42,6 +44,7 @@
[11448]: https://github.com/enso-org/enso/pull/11448
[11452]: https://github.com/enso-org/enso/pull/11452
[11469]: https://github.com/enso-org/enso/pull/11469
[11547]: https://github.com/enso-org/enso/pull/11547
[11523]: https://github.com/enso-org/enso/pull/11523

#### Enso Standard Library
@@ -7,13 +7,18 @@ import * as locate from './locate'
test('Main method documentation', async ({ page }) => {
await actions.goToGraph(page)

const rightDock = locate.rightDock(page)
// Documentation panel hotkey opens right-dock.
await expect(locate.rightDock(page)).toBeHidden()
await expect(rightDock).toBeHidden()
await page.keyboard.press(`${CONTROL_KEY}+D`)
await expect(locate.rightDock(page)).toBeVisible()
await expect(rightDock).toBeVisible()

// Right-dock displays main method documentation.
await expect(locate.editorRoot(locate.rightDock(page))).toHaveText('The main method')
await expect(locate.editorRoot(rightDock)).toContainText('The main method')
// All three images are loaded properly.
await expect(rightDock.getByAltText('Image')).toHaveCount(3)
for (const img of await rightDock.getByAltText('Image').all())
await expect(img).toHaveJSProperty('naturalWidth', 3)

// Documentation hotkey closes right-dock.
await page.keyboard.press(`${CONTROL_KEY}+D`)
@@ -12,6 +12,7 @@ export const codeEditorBindings = defineKeybinds('code-editor', {
export const documentationEditorBindings = defineKeybinds('documentation-editor', {
toggle: ['Mod+D'],
openLink: ['Mod+PointerMain'],
paste: ['Mod+V'],
})

export const interactionBindings = defineKeybinds('current-interaction', {
@@ -1,13 +1,17 @@
<script setup lang="ts">
import { documentationEditorBindings } from '@/bindings'
import FullscreenButton from '@/components/FullscreenButton.vue'
import MarkdownEditor from '@/components/MarkdownEditor.vue'
import { fetcherUrlTransformer } from '@/components/MarkdownEditor/imageUrlTransformer'
import WithFullscreenMode from '@/components/WithFullscreenMode.vue'
import { useGraphStore } from '@/stores/graph'
import { useProjectStore } from '@/stores/project'
import { useProjectFiles } from '@/stores/projectFiles'
import { Vec2 } from '@/util/data/vec2'
import type { ToValue } from '@/util/reactivity'
import { ref, toRef, toValue, watch } from 'vue'
import type { Path } from 'ydoc-shared/languageServerTypes'
import { useToast } from '@/util/toast'
import { ComponentInstance, computed, reactive, ref, toRef, toValue, watch } from 'vue'
import type { Path, Uuid } from 'ydoc-shared/languageServerTypes'
import { Err, Ok, mapOk, withContext, type Result } from 'ydoc-shared/util/data/result'
import * as Y from 'yjs'

@@ -19,26 +23,42 @@ const emit = defineEmits<{
}>()

const toolbarElement = ref<HTMLElement>()
const markdownEditor = ref<ComponentInstance<typeof MarkdownEditor>>()

const graphStore = useGraphStore()
const projectStore = useProjectStore()
const { transformImageUrl } = useDocumentationImages(
const { transformImageUrl, uploadImage } = useDocumentationImages(
toRef(graphStore, 'modulePath'),
projectStore.readFileBinary,
useProjectFiles(projectStore),
)
const uploadErrorToast = useToast.error()

type UploadedImagePosition = { type: 'selection' } | { type: 'coords'; coords: Vec2 }

/**
 * A project file management API for the {@link useDocumentationImages} composable.
 */
interface ProjectFilesAPI {
projectRootId: Promise<Uuid | undefined>
readFileBinary(path: Path): Promise<Result<Blob>>
writeFileBinary(path: Path, content: Blob): Promise<Result>
pickUniqueName(path: Path, suggestedName: string): Promise<Result<string>>
ensureDirExists(path: Path): Promise<Result<void>>
}

function useDocumentationImages(
modulePath: ToValue<Path | undefined>,
readFileBinary: (path: Path) => Promise<Result<Blob>>,
projectFiles: ProjectFilesAPI,
) {
async function urlToPath(url: string): Promise<Result<Path> | undefined> {
function urlToPath(url: string): Result<Path> | undefined {
const modulePathValue = toValue(modulePath)
if (!modulePathValue) {
return Err('Current module path is unknown.')
}
const appliedUrl = new URL(url, `file:///${modulePathValue.segments.join('/')}`)
if (appliedUrl.protocol === 'file:') {
const segments = appliedUrl.pathname.split('/')
// The pathname starts with '/', so we remove the leading empty segment.
const segments = decodeURI(appliedUrl.pathname).split('/').slice(1)
return Ok({ rootId: modulePathValue.rootId, segments })
} else {
// Not a relative URL, custom fetching not needed.
@@ -54,24 +74,81 @@ function useDocumentationImages(
return pathUniqueId(path)
}

const currentlyUploading = reactive(new Map<string, Promise<Blob>>())

const transformImageUrl = fetcherUrlTransformer(
async (url: string) => {
const path = await urlToPath(url)
if (!path) return
return withContext(
() => `Locating documentation image (${url})`,
() => mapOk(path, (path) => ({ location: path, uniqueId: pathUniqueId(path) })),
() =>
mapOk(path, (path) => {
const id = pathUniqueId(path)
return {
location: path,
uniqueId: id,
uploading: computed(() => currentlyUploading.has(id)),
}
}),
)
},
async (path) => {
return withContext(
() => `Loading documentation image (${pathDebugRepr(path)})`,
async () => await readFileBinary(path),
async () => {
const uploaded = await currentlyUploading.get(pathUniqueId(path))
return uploaded ? Ok(uploaded) : projectFiles.readFileBinary(path)
},
)
},
)

return { transformImageUrl }
async function uploadImage(
name: string,
blobPromise: Promise<Blob>,
position: UploadedImagePosition = { type: 'selection' },
) {
const rootId = await projectFiles.projectRootId
if (!rootId) {
uploadErrorToast.show('Cannot upload image: unknown project file tree root.')
return
}
if (!markdownEditor.value || !markdownEditor.value.loaded) {
console.error('Tried to upload an image while the markdown editor is still not loaded')
return
}
const dirPath = { rootId, segments: ['images'] }
await projectFiles.ensureDirExists(dirPath)
const filename = await projectFiles.pickUniqueName(dirPath, name)
if (!filename.ok) {
uploadErrorToast.reportError(filename.error)
return
}
const path: Path = { rootId, segments: ['images', filename.value] }
const id = pathUniqueId(path)
currentlyUploading.set(id, blobPromise)

const insertedLink = `\n![Image](/images/${encodeURI(filename.value)})\n`
switch (position.type) {
case 'selection':
markdownEditor.value.putText(insertedLink)
break
case 'coords':
markdownEditor.value.putTextAtCoord(insertedLink, position.coords)
break
}
try {
const blob = await blobPromise
const uploadResult = await projectFiles.writeFileBinary(path, blob)
if (!uploadResult.ok)
uploadErrorToast.reportError(uploadResult.error, 'Failed to upload image')
} finally {
currentlyUploading.delete(id)
}
}

return { transformImageUrl, uploadImage }
}

const fullscreen = ref(false)
@@ -81,6 +158,55 @@ watch(
() => fullscreen.value || fullscreenAnimating.value,
(fullscreenOrAnimating) => emit('update:fullscreen', fullscreenOrAnimating),
)

const supportedImageTypes: Record<string, { extension: string }> = {
// List taken from https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Image_types
'image/apng': { extension: 'apng' },
'image/avif': { extension: 'avif' },
'image/gif': { extension: 'gif' },
'image/jpeg': { extension: 'jpg' },
'image/png': { extension: 'png' },
'image/svg+xml': { extension: 'svg' },
'image/webp': { extension: 'webp' },
// Question: do we want to have BMP and ICO here?
}

async function handleFileDrop(event: DragEvent) {
if (!event.dataTransfer?.items) return
for (const item of event.dataTransfer.items) {
if (item.kind !== 'file' || !Object.hasOwn(supportedImageTypes, item.type)) continue
const file = item.getAsFile()
if (!file) continue
const clientPos = new Vec2(event.clientX, event.clientY)
event.stopPropagation()
event.preventDefault()
await uploadImage(file.name, Promise.resolve(file), { type: 'coords', coords: clientPos })
}
}

const handler = documentationEditorBindings.handler({
paste: () => {
window.navigator.clipboard.read().then(async (items) => {
if (markdownEditor.value == null) return
for (const item of items) {
const textType = item.types.find((type) => type === 'text/plain')
if (textType) {
const blob = await item.getType(textType)
markdownEditor.value.putText(await blob.text())
break
}
const imageType = item.types.find((type) => type in supportedImageTypes)
if (imageType) {
const ext = supportedImageTypes[imageType]?.extension ?? ''
uploadImage(`image.${ext}`, item.getType(imageType)).catch((err) =>
uploadErrorToast.show(`Failed to upload image: ${err}`),
)
break
}
}
})
},
})
</script>

<template>
@@ -89,8 +215,14 @@ watch(
<div ref="toolbarElement" class="toolbar">
<FullscreenButton v-model="fullscreen" />
</div>
<div class="scrollArea">
<div
class="scrollArea"
@keydown="handler"
@dragover.prevent
@drop.prevent="handleFileDrop($event)"
>
<MarkdownEditor
ref="markdownEditor"
:yText="yText"
:transformImageUrl="transformImageUrl"
:toolbarContainer="toolbarElement"
@@ -664,11 +664,6 @@ async function handleFileDrop(event: DragEvent) {
const MULTIPLE_FILES_GAP = 50

if (!event.dataTransfer?.items) return
const projectRootId = await projectStore.projectRootId
if (projectRootId == null) {
toasts.userActionFailed.show(`Unable to upload file(s): Could not identify project root.`)
return
}
;[...event.dataTransfer.items].forEach(async (item, index) => {
if (item.kind === 'file') {
const file = item.getAsFile()
@@ -677,10 +672,7 @@ async function handleFileDrop(event: DragEvent) {
const offset = new Vec2(0, index * -MULTIPLE_FILES_GAP)
const pos = graphNavigator.clientToScenePos(clientPos).add(offset)
const uploader = Uploader.Create(
projectStore.lsRpcConnection,
projectStore.dataConnection,
projectRootId,
projectStore.awareness,
projectStore,
file,
pos,
projectStore.isOnLocalBackend,
@@ -1,15 +1,14 @@
import { Awareness } from '@/stores/awareness'
import { ProjectFiles, useProjectFiles } from '@/stores/projectFiles'
import { Vec2 } from '@/util/data/vec2'
import type { DataServer } from '@/util/net/dataServer'
import { Keccak, sha3_224 as SHA3 } from '@noble/hashes/sha3'
import type { Hash } from '@noble/hashes/utils'
import { bytesToHex } from '@noble/hashes/utils'
import { markRaw, toRaw } from 'vue'
import { escapeTextLiteral } from 'ydoc-shared/ast/text'
import type { LanguageServer } from 'ydoc-shared/languageServer'
import { ErrorCode, RemoteRpcError } from 'ydoc-shared/languageServer'
import type { Path, StackItem, Uuid } from 'ydoc-shared/languageServerTypes'
import { Err, Ok, withContext, type Result } from 'ydoc-shared/util/data/result'
import { Err, Ok, type Result } from 'ydoc-shared/util/data/result'

// === Constants ===

@@ -47,13 +46,17 @@ export class Uploader {
private checksum: Hash<Keccak>
private uploadedBytes: bigint
private stackItem: StackItem
private awareness: Awareness
private projectFiles: ProjectFiles

private constructor(
private rpc: LanguageServer,
private binary: DataServer,
private awareness: Awareness,
projectStore: {
projectRootId: Promise<Uuid | undefined>
lsRpcConnection: LanguageServer
dataConnection: DataServer
awareness: Awareness
},
private file: File,
private projectRootId: Uuid,
private position: Vec2,
private isOnLocalBackend: boolean,
private disableDirectRead: boolean,
@@ -62,14 +65,18 @@ export class Uploader {
this.checksum = SHA3.create()
this.uploadedBytes = BigInt(0)
this.stackItem = markRaw(toRaw(stackItem))
this.awareness = projectStore.awareness
this.projectFiles = useProjectFiles(projectStore)
}

/** Constructor */
static Create(
rpc: LanguageServer,
binary: DataServer,
projectRootId: Uuid,
awareness: Awareness,
projectStore: {
projectRootId: Promise<Uuid | undefined>
lsRpcConnection: LanguageServer
dataConnection: DataServer
awareness: Awareness
},
file: File,
position: Vec2,
isOnLocalBackend: boolean,
@@ -77,11 +84,8 @@ export class Uploader {
stackItem: StackItem,
): Uploader {
return new Uploader(
rpc,
binary,
awareness,
projectStore,
file,
projectRootId,
position,
isOnLocalBackend,
disableDirectRead,
@@ -100,20 +104,29 @@ export class Uploader {
) {
return Ok({ source: 'FileSystemRoot', name: this.file.path })
}
const dataDirExists = await this.ensureDataDirExists()
const rootId = await this.projectFiles.projectRootId
if (rootId == null) return Err('Could not identify project root.')
const dataDirPath = { rootId, segments: [DATA_DIR_NAME] }
const dataDirExists = await this.projectFiles.ensureDirExists(dataDirPath)
if (!dataDirExists.ok) return dataDirExists
const name = await this.pickUniqueName(this.file.name)
const name = await this.projectFiles.pickUniqueName(dataDirPath, this.file.name)
if (!name.ok) return name
this.awareness.addOrUpdateUpload(name.value, {
sizePercentage: 0,
position: this.position,
stackItem: this.stackItem,
})
const remotePath: Path = { rootId: this.projectRootId, segments: [DATA_DIR_NAME, name.value] }
const remotePath: Path = { rootId, segments: [DATA_DIR_NAME, name.value] }
const cleanup = this.cleanup.bind(this, name.value)
const writableStream = new WritableStream<Uint8Array>({
write: async (chunk: Uint8Array) => {
await this.binary.writeBytes(remotePath, this.uploadedBytes, false, chunk)
const result = await this.projectFiles.writeBytes(
remotePath,
this.uploadedBytes,
false,
chunk,
)
if (!result.ok) throw result.error
this.checksum.update(chunk)
this.uploadedBytes += BigInt(chunk.length)
const bytes = Number(this.uploadedBytes)
@@ -127,13 +140,13 @@ export class Uploader {
close: cleanup,
abort: async (reason: string) => {
cleanup()
await this.rpc.deleteFile(remotePath)
await this.projectFiles.deleteFile(remotePath)
throw new Error(`Uploading process aborted. ${reason}`)
},
})
// Disabled until https://github.com/enso-org/enso/issues/6691 is fixed.
// Plus, handle the error here, as it should be displayed to the user.
// uploader.assertChecksum(remotePath)
// this.projectFiles.assertChecksum(remotePath)
await this.file.stream().pipeTo(writableStream)
return Ok({ source: 'Project', name: name.value })
}
@@ -141,76 +154,4 @@ export class Uploader {
private cleanup(name: string) {
this.awareness.removeUpload(name)
}

private async assertChecksum(path: Path): Promise<Result<void>> {
const engineChecksum = await this.rpc.fileChecksum(path)
if (!engineChecksum.ok) return engineChecksum
const hexChecksum = bytesToHex(this.checksum.digest())
if (hexChecksum != engineChecksum.value.checksum) {
return Err(
`Uploading file failed, checksum does not match. ${hexChecksum} != ${engineChecksum.value.checksum}`,
)
} else {
return Ok()
}
}

private dataDirPath(): Path {
return { rootId: this.projectRootId, segments: [DATA_DIR_NAME] }
}

private async ensureDataDirExists() {
const exists = await this.dataDirExists()
if (!exists.ok) return exists
if (exists.value) return Ok()
return await withContext(
() => 'When creating directory for uploaded file',
async () => {
return await this.rpc.createFile({
type: 'Directory',
name: DATA_DIR_NAME,
path: { rootId: this.projectRootId, segments: [] },
})
},
)
}

private async dataDirExists(): Promise<Result<boolean>> {
const info = await this.rpc.fileInfo(this.dataDirPath())
if (info.ok) return Ok(info.value.attributes.kind.type == 'Directory')
else if (
info.error.payload.cause instanceof RemoteRpcError &&
(info.error.payload.cause.code === ErrorCode.FILE_NOT_FOUND ||
info.error.payload.cause.code === ErrorCode.CONTENT_ROOT_NOT_FOUND)
) {
return Ok(false)
} else {
return info
}
}

private async pickUniqueName(suggestedName: string): Promise<Result<string>> {
const files = await this.rpc.listFiles(this.dataDirPath())
if (!files.ok) return files
const existingNames = new Set(files.value.paths.map((path) => path.name))
const { stem, extension = '' } = splitFilename(suggestedName)
let candidate = suggestedName
let num = 1
while (existingNames.has(candidate)) {
candidate = `${stem}_${num}.${extension}`
num += 1
}
return Ok(candidate)
}
}

/** Split filename into stem and (optional) extension. */
function splitFilename(fileName: string): { stem: string; extension?: string } {
const dotIndex = fileName.lastIndexOf('.')
if (dotIndex !== -1 && dotIndex !== 0) {
const stem = fileName.substring(0, dotIndex)
const extension = fileName.substring(dotIndex + 1)
return { stem, extension }
}
return { stem: fileName }
}
@@ -1,6 +1,7 @@
<script setup lang="ts">
import type { UrlTransformer } from '@/components/MarkdownEditor/imageUrlTransformer'
import { defineAsyncComponent } from 'vue'
import { Vec2 } from '@/util/data/vec2'
import { ComponentInstance, computed, defineAsyncComponent, ref } from 'vue'
import * as Y from 'yjs'

const props = defineProps<{
@@ -9,13 +10,25 @@ const props = defineProps<{
toolbarContainer: HTMLElement | undefined
}>()

const inner = ref<ComponentInstance<typeof LazyMarkdownEditor>>()

const LazyMarkdownEditor = defineAsyncComponent(
() => import('@/components/MarkdownEditor/MarkdownEditorImpl.vue'),
)

defineExpose({
loaded: computed(() => inner.value != null),
putText: (text: string) => {
inner.value?.putText(text)
},
putTextAtCoord: (text: string, coords: Vec2) => {
inner.value?.putTextAtCoords(text, coords)
},
})
</script>

<template>
<Suspense>
<LazyMarkdownEditor v-bind="props" />
<LazyMarkdownEditor ref="inner" v-bind="props" />
</Suspense>
</template>
@@ -40,5 +40,10 @@ onUnmounted(() => {
</script>

<template>
<img :src="data?.ok ? data.value.url : ''" :alt="alt" :title="title" />
<img
:src="data?.ok ? data.value.url : ''"
:alt="alt"
:title="title"
:class="{ uploading: data?.ok && data.value.uploading?.value }"
/>
</template>
@@ -7,7 +7,8 @@ import {
} from '@/components/MarkdownEditor/imageUrlTransformer'
import { ensoMarkdown } from '@/components/MarkdownEditor/markdown'
import VueComponentHost from '@/components/VueComponentHost.vue'
import { EditorState } from '@codemirror/state'
import { Vec2 } from '@/util/data/vec2'
import { EditorState, Text } from '@codemirror/state'
import { EditorView } from '@codemirror/view'
import { minimalSetup } from 'codemirror'
import { type ComponentInstance, onMounted, ref, toRef, useCssModule, watch } from 'vue'
@@ -48,6 +49,31 @@ onMounted(() => {
})

const editing = ref(false)

/**
 * Replace text in given document range with `text`, putting text cursor after inserted text.
 *
 * If text contains multiple lines, it should use '\n', not '\r\n' for line endings.
 */
function putTextAt(text: string, from: number, to: number) {
const insert = Text.of(text.split('\n'))
editorView.dispatch({
changes: { from, to, insert },
selection: { anchor: from + insert.length },
})
}

defineExpose({
putText: (text: string) => {
const range = editorView.state.selection.main
putTextAt(text, range.from, range.to)
},
putTextAt,
putTextAtCoords: (text: string, coords: Vec2) => {
const pos = editorView.posAtCoords(coords, false)
putTextAt(text, pos, pos)
},
})
</script>

<template>
@@ -70,6 +96,10 @@ const editing = ref(false)
overscroll-behavior: none;
}

:deep(img.uploading) {
opacity: 0.5;
}

.EditorRoot :deep(.cm-editor) {
position: relative;
width: 100%;
@@ -104,6 +104,21 @@ test.each([
alt: '',
},
},
{
markdown: '![](<https://www.example.com/The image.avif>)',
image: {
src: 'https://www.example.com/The image.avif',
alt: '',
},
},
{
markdown: '![](<https://www.example.com/The image.avif)',
image: null,
},
{
markdown: '![](https://www.example.com/The image.avif)',
image: null,
},
{
markdown: '![Image](https://www.example.com/image.avif',
image: null,
@@ -1,9 +1,22 @@
import { createContextStore } from '@/providers'
import type { ToValue } from '@/util/reactivity'
import { toValue } from 'vue'
import { Ref, toValue } from 'vue'
import { mapOk, Ok, type Result } from 'ydoc-shared/util/data/result'

export type TransformUrlResult = Result<{ url: string; dispose?: () => void }>
/**
 * A transformed URL.
 *
 * Once the returned URL is no longer used, the `dispose` callback is called, allowing the release
 * of any resources held under that URL.
 *
 * `uploading` is set to true while the image is being uploaded to its target destination
 * (as part of pasting an image, for example).
 */
export type TransformUrlResult = Result<{
url: string
dispose?: () => void
uploading?: Ref<boolean>
}>
export type UrlTransformer = (url: string) => Promise<TransformUrlResult>

export {
@@ -22,11 +35,17 @@ type Url = string
export interface ResourceInfo<T> {
location: T
uniqueId: ResourceId
uploading?: Ref<boolean>
}
export type ResourceLocator<T> = (url: Url) => Promise<Result<ResourceInfo<T>> | undefined>
export type ResourceFetcher<T> = (locator: T) => Promise<Result<Blob>>

/** TODO: Add docs */
/**
 * Create {@link UrlTransformer} which fetches and caches the image. Returns a URL created
 * with `URL.createObjectURL`.
 *
 * May be used in cases when the image is not available to the browser over HTTP.
 */
export function fetcherUrlTransformer<ResourceLocation>(
locateResource: ResourceLocator<ResourceLocation>,
fetchResource: ResourceFetcher<ResourceLocation>,
@@ -48,7 +67,7 @@ export function fetcherUrlTransformer<ResourceLocation>(
} else if (!resource.ok) {
return resource
} else {
const { uniqueId, location } = resource.value
const { uniqueId, location, uploading } = resource.value
const result = await (allocatedUrls.get(uniqueId) ?? startFetch(uniqueId, location))
if (!result.ok) {
// Changes to external state may allow a future attempt to succeed.
@@ -64,6 +83,7 @@ export function fetcherUrlTransformer<ResourceLocation>(
allocatedUrls.delete(uniqueId)
}
},
uploading,
})
}
}
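For orientation, a minimal sketch of wiring up `fetcherUrlTransformer`, assuming a hypothetical `fetchProjectFile` helper; the real caller in this commit supplies a module-path locator and a Language Server-backed fetcher instead:

```ts
import { fetcherUrlTransformer } from '@/components/MarkdownEditor/imageUrlTransformer'
import { Ok, type Result } from 'ydoc-shared/util/data/result'

// Hypothetical fetcher standing in for a project-file reader returning Result<Blob>.
declare function fetchProjectFile(path: string): Promise<Result<Blob>>

// Sketch only: resolve project-relative URLs to a cache key, then fetch them as Blobs;
// absolute URLs are left for the browser to handle by returning `undefined`.
const transformImageUrl = fetcherUrlTransformer(
  async (url: string) => {
    if (!url.startsWith('/')) return undefined
    return Ok({ location: url, uniqueId: url })
  },
  (path) => fetchProjectFile(path),
)
```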
@@ -144,12 +144,10 @@ function parseLinkLike(node: SyntaxNode, doc: Text) {
if (!textOpen) return
const textClose = textOpen.nextSibling // ]
if (!textClose) return
const urlOpen = textClose.nextSibling // (
// The parser accepts partial links such as `[Missing url]`.
if (!urlOpen) return
const urlNode = urlOpen.nextSibling
// If the URL is empty, this will be the closing 'LinkMark'.
if (urlNode?.name !== 'URL') return
const urlNode = findNextSiblingNamed(textClose, 'URL')
if (!urlNode) return
console.log('RANGE', urlNode.from, urlNode.to)
console.log(doc)
return {
textFrom: textOpen.to,
textTo: textClose.from,
@@ -268,3 +266,11 @@ class ImageWidget extends WidgetType {
this.container = undefined
}
}

function findNextSiblingNamed(node: SyntaxNode, name: string) {
for (let sibling = node.nextSibling; sibling != null; sibling = sibling.nextSibling) {
if (sibling.name === name) {
return sibling
}
}
}
@@ -32,4 +32,15 @@ declare module '@lezer/markdown' {
writeElements: (elts: readonly Element[], offset?: number) => Buffer
finish: (type: number, length: number) => Tree
}

export interface InlineDelimiter {
readonly type: DelimiterType
readonly from: number
readonly to: number
side: Mark
}

export interface InlineContext {
parts: (Element | InlineDelimiter | null)[]
}
}
@@ -1,7 +1,17 @@
import { markdown as baseMarkdown, markdownLanguage } from '@codemirror/lang-markdown'
import type { Extension } from '@codemirror/state'
import type { Tree } from '@lezer/common'
import type { BlockContext, BlockParser, Line, MarkdownParser, NodeSpec } from '@lezer/markdown'
import type {
BlockContext,
BlockParser,
DelimiterType,
InlineContext,
InlineDelimiter,
InlineParser,
Line,
MarkdownParser,
NodeSpec,
} from '@lezer/markdown'
import { Element } from '@lezer/markdown'
import { assertDefined } from 'ydoc-shared/util/assert'

@@ -18,6 +28,7 @@ export function markdown(): Extension {
extensions: [
{
parseBlock: [headerParser, bulletList, orderedList, blockquoteParser, disableSetextHeading],
parseInline: [linkParser, imageParser, linkEndParser],
defineNodes: [blockquoteNode],
},
],
@@ -138,8 +149,8 @@ const blockquoteNode: NodeSpec = {
},
}

function elt(type: number, from: number, to: number): Element {
return new (Element as any)(type, from, to)
function elt(type: number, from: number, to: number, children?: readonly Element[]): Element {
return new (Element as any)(type, from, to, children)
}

function isBlockquote(line: Line) {
@@ -196,6 +207,212 @@ function getListIndent(line: Line, pos: number) {
return indented >= indentAfter + 5 ? indentAfter + 1 : indented
}

// === Link ===

const enum Mark {
None = 0,
Open = 1,
Close = 2,
}

const LinkStart: DelimiterType = {}
const ImageStart: DelimiterType = {}

const linkParser: InlineParser = {
name: 'Link',
parse: (cx, next, start) => {
return next == 91 /* '[' */ ? cx.addDelimiter(LinkStart, start, start + 1, true, false) : -1
},
}

const imageParser: InlineParser = {
name: 'Image',
parse: (cx, next, start) => {
return next == 33 /* '!' */ && cx.char(start + 1) == 91 /* '[' */ ?
cx.addDelimiter(ImageStart, start, start + 2, true, false)
: -1
},
}

const linkEndParser: InlineParser = {
name: 'LinkEnd',
parse: (cx, next, start) => {
if (next != 93 /* ']' */) return -1
// Scanning back to the next link/image start marker
const openDelim = cx.findOpeningDelimiter(LinkStart) ?? cx.findOpeningDelimiter(ImageStart)
if (openDelim == null) return -1
const part = cx.parts[openDelim] as InlineDelimiter
// If this one has been set invalid (because it would produce
// a nested link) or there's no valid link here ignore both.
if (
!part.side ||
(cx.skipSpace(part.to) == start && !/[([]/.test(cx.slice(start + 1, start + 2)))
) {
cx.parts[openDelim] = null
return -1
}
// Finish the content and replace the entire range in
// this.parts with the link/image node.
const content = cx.takeContent(openDelim)
const link = (cx.parts[openDelim] = finishLink(
cx,
content,
part.type == LinkStart ? getType(cx, 'Link') : getType(cx, 'Image'),
part.from,
start + 1,
))
// Set any open-link markers before this link to invalid.
if (part.type == LinkStart)
for (let j = 0; j < openDelim; j++) {
const p = cx.parts[j]
if (p != null && !(p instanceof Element) && p.type == LinkStart) p.side = Mark.None
}
return link.to
},
}

function finishLink(
cx: InlineContext,
content: Element[],
type: number,
start: number,
startPos: number,
) {
const { text } = cx,
next = cx.char(startPos)
let endPos = startPos
const LinkMarkType = getType(cx, 'LinkMark')
const ImageType = getType(cx, 'Image')
content.unshift(elt(LinkMarkType, start, start + (type == ImageType ? 2 : 1)))
content.push(elt(LinkMarkType, startPos - 1, startPos))
if (next == 40 /* '(' */) {
let pos = cx.skipSpace(startPos + 1)
const dest = parseURL(text, pos - cx.offset, cx.offset, getType(cx, 'URL'), LinkMarkType)
let title
if (dest) {
const last = dest.at(-1)!
pos = cx.skipSpace(last.to)
// The destination and title must be separated by whitespace
if (pos != last.to) {
title = parseLinkTitle(text, pos - cx.offset, cx.offset, getType(cx, 'LinkTitle'))
if (title) pos = cx.skipSpace(title.to)
}
}
if (cx.char(pos) == 41 /* ')' */) {
content.push(elt(LinkMarkType, startPos, startPos + 1))
endPos = pos + 1
if (dest) content.push(...dest)
if (title) content.push(title)
content.push(elt(LinkMarkType, pos, endPos))
}
} else if (next == 91 /* '[' */) {
const label = parseLinkLabel(
text,
startPos - cx.offset,
cx.offset,
false,
getType(cx, 'LinkLabelType'),
)
if (label) {
content.push(label)
endPos = label.to
}
}
return elt(type, start, endPos, content)
}

// These return `null` when falling off the end of the input, `false`
// when parsing fails otherwise (for use in the incremental link
// reference parser).
function parseURL(
text: string,
start: number,
offset: number,
urlType: number,
linkMarkType: number,
): null | false | Element[] {
const next = text.charCodeAt(start)
if (next == 60 /* '<' */) {
for (let pos = start + 1; pos < text.length; pos++) {
const ch = text.charCodeAt(pos)
if (ch == 62 /* '>' */)
return [
elt(linkMarkType, start + offset, start + offset + 1),
elt(urlType, start + offset + 1, pos + offset),
elt(linkMarkType, pos + offset, pos + offset + 1),
]
if (ch == 60 || ch == 10 /* '<\n' */) return false
}
return null
} else {
let depth = 0,
pos = start
for (let escaped = false; pos < text.length; pos++) {
const ch = text.charCodeAt(pos)
if (isSpace(ch)) {
break
} else if (escaped) {
escaped = false
} else if (ch == 40 /* '(' */) {
depth++
} else if (ch == 41 /* ')' */) {
if (!depth) break
depth--
} else if (ch == 92 /* '\\' */) {
escaped = true
}
}
return (
pos > start ? [elt(urlType, start + offset, pos + offset)]
: pos == text.length ? null
: false
)
}
}

function parseLinkTitle(
text: string,
start: number,
offset: number,
linkTitleType: number,
): null | false | Element {
const next = text.charCodeAt(start)
if (next != 39 && next != 34 && next != 40 /* '"\'(' */) return false
const end = next == 40 ? 41 : next
for (let pos = start + 1, escaped = false; pos < text.length; pos++) {
const ch = text.charCodeAt(pos)
if (escaped) escaped = false
else if (ch == end) return elt(linkTitleType, start + offset, pos + 1 + offset)
else if (ch == 92 /* '\\' */) escaped = true
}
return null
}

function parseLinkLabel(
text: string,
start: number,
offset: number,
requireNonWS: boolean,
linkLabelType: number,
): null | false | Element {
for (
let escaped = false, pos = start + 1, end = Math.min(text.length, pos + 999);
pos < end;
pos++
) {
const ch = text.charCodeAt(pos)
if (escaped) escaped = false
else if (ch == 93 /* ']' */)
return requireNonWS ? false : elt(linkLabelType, start + offset, pos + 1 + offset)
else {
if (requireNonWS && !isSpace(ch)) requireNonWS = false
if (ch == 91 /* '[' */) return false
else if (ch == 92 /* '\\' */) escaped = true
}
}
return null
}

// === Debugging ===

/** Represents the structure of a {@link Tree} in a JSON-compatible format. */
@@ -60,6 +60,12 @@ func2 a =
r

## The main method

Here we test images:

![Image](/images/image.png)
![Image](../images/image.png)
![Image](</images/image.png>)
main =
five = 5
ten = 10
@@ -84,6 +90,16 @@ const fileTree = {
return mainFile
},
},
images: {
get 'image.png'() {
return new Uint16Array([
20617, 18254, 2573, 2586, 0, 3328, 18505, 21060, 0, 768, 0, 768, 772, 0, 41984, 43014, 140,
0, 20501, 21580, 65093, 13106, 11262, 64043, 27756, 24571, 64863, 14906, 12030, 65070,
10023, 29424, 11222, 0, 4352, 17481, 21569, 55048, 28771, 24661, 4960, 24672, 52, 768, 161,
21933, 29603, 124, 0, 18688, 20037, 44612, 24642, 130,
]).buffer
},
},
}

const visualizations = new Map<Uuid, VisualizationConfiguration>()
@@ -31,13 +31,9 @@ import {
type WatchSource,
type WritableComputedRef,
} from 'vue'
import {
Error as DataError,
OutboundPayload,
VisualizationUpdate,
} from 'ydoc-shared/binaryProtocol'
import { OutboundPayload, VisualizationUpdate } from 'ydoc-shared/binaryProtocol'
import { LanguageServer } from 'ydoc-shared/languageServer'
import type { Diagnostic, ExpressionId, MethodPointer, Path } from 'ydoc-shared/languageServerTypes'
import type { Diagnostic, ExpressionId, MethodPointer } from 'ydoc-shared/languageServerTypes'
import { type AbortScope } from 'ydoc-shared/util/net'
import {
DistributedProject,
@@ -130,7 +126,9 @@ export const { provideFn: provideProjectStore, injectFn: useProjectStore } = cre
const clientId = random.uuidv4() as Uuid
const lsUrls = resolveLsUrl(config.value)
const lsRpcConnection = createLsRpcConnection(clientId, lsUrls.rpcUrl, abort)
const contentRoots = lsRpcConnection.contentRoots
const projectRootId = lsRpcConnection.contentRoots.then(
(roots) => roots.find((root) => root.type === 'Project')?.id,
)

const dataConnection = initializeDataConnection(clientId, lsUrls.dataUrl, abort)
const rpcUrl = new URL(lsUrls.rpcUrl)
@@ -384,22 +382,6 @@ export const { provideFn: provideProjectStore, injectFn: useProjectStore } = cre
}
})

const projectRootId = contentRoots.then(
(roots) => roots.find((root) => root.type === 'Project')?.id,
)

async function readFileBinary(path: Path): Promise<Result<Blob>> {
const result = await dataConnection.readFile(path)
if (result instanceof DataError) {
return Err(result.message() ?? 'Failed to read file.')
}
const contents = result.contentsArray()
if (contents == null) {
return Err('No file contents received.')
}
return Ok(new Blob([contents]))
}

return proxyRefs({
setObservedFileName(name: string) {
observedFileName.value = name
@@ -423,7 +405,6 @@ export const { provideFn: provideProjectStore, injectFn: useProjectStore } = cre
computedValueRegistry: markRaw(computedValueRegistry),
lsRpcConnection: markRaw(lsRpcConnection),
dataConnection: markRaw(dataConnection),
readFileBinary,
useVisualizationData,
isRecordingEnabled,
stopCapturingUndo,
app/gui/src/project-view/stores/projectFiles.ts (new file, 150 lines)
@@ -0,0 +1,150 @@
import { DataServer } from '@/util/net/dataServer'
import { bytesToHex, Hash } from '@noble/hashes/utils'
import { Error as DataError } from 'ydoc-shared/binaryProtocol'
import { ErrorCode, LanguageServer, RemoteRpcError } from 'ydoc-shared/languageServer'
import { Path, Uuid } from 'ydoc-shared/languageServerTypes'
import { Err, Ok, Result, withContext } from 'ydoc-shared/util/data/result'

export type ProjectFiles = ReturnType<typeof useProjectFiles>

/**
 * A composable with project file operations.
 */
export function useProjectFiles(projectStore: {
projectRootId: Promise<Uuid | undefined>
lsRpcConnection: LanguageServer
dataConnection: DataServer
}) {
const { projectRootId, lsRpcConnection: lsRpc, dataConnection } = projectStore

async function readFileBinary(path: Path): Promise<Result<Blob>> {
const result = await dataConnection.readFile(path)
if (result instanceof DataError) {
return Err(result.message() ?? 'Failed to read file.')
}
const contents = result.contentsArray()
if (contents == null) {
return Err('No file contents received.')
}
return Ok(new Blob([contents]))
}

async function writeFileBinary(path: Path, content: Blob): Promise<Result> {
const result = await dataConnection.writeFile(path, await content.arrayBuffer())
if (result instanceof DataError) {
return Err(result.message() ?? 'Failed to write file.')
}
return Ok()
}

async function writeBytes(
path: Path,
offset: bigint,
overwriteExisting: boolean,
contents: string | ArrayBuffer | Uint8Array,
): Promise<Result> {
const result = await dataConnection.writeBytes(path, offset, overwriteExisting, contents)
if (result instanceof DataError) {
return Err(result.message() ?? 'Failed to write bytes.')
}
return Ok()
}

async function deleteFile(path: Path) {
return lsRpc.deleteFile(path)
}

/** Check if a directory exists and try to create one if missing. */
async function ensureDirExists(path: Path): Promise<Result<void>> {
const exists = await dirExists(path)
if (!exists.ok) return exists
if (exists.value) return Ok()

const name = path.segments.at(-1)
if (name == null) return Err('Cannot create context root')

return await withContext(
() => 'When creating directory for uploaded file',
async () => {
return await lsRpc.createFile({
type: 'Directory',
name,
path: { rootId: path.rootId, segments: path.segments.slice(0, -1) },
})
},
)
}

/**
 * Check if directory exists. If it does not, or it is a file, `Ok(false)` is returned.
 * In case of error, the directory existence is not confirmed nor disproved.
 */
async function dirExists(path: Path): Promise<Result<boolean>> {
const info = await lsRpc.fileInfo(path)
if (info.ok) return Ok(info.value.attributes.kind.type == 'Directory')
else if (
info.error.payload.cause instanceof RemoteRpcError &&
(info.error.payload.cause.code === ErrorCode.FILE_NOT_FOUND ||
info.error.payload.cause.code === ErrorCode.CONTENT_ROOT_NOT_FOUND)
) {
return Ok(false)
} else {
return info
}
}

/**
 * Return a name for a file which does not collide with existing files in `path`.
 *
 * The first choice is `suggestedName`; otherwise a numeric suffix is applied to the stem.
 */
async function pickUniqueName(path: Path, suggestedName: string): Promise<Result<string>> {
const files = await lsRpc.listFiles(path)
if (!files.ok) return files
const existingNames = new Set(files.value.paths.map((path) => path.name))
const { stem, extension = '' } = splitFilename(suggestedName)
let candidate = suggestedName
let num = 1
while (existingNames.has(candidate)) {
candidate = `${stem}_${num}.${extension}`
num += 1
}
return Ok(candidate)
}

async function assertChecksum<T extends Hash<T>>(
path: Path,
checksum: Hash<T>,
): Promise<Result<void>> {
const engineChecksum = await lsRpc.fileChecksum(path)
if (!engineChecksum.ok) return engineChecksum
const hexChecksum = bytesToHex(checksum.digest())
if (hexChecksum != engineChecksum.value.checksum) {
return Err(`Checksum does not match. ${hexChecksum} != ${engineChecksum.value.checksum}`)
} else {
return Ok()
}
}

return {
projectRootId,
readFileBinary,
writeFileBinary,
writeBytes,
deleteFile,
ensureDirExists,
pickUniqueName,
assertChecksum,
}
}

/** Split filename into stem and (optional) extension. */
function splitFilename(fileName: string): { stem: string; extension?: string } {
const dotIndex = fileName.lastIndexOf('.')
if (dotIndex !== -1 && dotIndex !== 0) {
const stem = fileName.substring(0, dotIndex)
const extension = fileName.substring(dotIndex + 1)
return { stem, extension }
}
return { stem: fileName }
}
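A brief self-contained illustration of the naming behaviour sketched from the code above (the pre-existing file names are assumed for the example, not taken from the PR):

```ts
// With 'image.png' and 'image_1.png' already present in the target directory,
// the pickUniqueName loop above settles on 'image_2.png'.
const existingNames = new Set(['image.png', 'image_1.png'])
const stem = 'image' // splitFilename('image.png') => { stem: 'image', extension: 'png' }
const extension = 'png'
let candidate = 'image.png'
let num = 1
while (existingNames.has(candidate)) {
  candidate = `${stem}_${num}.${extension}`
  num += 1
}
console.log(candidate) // image_2.png
```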
@@ -160,10 +160,15 @@ export class DataServer extends ObservableV2<DataServerEvents> {
return initResult.error.payload
}
}
this.websocket.send(builder.finish(rootTable).toArrayBuffer())
const promise = new Promise<T | Error>((resolve) => {
this.resolveCallbacks.set(messageUuid, resolve)
})
try {
this.websocket.send(builder.finish(rootTable).toArrayBuffer())
} catch (e: unknown) {
this.resolveCallbacks.delete(messageUuid)
throw e
}
return promise
}
@@ -1,6 +1,6 @@
// We are using `react-toastify`, since we share toast environment with dashboard.
import type { ResultError } from '@/util/data/result'
import { uuidv4 } from 'lib0/random'
// We are using `react-toastify`, since we share toast environment with dashboard.
import { toast, type ToastContent, type ToastOptions, type TypeOptions } from 'react-toastify'
import { onScopeDispose } from 'vue'

@@ -15,7 +15,25 @@ export interface UseToastOptions extends ToastOptions {
outliveScope?: boolean
}

/** TODO: Add docs */
/**
 * Composable for a new toast - a pop-up message displayed to the user.
 *
 * ```ts
 * // useToast.error is equivalent to useToast({ type: 'error' }).
 * // There's also useToast.info, useToast.warning and useToast.success.
 * const toastLspError = useToast.error()
 * // Every `useToast` allows displaying only one message at once, so
 * // here we create a separate toast for every "topic".
 * const toastExecutionFailed = useToast.error()
 * const toastUserActionFailed = useToast.error()
 * // Toasts are automatically closed after some time. Here we suppress this.
 * const toastStartup = useToast.info({ autoClose: false })
 * const toastConnectionLost = useToast.error({ autoClose: false })
 *
 * ```
 *
 * For details, see the `react-toastify` library documentation.
 */
export function useToast(options: UseToastOptions = {}) {
const id = makeToastId()
if (options?.outliveScope !== true) {
@@ -23,15 +41,18 @@ export function useToast(options: UseToastOptions = {}) {
}

return {
/** Show or update toast. */
show(content: ToastContent) {
if (toast.isActive(id)) toast.update(id, { ...options, render: content })
else toast(content, { ...options, toastId: id })
},
/** A helper for reporting {@link ResultError} to both toast and console. */
reportError<E>(result: ResultError<E>, preamble?: string) {
const msg = result.message(preamble)
console.error(msg)
this.show(msg)
},
/** Dismiss the displayed toast. */
dismiss() {
toast.dismiss(id)
},
@@ -1,6 +1,6 @@
{
"compilerOptions": {
"lib": ["ES2021", "DOM", "DOM.Iterable"],
"lib": ["ES2022", "DOM", "DOM.Iterable"],
"composite": true,
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
@@ -19,7 +19,7 @@
"resolveJsonModule": true,
"sourceMap": true,
"skipLibCheck": true,
"target": "ES2021",
"target": "ES2022",
"jsx": "react-jsx"
}
}