mirror of
https://github.com/enso-org/enso.git
synced 2024-11-22 03:32:23 +03:00
Merge branch 'develop' into wip/db/11481-intersection-type-update
This commit is contained in:
commit
d4a56cbc3d
1
.github/workflows/gui.yml
vendored
1
.github/workflows/gui.yml
vendored
@ -512,6 +512,7 @@ jobs:
|
||||
ENSO_TEST_USER: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_USERNAME }}
|
||||
ENSO_TEST_USER_PASSWORD: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_PASSWORD }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
continue-on-error: true
|
||||
- run: rm $HOME/.enso/credentials
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
@ -24,8 +24,11 @@
|
||||
component.][11452]
|
||||
- [New documentation editor provides improved Markdown editing experience, and
|
||||
paves the way for new documentation features.][11469]
|
||||
- [You can now add images to documentation panel][11547] by pasting them from
|
||||
clipboard or by drag'n'dropping image files.
|
||||
- ["Write" button in component menu allows to evaluate it separately from the
|
||||
rest of the workflow][11523].
|
||||
- [The documentation editor can now display tables][11564]
|
||||
|
||||
[11151]: https://github.com/enso-org/enso/pull/11151
|
||||
[11271]: https://github.com/enso-org/enso/pull/11271
|
||||
@ -42,7 +45,9 @@
|
||||
[11448]: https://github.com/enso-org/enso/pull/11448
|
||||
[11452]: https://github.com/enso-org/enso/pull/11452
|
||||
[11469]: https://github.com/enso-org/enso/pull/11469
|
||||
[11547]: https://github.com/enso-org/enso/pull/11547
|
||||
[11523]: https://github.com/enso-org/enso/pull/11523
|
||||
[11564]: https://github.com/enso-org/enso/pull/11564
|
||||
|
||||
#### Enso Standard Library
|
||||
|
||||
|
7
LICENSE
7
LICENSE
@ -199,3 +199,10 @@
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
---
|
||||
|
||||
This project includes components that are licensed under the MIT license. The
|
||||
full text of the MIT license and its copyright notice can be found in the
|
||||
`app/licenses/` directory.
|
||||
|
||||
|
@ -4,9 +4,10 @@ ENSO_CLOUD_API_URL=https://aaaaaaaaaa.execute-api.mars.amazonaws.com
|
||||
ENSO_CLOUD_CHAT_URL=wss://chat.example.com
|
||||
ENSO_CLOUD_SENTRY_DSN=https://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa@o0000000000000000.ingest.sentry.io/0000000000000000
|
||||
ENSO_CLOUD_STRIPE_KEY=pk_test_AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
ENSO_CLOUD_AUTH_ENDPOINT=https://aaaaaaaaaa.execute-api.mars.amazonaws.com/path/to/auth/endpoint
|
||||
ENSO_CLOUD_AMPLIFY_USER_POOL_ID=mars_AAAAAAAAA
|
||||
ENSO_CLOUD_AMPLIFY_USER_POOL_WEB_CLIENT_ID=zzzzzzzzzzzzzzzzzzzzzzzzzz
|
||||
ENSO_CLOUD_AMPLIFY_DOMAIN=somewhere.auth.mars.amazoncognito.com
|
||||
ENSO_CLOUD_AMPLIFY_REGION=mars
|
||||
ENSO_POLYGLOT_YDOC_SERVER=false
|
||||
ENSO_YDOC_LS_DEBUG=false
|
||||
ENSO_YDOC_LS_DEBUG=false
|
||||
|
@ -94,6 +94,7 @@ export function getDefines() {
|
||||
'process.env.ENSO_CLOUD_SENTRY_DSN': stringify(process.env.ENSO_CLOUD_SENTRY_DSN),
|
||||
'process.env.ENSO_CLOUD_STRIPE_KEY': stringify(process.env.ENSO_CLOUD_STRIPE_KEY),
|
||||
'process.env.ENSO_CLOUD_CHAT_URL': stringify(process.env.ENSO_CLOUD_CHAT_URL),
|
||||
'process.env.ENSO_CLOUD_AUTH_ENDPOINT': stringify(process.env.ENSO_CLOUD_AUTH_ENDPOINT),
|
||||
'process.env.ENSO_CLOUD_COGNITO_USER_POOL_ID': stringify(
|
||||
process.env.ENSO_CLOUD_COGNITO_USER_POOL_ID,
|
||||
),
|
||||
|
@ -834,17 +834,9 @@ function createPlaceholderId(from?: string): string {
|
||||
return id as string
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether a given {@link AssetId} is a placeholder id.
|
||||
*/
|
||||
/** Whether a given {@link AssetId} is a placeholder id. */
|
||||
export function isPlaceholderId(id: AssetId) {
|
||||
if (typeof id === 'string') {
|
||||
return false
|
||||
}
|
||||
|
||||
console.log('isPlaceholderId id', id, PLACEHOLDER_SIGNATURE in id)
|
||||
|
||||
return PLACEHOLDER_SIGNATURE in id
|
||||
return typeof id !== 'string' && PLACEHOLDER_SIGNATURE in id
|
||||
}
|
||||
|
||||
/**
|
||||
@ -900,7 +892,7 @@ export function createPlaceholderProjectAsset(
|
||||
title: string,
|
||||
parentId: DirectoryId,
|
||||
assetPermissions: readonly AssetPermission[],
|
||||
organization: User | null,
|
||||
user: User | null,
|
||||
path: Path | null,
|
||||
): ProjectAsset {
|
||||
return {
|
||||
@ -913,7 +905,7 @@ export function createPlaceholderProjectAsset(
|
||||
projectState: {
|
||||
type: ProjectState.new,
|
||||
volumeId: '',
|
||||
...(organization != null ? { openedBy: organization.email } : {}),
|
||||
...(user != null ? { openedBy: user.email } : {}),
|
||||
...(path != null ? { path } : {}),
|
||||
},
|
||||
extension: null,
|
||||
@ -924,6 +916,72 @@ export function createPlaceholderProjectAsset(
|
||||
}
|
||||
}
|
||||
|
||||
/** Creates a {@link DirectoryAsset} using the given values. */
|
||||
export function createPlaceholderDirectoryAsset(
|
||||
title: string,
|
||||
parentId: DirectoryId,
|
||||
assetPermissions: readonly AssetPermission[],
|
||||
): DirectoryAsset {
|
||||
return {
|
||||
type: AssetType.directory,
|
||||
id: DirectoryId(createPlaceholderId()),
|
||||
title,
|
||||
parentId,
|
||||
permissions: assetPermissions,
|
||||
modifiedAt: dateTime.toRfc3339(new Date()),
|
||||
projectState: null,
|
||||
extension: null,
|
||||
labels: [],
|
||||
description: null,
|
||||
parentsPath: '',
|
||||
virtualParentsPath: '',
|
||||
}
|
||||
}
|
||||
|
||||
/** Creates a {@link SecretAsset} using the given values. */
|
||||
export function createPlaceholderSecretAsset(
|
||||
title: string,
|
||||
parentId: DirectoryId,
|
||||
assetPermissions: readonly AssetPermission[],
|
||||
): SecretAsset {
|
||||
return {
|
||||
type: AssetType.secret,
|
||||
id: SecretId(createPlaceholderId()),
|
||||
title,
|
||||
parentId,
|
||||
permissions: assetPermissions,
|
||||
modifiedAt: dateTime.toRfc3339(new Date()),
|
||||
projectState: null,
|
||||
extension: null,
|
||||
labels: [],
|
||||
description: null,
|
||||
parentsPath: '',
|
||||
virtualParentsPath: '',
|
||||
}
|
||||
}
|
||||
|
||||
/** Creates a {@link DatalinkAsset} using the given values. */
|
||||
export function createPlaceholderDatalinkAsset(
|
||||
title: string,
|
||||
parentId: DirectoryId,
|
||||
assetPermissions: readonly AssetPermission[],
|
||||
): DatalinkAsset {
|
||||
return {
|
||||
type: AssetType.datalink,
|
||||
id: DatalinkId(createPlaceholderId()),
|
||||
title,
|
||||
parentId,
|
||||
permissions: assetPermissions,
|
||||
modifiedAt: dateTime.toRfc3339(new Date()),
|
||||
projectState: null,
|
||||
extension: null,
|
||||
labels: [],
|
||||
description: null,
|
||||
parentsPath: '',
|
||||
virtualParentsPath: '',
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link SpecialLoadingAsset}, with all irrelevant fields initialized to default
|
||||
* values.
|
||||
|
@ -804,8 +804,8 @@
|
||||
"arbitraryFieldInvalid": "This field is invalid",
|
||||
"arbitraryFieldTooShort": "This field is too short",
|
||||
"arbitraryFieldTooLong": "This field is too long",
|
||||
"arbitraryFieldTooSmall": "The value is too small, the minimum is $0",
|
||||
"arbitraryFieldTooLarge": "The value is too large, the maximum is $0",
|
||||
"arbitraryFieldTooSmall": "The value must be greater than $0",
|
||||
"arbitraryFieldTooLarge": "The value must be less than $0",
|
||||
"arbitraryFieldNotEqual": "This field is not equal to another field",
|
||||
"arbitraryFieldNotMatch": "This field does not match the pattern",
|
||||
"arbitraryFieldNotMatchAny": "This field does not match any of the patterns",
|
||||
|
@ -12,7 +12,6 @@ import * as uniqueString from 'enso-common/src/utilities/uniqueString'
|
||||
|
||||
import * as actions from './actions'
|
||||
|
||||
import invariant from 'tiny-invariant'
|
||||
import LATEST_GITHUB_RELEASES from './latestGithubReleases.json' with { type: 'json' }
|
||||
|
||||
// =================
|
||||
@ -170,12 +169,15 @@ async function mockApiInternal({ page, setupAPI }: MockParams) {
|
||||
type: backend.AssetType.directory,
|
||||
id: backend.DirectoryId('directory-' + uniqueString.uniqueString()),
|
||||
projectState: null,
|
||||
extension: null,
|
||||
title,
|
||||
modifiedAt: dateTime.toRfc3339(new Date()),
|
||||
description: null,
|
||||
labels: [],
|
||||
parentId: defaultDirectoryId,
|
||||
permissions: [],
|
||||
parentsPath: '',
|
||||
virtualParentsPath: '',
|
||||
},
|
||||
rest,
|
||||
)
|
||||
@ -192,12 +194,15 @@ async function mockApiInternal({ page, setupAPI }: MockParams) {
|
||||
type: backend.ProjectState.closed,
|
||||
volumeId: '',
|
||||
},
|
||||
extension: null,
|
||||
title,
|
||||
modifiedAt: dateTime.toRfc3339(new Date()),
|
||||
description: null,
|
||||
labels: [],
|
||||
parentId: defaultDirectoryId,
|
||||
permissions: [],
|
||||
parentsPath: '',
|
||||
virtualParentsPath: '',
|
||||
},
|
||||
rest,
|
||||
)
|
||||
@ -208,12 +213,15 @@ async function mockApiInternal({ page, setupAPI }: MockParams) {
|
||||
type: backend.AssetType.file,
|
||||
id: backend.FileId('file-' + uniqueString.uniqueString()),
|
||||
projectState: null,
|
||||
extension: '',
|
||||
title,
|
||||
modifiedAt: dateTime.toRfc3339(new Date()),
|
||||
description: null,
|
||||
labels: [],
|
||||
parentId: defaultDirectoryId,
|
||||
permissions: [],
|
||||
parentsPath: '',
|
||||
virtualParentsPath: '',
|
||||
},
|
||||
rest,
|
||||
)
|
||||
@ -227,12 +235,15 @@ async function mockApiInternal({ page, setupAPI }: MockParams) {
|
||||
type: backend.AssetType.secret,
|
||||
id: backend.SecretId('secret-' + uniqueString.uniqueString()),
|
||||
projectState: null,
|
||||
extension: null,
|
||||
title,
|
||||
modifiedAt: dateTime.toRfc3339(new Date()),
|
||||
description: null,
|
||||
labels: [],
|
||||
parentId: defaultDirectoryId,
|
||||
permissions: [],
|
||||
parentsPath: '',
|
||||
virtualParentsPath: '',
|
||||
},
|
||||
rest,
|
||||
)
|
||||
@ -571,23 +582,21 @@ async function mockApiInternal({ page, setupAPI }: MockParams) {
|
||||
const projectId = backend.ProjectId(request.url().match(/[/]projects[/]([^?/]+)/)?.[1] ?? '')
|
||||
const project = assetMap.get(projectId)
|
||||
|
||||
invariant(
|
||||
project,
|
||||
`Cannot get details for a project that does not exist. Project ID: ${projectId} \n
|
||||
if (!project) {
|
||||
throw new Error(`Cannot get details for a project that does not exist. Project ID: ${projectId} \n
|
||||
Please make sure that you've created the project before opening it.
|
||||
------------------------------------------------------------------------------------------------
|
||||
|
||||
Existing projects: ${Array.from(assetMap.values())
|
||||
.filter((asset) => asset.type === backend.AssetType.project)
|
||||
.map((asset) => asset.id)
|
||||
.join(', ')}`,
|
||||
)
|
||||
invariant(
|
||||
project.projectState,
|
||||
`Attempting to get a project that does not have a state. Usually it is a bug in the application.
|
||||
.join(', ')}`)
|
||||
}
|
||||
if (!project.projectState) {
|
||||
throw new Error(`Attempting to get a project that does not have a state. Usually it is a bug in the application.
|
||||
------------------------------------------------------------------------------------------------
|
||||
Tried to get: \n ${JSON.stringify(project, null, 2)}`,
|
||||
)
|
||||
Tried to get: \n ${JSON.stringify(project, null, 2)}`)
|
||||
}
|
||||
|
||||
return {
|
||||
organizationId: defaultOrganizationId,
|
||||
@ -635,7 +644,7 @@ async function mockApiInternal({ page, setupAPI }: MockParams) {
|
||||
const body: Body = request.postDataJSON()
|
||||
const parentId = body.parentDirectoryId
|
||||
// Can be any asset ID.
|
||||
const id = backend.DirectoryId(`directory-${uniqueString.uniqueString()}`)
|
||||
const id = backend.DirectoryId(`${assetId?.split('-')[0]}-${uniqueString.uniqueString()}`)
|
||||
const json: backend.CopyAssetResponse = {
|
||||
asset: {
|
||||
id,
|
||||
@ -681,10 +690,11 @@ async function mockApiInternal({ page, setupAPI }: MockParams) {
|
||||
|
||||
const project = assetMap.get(projectId)
|
||||
|
||||
invariant(
|
||||
project,
|
||||
`Tried to open a project that does not exist. Project ID: ${projectId} \n Please make sure that you've created the project before opening it.`,
|
||||
)
|
||||
if (!project) {
|
||||
throw new Error(
|
||||
`Tried to open a project that does not exist. Project ID: ${projectId} \n Please make sure that you've created the project before opening it.`,
|
||||
)
|
||||
}
|
||||
|
||||
if (project?.projectState) {
|
||||
object.unsafeMutable(project.projectState).type = backend.ProjectState.opened
|
||||
|
@ -85,6 +85,7 @@ export const componentBrowser = componentLocator('.ComponentBrowser')
|
||||
export const nodeOutputPort = componentLocator('.outputPortHoverArea')
|
||||
export const smallPlusButton = componentLocator('.SmallPlusButton')
|
||||
export const editorRoot = componentLocator('.EditorRoot')
|
||||
export const nodeComment = componentLocator('.GraphNodeComment div[contentEditable]')
|
||||
|
||||
/**
|
||||
* A not-selected variant of Component Browser Entry.
|
||||
|
@ -33,8 +33,8 @@ test('Copy node with comment', async ({ page }) => {
|
||||
|
||||
// Check state before operation.
|
||||
const originalNodes = await locate.graphNode(page).count()
|
||||
await expect(page.locator('.GraphNodeComment')).toExist()
|
||||
const originalNodeComments = await page.locator('.GraphNodeComment').count()
|
||||
await expect(locate.nodeComment(page)).toExist()
|
||||
const originalNodeComments = await locate.nodeComment(page).count()
|
||||
|
||||
// Select a node.
|
||||
const nodeToCopy = locate.graphNodeByBinding(page, 'final')
|
||||
@ -48,7 +48,7 @@ test('Copy node with comment', async ({ page }) => {
|
||||
|
||||
// Node and comment have been copied.
|
||||
await expect(locate.graphNode(page)).toHaveCount(originalNodes + 1)
|
||||
await expect(page.locator('.GraphNodeComment')).toHaveCount(originalNodeComments + 1)
|
||||
await expect(locate.nodeComment(page)).toHaveCount(originalNodeComments + 1)
|
||||
})
|
||||
|
||||
test('Copy multiple nodes', async ({ page }) => {
|
||||
@ -56,8 +56,8 @@ test('Copy multiple nodes', async ({ page }) => {
|
||||
|
||||
// Check state before operation.
|
||||
const originalNodes = await locate.graphNode(page).count()
|
||||
await expect(page.locator('.GraphNodeComment')).toExist()
|
||||
const originalNodeComments = await page.locator('.GraphNodeComment').count()
|
||||
await expect(locate.nodeComment(page)).toExist()
|
||||
const originalNodeComments = await locate.nodeComment(page).count()
|
||||
|
||||
// Select some nodes.
|
||||
const node1 = locate.graphNodeByBinding(page, 'final')
|
||||
@ -76,7 +76,7 @@ test('Copy multiple nodes', async ({ page }) => {
|
||||
// Nodes and comment have been copied.
|
||||
await expect(locate.graphNode(page)).toHaveCount(originalNodes + 2)
|
||||
// `final` node has a comment.
|
||||
await expect(page.locator('.GraphNodeComment')).toHaveCount(originalNodeComments + 1)
|
||||
await expect(locate.nodeComment(page)).toHaveCount(originalNodeComments + 1)
|
||||
// Check that two copied nodes are isolated, i.e. connected to each other, not original nodes.
|
||||
await expect(locate.graphNodeByBinding(page, 'prod1')).toBeVisible()
|
||||
await expect(locate.graphNodeByBinding(page, 'final1')).toBeVisible()
|
||||
|
75
app/gui/e2e/project-view/nodeComments.spec.ts
Normal file
75
app/gui/e2e/project-view/nodeComments.spec.ts
Normal file
@ -0,0 +1,75 @@
|
||||
import test from 'playwright/test'
|
||||
import * as actions from './actions'
|
||||
import { expect } from './customExpect'
|
||||
import { CONTROL_KEY } from './keyboard'
|
||||
import * as locate from './locate'
|
||||
|
||||
test('Edit comment by click', async ({ page }) => {
|
||||
await actions.goToGraph(page)
|
||||
const nodeComment = locate.nodeComment(locate.graphNodeByBinding(page, 'final'))
|
||||
await expect(nodeComment).toHaveText('This node can be entered')
|
||||
|
||||
await nodeComment.click()
|
||||
await page.keyboard.press(`${CONTROL_KEY}+A`)
|
||||
const NEW_COMMENT = 'New comment text'
|
||||
await nodeComment.fill(NEW_COMMENT)
|
||||
await page.keyboard.press(`Enter`)
|
||||
await expect(nodeComment).not.toBeFocused()
|
||||
await expect(nodeComment).toHaveText(NEW_COMMENT)
|
||||
})
|
||||
|
||||
test('Start editing comment via menu', async ({ page }) => {
|
||||
await actions.goToGraph(page)
|
||||
const node = locate.graphNodeByBinding(page, 'final')
|
||||
await node.click()
|
||||
await locate.circularMenu(node).getByRole('button', { name: 'More' }).click()
|
||||
await locate.circularMenu(node).getByRole('button', { name: 'Comment' }).click()
|
||||
await expect(locate.nodeComment(node)).toBeFocused()
|
||||
})
|
||||
|
||||
test('Add new comment via menu', async ({ page }) => {
|
||||
await actions.goToGraph(page)
|
||||
const INITIAL_NODE_COMMENTS = 1
|
||||
await expect(locate.nodeComment(page)).toHaveCount(INITIAL_NODE_COMMENTS)
|
||||
const node = locate.graphNodeByBinding(page, 'data')
|
||||
const nodeComment = locate.nodeComment(node)
|
||||
|
||||
await node.click()
|
||||
await locate.circularMenu(node).getByRole('button', { name: 'More' }).click()
|
||||
await locate.circularMenu(node).getByRole('button', { name: 'Comment' }).click()
|
||||
await expect(locate.nodeComment(node)).toBeFocused()
|
||||
const NEW_COMMENT = 'New comment text'
|
||||
await nodeComment.fill(NEW_COMMENT)
|
||||
await page.keyboard.press(`Enter`)
|
||||
await expect(nodeComment).not.toBeFocused()
|
||||
await expect(nodeComment).toHaveText(NEW_COMMENT)
|
||||
await expect(locate.nodeComment(page)).toHaveCount(INITIAL_NODE_COMMENTS + 1)
|
||||
})
|
||||
|
||||
test('Delete comment by clearing text', async ({ page }) => {
|
||||
await actions.goToGraph(page)
|
||||
const nodeComment = locate.nodeComment(locate.graphNodeByBinding(page, 'final'))
|
||||
await expect(nodeComment).toHaveText('This node can be entered')
|
||||
|
||||
await nodeComment.click()
|
||||
await page.keyboard.press(`${CONTROL_KEY}+A`)
|
||||
await page.keyboard.press(`Delete`)
|
||||
await page.keyboard.press(`Enter`)
|
||||
await expect(nodeComment).not.toExist()
|
||||
})
|
||||
|
||||
test('URL added to comment is rendered as link', async ({ page }) => {
|
||||
await actions.goToGraph(page)
|
||||
const nodeComment = locate.nodeComment(locate.graphNodeByBinding(page, 'final'))
|
||||
await expect(nodeComment).toHaveText('This node can be entered')
|
||||
await expect(nodeComment.locator('a')).not.toExist()
|
||||
|
||||
await nodeComment.click()
|
||||
await page.keyboard.press(`${CONTROL_KEY}+A`)
|
||||
const NEW_COMMENT = "Here's a URL: https://example.com"
|
||||
await nodeComment.fill(NEW_COMMENT)
|
||||
await page.keyboard.press(`Enter`)
|
||||
await expect(nodeComment).not.toBeFocused()
|
||||
await expect(nodeComment).toHaveText(NEW_COMMENT)
|
||||
await expect(nodeComment.locator('a')).toHaveCount(1)
|
||||
})
|
@ -7,13 +7,18 @@ import * as locate from './locate'
|
||||
test('Main method documentation', async ({ page }) => {
|
||||
await actions.goToGraph(page)
|
||||
|
||||
const rightDock = locate.rightDock(page)
|
||||
// Documentation panel hotkey opens right-dock.
|
||||
await expect(locate.rightDock(page)).toBeHidden()
|
||||
await expect(rightDock).toBeHidden()
|
||||
await page.keyboard.press(`${CONTROL_KEY}+D`)
|
||||
await expect(locate.rightDock(page)).toBeVisible()
|
||||
await expect(rightDock).toBeVisible()
|
||||
|
||||
// Right-dock displays main method documentation.
|
||||
await expect(locate.editorRoot(locate.rightDock(page))).toHaveText('The main method')
|
||||
await expect(locate.editorRoot(rightDock)).toContainText('The main method')
|
||||
// All three images are loaded properly
|
||||
await expect(rightDock.getByAltText('Image')).toHaveCount(3)
|
||||
for (const img of await rightDock.getByAltText('Image').all())
|
||||
await expect(img).toHaveJSProperty('naturalWidth', 3)
|
||||
|
||||
// Documentation hotkey closes right-dock.p
|
||||
await page.keyboard.press(`${CONTROL_KEY}+D`)
|
||||
|
@ -44,7 +44,8 @@ test('Removing node', async ({ page }) => {
|
||||
await page.keyboard.press(`${CONTROL_KEY}+Z`)
|
||||
await expect(locate.graphNode(page)).toHaveCount(nodesCount)
|
||||
await expect(deletedNode.locator('.WidgetToken')).toHaveText(['Main', '.', 'func1', 'prod'])
|
||||
await expect(deletedNode.locator('.GraphNodeComment')).toHaveText('This node can be entered')
|
||||
await expect(locate.nodeComment(deletedNode)).toHaveText('This node can be entered')
|
||||
|
||||
const restoredBBox = await deletedNode.boundingBox()
|
||||
expect(restoredBBox).toEqual(deletedNodeBBox)
|
||||
|
||||
|
2
app/gui/env.d.ts
vendored
2
app/gui/env.d.ts
vendored
@ -218,6 +218,8 @@ declare global {
|
||||
// @ts-expect-error The index signature is intentional to disallow unknown env vars.
|
||||
readonly ENSO_CLOUD_STRIPE_KEY?: string
|
||||
// @ts-expect-error The index signature is intentional to disallow unknown env vars.
|
||||
readonly ENSO_CLOUD_AUTH_ENDPOINT: string
|
||||
// @ts-expect-error The index signature is intentional to disallow unknown env vars.
|
||||
readonly ENSO_CLOUD_COGNITO_USER_POOL_ID: string
|
||||
// @ts-expect-error The index signature is intentional to disallow unknown env vars.
|
||||
readonly ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID: string
|
||||
|
@ -22,7 +22,7 @@
|
||||
"build-cloud": "cross-env CLOUD_BUILD=true corepack pnpm run build",
|
||||
"preview": "vite preview",
|
||||
"//": "max-warnings set to 41 to match the amount of warnings introduced by the new react compiler. Eventual goal is to remove all the warnings.",
|
||||
"lint": "eslint . --max-warnings=41",
|
||||
"lint": "eslint . --max-warnings=39",
|
||||
"format": "prettier --version && prettier --write src/ && eslint . --fix",
|
||||
"dev:vite": "vite",
|
||||
"test": "corepack pnpm run /^^^^test:.*/",
|
||||
@ -94,7 +94,6 @@
|
||||
"@lexical/plain-text": "^0.16.0",
|
||||
"@lexical/utils": "^0.16.0",
|
||||
"@lezer/common": "^1.1.0",
|
||||
"@lezer/markdown": "^1.3.1",
|
||||
"@lezer/highlight": "^1.1.6",
|
||||
"@noble/hashes": "^1.4.0",
|
||||
"@vueuse/core": "^10.4.1",
|
||||
@ -118,7 +117,6 @@
|
||||
"veaury": "^2.3.18",
|
||||
"vue": "^3.5.2",
|
||||
"vue-component-type-helpers": "^2.0.29",
|
||||
"y-codemirror.next": "^0.3.2",
|
||||
"y-protocols": "^1.0.5",
|
||||
"y-textarea": "^1.0.0",
|
||||
"y-websocket": "^1.5.0",
|
||||
|
@ -33,6 +33,7 @@ import * as listen from '#/authentication/listen'
|
||||
*/
|
||||
export interface AmplifyConfig {
|
||||
readonly region: string
|
||||
readonly endpoint: string
|
||||
readonly userPoolId: string
|
||||
readonly userPoolWebClientId: string
|
||||
readonly urlOpener: ((url: string, redirectUrl: string) => void) | null
|
||||
@ -66,6 +67,7 @@ interface OauthAmplifyConfig {
|
||||
/** Same as {@link AmplifyConfig}, but in a format recognized by the AWS Amplify library. */
|
||||
export interface NestedAmplifyConfig {
|
||||
readonly region: string
|
||||
readonly endpoint: string
|
||||
readonly userPoolId: string
|
||||
readonly userPoolWebClientId: string
|
||||
readonly oauth: OauthAmplifyConfig
|
||||
@ -80,6 +82,7 @@ export interface NestedAmplifyConfig {
|
||||
export function toNestedAmplifyConfig(config: AmplifyConfig): NestedAmplifyConfig {
|
||||
return {
|
||||
region: config.region,
|
||||
endpoint: config.endpoint,
|
||||
userPoolId: config.userPoolId,
|
||||
userPoolWebClientId: config.userPoolWebClientId,
|
||||
oauth: {
|
||||
@ -183,6 +186,7 @@ function loadAmplifyConfig(
|
||||
/** Load the platform-specific Amplify configuration. */
|
||||
const signInOutRedirect = supportsDeepLinks ? `${common.DEEP_LINK_SCHEME}://auth` : redirectUrl
|
||||
return {
|
||||
endpoint: process.env.ENSO_CLOUD_AUTH_ENDPOINT,
|
||||
userPoolId: process.env.ENSO_CLOUD_COGNITO_USER_POOL_ID,
|
||||
userPoolWebClientId: process.env.ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID,
|
||||
domain: process.env.ENSO_CLOUD_COGNITO_DOMAIN,
|
||||
|
@ -85,7 +85,7 @@ export function useForm<Schema extends types.TSchema, SubmitResult = void>(
|
||||
errorMap: (issue) => {
|
||||
switch (issue.code) {
|
||||
case 'too_small':
|
||||
if (issue.minimum === 0) {
|
||||
if (issue.minimum === 1 && issue.type === 'string') {
|
||||
return {
|
||||
message: getText('arbitraryFieldRequired'),
|
||||
}
|
||||
|
@ -17,8 +17,11 @@ export interface TextProps
|
||||
readonly elementType?: keyof HTMLElementTagNameMap
|
||||
readonly lineClamp?: number
|
||||
readonly tooltip?: React.ReactElement | string | false | null
|
||||
readonly tooltipTriggerRef?: React.RefObject<HTMLElement>
|
||||
readonly tooltipDisplay?: visualTooltip.VisualTooltipProps['display']
|
||||
readonly tooltipPlacement?: aria.Placement
|
||||
readonly tooltipOffset?: number
|
||||
readonly tooltipCrossOffset?: number
|
||||
}
|
||||
|
||||
export const TEXT_STYLE = twv.tv({
|
||||
@ -134,8 +137,11 @@ export const Text = forwardRef(function Text(props: TextProps, ref: React.Ref<HT
|
||||
balance,
|
||||
elementType: ElementType = 'span',
|
||||
tooltip: tooltipElement = children,
|
||||
tooltipTriggerRef,
|
||||
tooltipDisplay = 'whenOverflowing',
|
||||
tooltipPlacement,
|
||||
tooltipOffset,
|
||||
tooltipCrossOffset,
|
||||
textSelection,
|
||||
disableLineHeightCompensation = false,
|
||||
...ariaProps
|
||||
@ -176,9 +182,18 @@ export const Text = forwardRef(function Text(props: TextProps, ref: React.Ref<HT
|
||||
const { tooltip, targetProps } = visualTooltip.useVisualTooltip({
|
||||
isDisabled: isTooltipDisabled(),
|
||||
targetRef: textElementRef,
|
||||
triggerRef: tooltipTriggerRef,
|
||||
display: tooltipDisplay,
|
||||
children: tooltipElement,
|
||||
...(tooltipPlacement ? { overlayPositionProps: { placement: tooltipPlacement } } : {}),
|
||||
...(tooltipPlacement || tooltipOffset != null ?
|
||||
{
|
||||
overlayPositionProps: {
|
||||
...(tooltipPlacement && { placement: tooltipPlacement }),
|
||||
...(tooltipOffset != null && { offset: tooltipOffset }),
|
||||
...(tooltipCrossOffset != null && { crossOffset: tooltipCrossOffset }),
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
})
|
||||
|
||||
return (
|
||||
|
@ -18,10 +18,11 @@ export interface VisualTooltipProps
|
||||
readonly children: React.ReactNode
|
||||
readonly className?: string
|
||||
readonly targetRef: React.RefObject<HTMLElement>
|
||||
readonly triggerRef?: React.RefObject<HTMLElement> | undefined
|
||||
readonly isDisabled?: boolean
|
||||
readonly overlayPositionProps?: Pick<
|
||||
aria.AriaPositionProps,
|
||||
'containerPadding' | 'offset' | 'placement'
|
||||
'containerPadding' | 'crossOffset' | 'offset' | 'placement'
|
||||
>
|
||||
/**
|
||||
* Determines when the tooltip should be displayed.
|
||||
@ -56,6 +57,7 @@ export function useVisualTooltip(props: VisualTooltipProps): VisualTooltipReturn
|
||||
const {
|
||||
children,
|
||||
targetRef,
|
||||
triggerRef = targetRef,
|
||||
className,
|
||||
isDisabled = false,
|
||||
overlayPositionProps = {},
|
||||
@ -70,6 +72,7 @@ export function useVisualTooltip(props: VisualTooltipProps): VisualTooltipReturn
|
||||
const {
|
||||
containerPadding = 0,
|
||||
offset = DEFAULT_OFFSET,
|
||||
crossOffset = 0,
|
||||
placement = 'bottom',
|
||||
} = overlayPositionProps
|
||||
|
||||
@ -115,8 +118,9 @@ export function useVisualTooltip(props: VisualTooltipProps): VisualTooltipReturn
|
||||
const { overlayProps, updatePosition } = aria.useOverlayPosition({
|
||||
isOpen: state.isOpen,
|
||||
overlayRef: popoverRef,
|
||||
targetRef,
|
||||
targetRef: triggerRef,
|
||||
offset,
|
||||
crossOffset,
|
||||
placement,
|
||||
containerPadding,
|
||||
})
|
||||
|
@ -1,6 +1,5 @@
|
||||
/** @file A select menu with a dropdown. */
|
||||
import {
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
@ -92,22 +91,15 @@ export default function Autocomplete<T>(props: AutocompleteProps<T>) {
|
||||
const [selectedIndex, setSelectedIndex] = useState<number | null>(null)
|
||||
const valuesSet = useMemo(() => new Set(values), [values])
|
||||
const canEditText = setText != null && values.length === 0
|
||||
// We are only interested in the initial value of `canEditText` in effects.
|
||||
const canEditTextRef = useRef(canEditText)
|
||||
const isMultipleAndCustomValue = multiple === true && text != null
|
||||
const matchingItems = useMemo(
|
||||
() => (text == null ? items : items.filter((item) => matches(item, text))),
|
||||
[items, matches, text],
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
if (!canEditTextRef.current) {
|
||||
setIsDropdownVisible(true)
|
||||
}
|
||||
}, [])
|
||||
|
||||
const fallbackInputRef = useRef<HTMLFieldSetElement>(null)
|
||||
const inputRef = rawInputRef ?? fallbackInputRef
|
||||
const containerRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
// This type is a little too wide but it is unavoidable.
|
||||
/** Set values, while also changing the input text. */
|
||||
@ -184,6 +176,7 @@ export default function Autocomplete<T>(props: AutocompleteProps<T>) {
|
||||
return (
|
||||
<div className={twJoin('relative isolate h-6 w-full', isDropdownVisible && 'z-1')}>
|
||||
<div
|
||||
ref={containerRef}
|
||||
onKeyDown={onKeyDown}
|
||||
className={twMerge(
|
||||
'absolute w-full grow transition-colors',
|
||||
@ -259,7 +252,7 @@ export default function Autocomplete<T>(props: AutocompleteProps<T>) {
|
||||
<div
|
||||
key={itemToKey(item)}
|
||||
className={twMerge(
|
||||
'text relative cursor-pointer whitespace-nowrap px-input-x last:rounded-b-xl hover:bg-hover-bg',
|
||||
'text relative min-w-max cursor-pointer whitespace-nowrap rounded-full px-input-x last:rounded-b-xl hover:bg-hover-bg',
|
||||
valuesSet.has(item) && 'bg-hover-bg',
|
||||
index === selectedIndex && 'bg-black/5',
|
||||
)}
|
||||
@ -271,7 +264,12 @@ export default function Autocomplete<T>(props: AutocompleteProps<T>) {
|
||||
toggleValue(item)
|
||||
}}
|
||||
>
|
||||
<Text truncate="1" className="w-full" tooltipPlacement="left">
|
||||
<Text
|
||||
truncate="1"
|
||||
className="w-full"
|
||||
tooltipPlacement="top"
|
||||
tooltipTriggerRef={containerRef}
|
||||
>
|
||||
{children(item)}
|
||||
</Text>
|
||||
</div>
|
||||
|
@ -51,8 +51,13 @@ export default function JSONSchemaInput(props: JSONSchemaInputProps) {
|
||||
schema.format === 'enso-secret'
|
||||
const { data: secrets } = useBackendQuery(remoteBackend, 'listSecrets', [], { enabled: isSecret })
|
||||
const autocompleteItems = isSecret ? secrets?.map((secret) => secret.path) ?? null : null
|
||||
const validityClassName =
|
||||
isAbsent || getValidator(path)(value) ? 'border-primary/20' : 'border-red-700/60'
|
||||
const isInvalid = !isAbsent && !getValidator(path)(value)
|
||||
const validationErrorClassName =
|
||||
isInvalid && 'border border-danger focus:border-danger focus:outline-danger'
|
||||
const errors =
|
||||
isInvalid && 'description' in schema && typeof schema.description === 'string' ?
|
||||
[<Text className="px-2 text-danger">{schema.description}</Text>]
|
||||
: []
|
||||
|
||||
// NOTE: `enum` schemas omitted for now as they are not yet used.
|
||||
if ('const' in schema) {
|
||||
@ -66,100 +71,121 @@ export default function JSONSchemaInput(props: JSONSchemaInputProps) {
|
||||
if ('format' in schema && schema.format === 'enso-secret') {
|
||||
const isValid = typeof value === 'string' && value !== ''
|
||||
children.push(
|
||||
<div className={twMerge('w-full rounded-default border-0.5', validityClassName)}>
|
||||
<Autocomplete
|
||||
items={autocompleteItems ?? []}
|
||||
itemToKey={(item) => item}
|
||||
placeholder={getText('enterSecretPath')}
|
||||
matches={(item, text) => item.toLowerCase().includes(text.toLowerCase())}
|
||||
values={isValid ? [value] : []}
|
||||
setValues={(values) => {
|
||||
onChange(values[0] ?? '')
|
||||
}}
|
||||
text={autocompleteText}
|
||||
setText={setAutocompleteText}
|
||||
<div className="flex flex-col">
|
||||
<div
|
||||
className={twMerge(
|
||||
'w-full rounded-default border-0.5 border-primary/20 outline-offset-2 transition-[border-color,outline] duration-200 focus:border-primary/50 focus:outline focus:outline-2 focus:outline-offset-0 focus:outline-primary',
|
||||
validationErrorClassName,
|
||||
)}
|
||||
>
|
||||
{(item) => item}
|
||||
</Autocomplete>
|
||||
<Autocomplete
|
||||
items={autocompleteItems ?? []}
|
||||
itemToKey={(item) => item}
|
||||
placeholder={getText('enterSecretPath')}
|
||||
matches={(item, text) => item.toLowerCase().includes(text.toLowerCase())}
|
||||
values={isValid ? [value] : []}
|
||||
setValues={(values) => {
|
||||
onChange(values[0] ?? '')
|
||||
}}
|
||||
text={autocompleteText}
|
||||
setText={setAutocompleteText}
|
||||
>
|
||||
{(item) => item}
|
||||
</Autocomplete>
|
||||
</div>
|
||||
{...errors}
|
||||
</div>,
|
||||
...errors,
|
||||
)
|
||||
} else {
|
||||
children.push(
|
||||
<FocusRing>
|
||||
<Input
|
||||
type="text"
|
||||
readOnly={readOnly}
|
||||
value={typeof value === 'string' ? value : ''}
|
||||
size={1}
|
||||
className={twMerge(
|
||||
'focus-child h-6 w-full grow rounded-input border-0.5 bg-transparent px-2 read-only:read-only',
|
||||
validityClassName,
|
||||
)}
|
||||
placeholder={getText('enterText')}
|
||||
onChange={(event) => {
|
||||
const newValue: string = event.currentTarget.value
|
||||
onChange(newValue)
|
||||
}}
|
||||
/>
|
||||
</FocusRing>,
|
||||
<div className="flex flex-col">
|
||||
<FocusRing>
|
||||
<Input
|
||||
type="text"
|
||||
readOnly={readOnly}
|
||||
value={typeof value === 'string' ? value : ''}
|
||||
size={1}
|
||||
className={twMerge(
|
||||
'focus-child h-6 w-full grow rounded-input border-0.5 border-primary/20 bg-transparent px-2 outline-offset-2 transition-[border-color,outline] duration-200 read-only:read-only focus:border-primary/50 focus:outline focus:outline-2 focus:outline-offset-0 focus:outline-primary',
|
||||
validationErrorClassName,
|
||||
)}
|
||||
placeholder={getText('enterText')}
|
||||
onChange={(event) => {
|
||||
const newValue: string = event.currentTarget.value
|
||||
onChange(newValue)
|
||||
}}
|
||||
/>
|
||||
</FocusRing>
|
||||
{...errors}
|
||||
</div>,
|
||||
)
|
||||
}
|
||||
break
|
||||
}
|
||||
case 'number': {
|
||||
children.push(
|
||||
<FocusRing>
|
||||
<Input
|
||||
type="number"
|
||||
readOnly={readOnly}
|
||||
value={typeof value === 'number' ? value : ''}
|
||||
size={1}
|
||||
className={twMerge(
|
||||
'focus-child h-6 w-full grow rounded-input border-0.5 bg-transparent px-2 read-only:read-only',
|
||||
validityClassName,
|
||||
)}
|
||||
placeholder={getText('enterNumber')}
|
||||
onChange={(event) => {
|
||||
const newValue: number = event.currentTarget.valueAsNumber
|
||||
if (Number.isFinite(newValue)) {
|
||||
onChange(newValue)
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</FocusRing>,
|
||||
<div className="flex flex-col">
|
||||
<FocusRing>
|
||||
<Input
|
||||
type="number"
|
||||
readOnly={readOnly}
|
||||
value={typeof value === 'number' ? value : ''}
|
||||
size={1}
|
||||
className={twMerge(
|
||||
'focus-child h-6 w-full grow rounded-input border-0.5 border-primary/20 bg-transparent px-2 outline-offset-2 transition-[border-color,outline] duration-200 read-only:read-only focus:border-primary/50 focus:outline focus:outline-2 focus:outline-offset-0 focus:outline-primary',
|
||||
validationErrorClassName,
|
||||
)}
|
||||
placeholder={getText('enterNumber')}
|
||||
onChange={(event) => {
|
||||
const newValue: number = event.currentTarget.valueAsNumber
|
||||
if (Number.isFinite(newValue)) {
|
||||
onChange(newValue)
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</FocusRing>
|
||||
{...errors}
|
||||
</div>,
|
||||
)
|
||||
break
|
||||
}
|
||||
case 'integer': {
|
||||
children.push(
|
||||
<FocusRing>
|
||||
<Input
|
||||
type="number"
|
||||
readOnly={readOnly}
|
||||
value={typeof value === 'number' ? value : ''}
|
||||
size={1}
|
||||
className={twMerge(
|
||||
'focus-child h-6 w-full grow rounded-input border-0.5 bg-transparent px-2 read-only:read-only',
|
||||
validityClassName,
|
||||
)}
|
||||
placeholder={getText('enterInteger')}
|
||||
onChange={(event) => {
|
||||
const newValue: number = Math.floor(event.currentTarget.valueAsNumber)
|
||||
onChange(newValue)
|
||||
}}
|
||||
/>
|
||||
</FocusRing>,
|
||||
<div className="flex flex-col">
|
||||
<FocusRing>
|
||||
<Input
|
||||
type="number"
|
||||
readOnly={readOnly}
|
||||
value={typeof value === 'number' ? value : ''}
|
||||
size={1}
|
||||
className={twMerge(
|
||||
'focus-child h-6 w-full grow rounded-input border-0.5 border-primary/20 bg-transparent px-2 outline-offset-2 transition-[border-color,outline] duration-200 read-only:read-only focus:border-primary/50 focus:outline focus:outline-2 focus:outline-offset-0 focus:outline-primary',
|
||||
validationErrorClassName,
|
||||
)}
|
||||
placeholder={getText('enterInteger')}
|
||||
onChange={(event) => {
|
||||
const newValue: number = Math.floor(event.currentTarget.valueAsNumber)
|
||||
onChange(newValue)
|
||||
}}
|
||||
/>
|
||||
</FocusRing>
|
||||
{...errors}
|
||||
</div>,
|
||||
)
|
||||
break
|
||||
}
|
||||
case 'boolean': {
|
||||
children.push(
|
||||
<Checkbox
|
||||
name="input"
|
||||
isReadOnly={readOnly}
|
||||
isSelected={typeof value === 'boolean' && value}
|
||||
onChange={onChange}
|
||||
/>,
|
||||
<div className="flex flex-col">
|
||||
<Checkbox
|
||||
name="input"
|
||||
isReadOnly={readOnly}
|
||||
isSelected={typeof value === 'boolean' && value}
|
||||
onChange={onChange}
|
||||
/>
|
||||
{...errors}
|
||||
</div>,
|
||||
)
|
||||
break
|
||||
}
|
||||
@ -186,7 +212,7 @@ export default function JSONSchemaInput(props: JSONSchemaInputProps) {
|
||||
>
|
||||
{propertyDefinitions.map((definition) => {
|
||||
const { key, schema: childSchema } = definition
|
||||
const isOptional = !requiredProperties.includes(key)
|
||||
const isOptional = !requiredProperties.includes(key) || isAbsent
|
||||
const isPresent = !isAbsent && value != null && key in value
|
||||
return constantValueOfSchema(defs, childSchema).length === 1 ?
|
||||
null
|
||||
@ -250,7 +276,7 @@ export default function JSONSchemaInput(props: JSONSchemaInputProps) {
|
||||
newValue = unsafeValue!
|
||||
}
|
||||
const fullObject =
|
||||
value ?? constantValueOfSchema(defs, childSchema, true)[0]
|
||||
value ?? constantValueOfSchema(defs, schema, true)[0]
|
||||
onChange(
|
||||
(
|
||||
typeof fullObject === 'object' &&
|
||||
@ -346,6 +372,7 @@ export default function JSONSchemaInput(props: JSONSchemaInputProps) {
|
||||
path={selectedChildPath}
|
||||
getValidator={getValidator}
|
||||
noBorder={noChildBorder}
|
||||
isAbsent={isAbsent}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
/>
|
||||
@ -364,6 +391,7 @@ export default function JSONSchemaInput(props: JSONSchemaInputProps) {
|
||||
path={`${path}/allOf/${i}`}
|
||||
getValidator={getValidator}
|
||||
noBorder={noChildBorder}
|
||||
isAbsent={isAbsent}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
/>
|
||||
|
@ -9,7 +9,11 @@ import BlankIcon from '#/assets/blank.svg'
|
||||
import * as dragAndDropHooks from '#/hooks/dragAndDropHooks'
|
||||
import { useEventCallback } from '#/hooks/eventCallbackHooks'
|
||||
|
||||
import { useDriveStore, useSetSelectedKeys } from '#/providers/DriveProvider'
|
||||
import {
|
||||
useDriveStore,
|
||||
useSetSelectedKeys,
|
||||
useToggleDirectoryExpansion,
|
||||
} from '#/providers/DriveProvider'
|
||||
import * as modalProvider from '#/providers/ModalProvider'
|
||||
import * as textProvider from '#/providers/TextProvider'
|
||||
|
||||
@ -36,6 +40,7 @@ import {
|
||||
backendQueryOptions,
|
||||
useAsset,
|
||||
useBackendMutationState,
|
||||
useUploadFiles,
|
||||
} from '#/hooks/backendHooks'
|
||||
import { createGetProjectDetailsQuery } from '#/hooks/projectHooks'
|
||||
import { useSyncRef } from '#/hooks/syncRefHooks'
|
||||
@ -274,7 +279,6 @@ export function RealAssetInternalRow(props: RealAssetRowInternalProps) {
|
||||
const { initialAssetEvents } = props
|
||||
const { nodeMap, doCopy, doCut, doPaste, doDelete: doDeleteRaw } = state
|
||||
const { doRestore, doMove, category, scrollContainerRef, rootDirectoryId, backend } = state
|
||||
const { doToggleDirectoryExpansion } = state
|
||||
|
||||
const driveStore = useDriveStore()
|
||||
const queryClient = useQueryClient()
|
||||
@ -304,6 +308,7 @@ export function RealAssetInternalRow(props: RealAssetRowInternalProps) {
|
||||
const [innerRowState, setRowState] = React.useState<assetsTable.AssetRowState>(
|
||||
assetRowUtils.INITIAL_ROW_STATE,
|
||||
)
|
||||
const toggleDirectoryExpansion = useToggleDirectoryExpansion()
|
||||
|
||||
const isNewlyCreated = useStore(driveStore, ({ newestFolderId }) => newestFolderId === asset.id)
|
||||
const isEditingName = innerRowState.isEditingName || isNewlyCreated
|
||||
@ -343,6 +348,7 @@ export function RealAssetInternalRow(props: RealAssetRowInternalProps) {
|
||||
|
||||
const toastAndLog = useToastAndLog()
|
||||
|
||||
const uploadFiles = useUploadFiles(backend, category)
|
||||
const createPermissionMutation = useMutation(backendMutationOptions(backend, 'createPermission'))
|
||||
const associateTagMutation = useMutation(backendMutationOptions(backend, 'associateTag'))
|
||||
|
||||
@ -707,7 +713,7 @@ export function RealAssetInternalRow(props: RealAssetRowInternalProps) {
|
||||
window.setTimeout(() => {
|
||||
setSelected(false)
|
||||
})
|
||||
doToggleDirectoryExpansion(asset.id, asset.id)
|
||||
toggleDirectoryExpansion(asset.id)
|
||||
}
|
||||
}}
|
||||
onContextMenu={(event) => {
|
||||
@ -752,7 +758,7 @@ export function RealAssetInternalRow(props: RealAssetRowInternalProps) {
|
||||
}
|
||||
if (asset.type === backendModule.AssetType.directory) {
|
||||
dragOverTimeoutHandle.current = window.setTimeout(() => {
|
||||
doToggleDirectoryExpansion(asset.id, asset.id, true)
|
||||
toggleDirectoryExpansion(asset.id, true)
|
||||
}, DRAG_EXPAND_DELAY_MS)
|
||||
}
|
||||
// Required because `dragover` does not fire on `mouseenter`.
|
||||
@ -800,7 +806,7 @@ export function RealAssetInternalRow(props: RealAssetRowInternalProps) {
|
||||
event.preventDefault()
|
||||
event.stopPropagation()
|
||||
unsetModal()
|
||||
doToggleDirectoryExpansion(directoryId, directoryId, true)
|
||||
toggleDirectoryExpansion(directoryId, true)
|
||||
const ids = payload
|
||||
.filter((payloadItem) => payloadItem.asset.parentId !== directoryId)
|
||||
.map((dragItem) => dragItem.key)
|
||||
@ -813,13 +819,8 @@ export function RealAssetInternalRow(props: RealAssetRowInternalProps) {
|
||||
} else if (event.dataTransfer.types.includes('Files')) {
|
||||
event.preventDefault()
|
||||
event.stopPropagation()
|
||||
doToggleDirectoryExpansion(directoryId, directoryId, true)
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.uploadFiles,
|
||||
parentKey: directoryId,
|
||||
parentId: directoryId,
|
||||
files: Array.from(event.dataTransfer.files),
|
||||
})
|
||||
toggleDirectoryExpansion(directoryId, true)
|
||||
void uploadFiles(Array.from(event.dataTransfer.files), directoryId, null)
|
||||
}
|
||||
}
|
||||
}}
|
||||
|
@ -6,7 +6,7 @@ import FolderArrowIcon from '#/assets/folder_arrow.svg'
|
||||
|
||||
import { backendMutationOptions } from '#/hooks/backendHooks'
|
||||
|
||||
import { useDriveStore } from '#/providers/DriveProvider'
|
||||
import { useDriveStore, useToggleDirectoryExpansion } from '#/providers/DriveProvider'
|
||||
import * as textProvider from '#/providers/TextProvider'
|
||||
|
||||
import * as ariaComponents from '#/components/AriaComponents'
|
||||
@ -38,10 +38,11 @@ export interface DirectoryNameColumnProps extends column.AssetColumnProps {
|
||||
* This should never happen.
|
||||
*/
|
||||
export default function DirectoryNameColumn(props: DirectoryNameColumnProps) {
|
||||
const { item, selected, state, rowState, setRowState, isEditable, depth } = props
|
||||
const { backend, nodeMap, doToggleDirectoryExpansion, expandedDirectoryIds } = state
|
||||
const { item, depth, selected, state, rowState, setRowState, isEditable } = props
|
||||
const { backend, nodeMap, expandedDirectoryIds } = state
|
||||
const { getText } = textProvider.useText()
|
||||
const driveStore = useDriveStore()
|
||||
const toggleDirectoryExpansion = useToggleDirectoryExpansion()
|
||||
const isExpanded = expandedDirectoryIds.includes(item.id)
|
||||
|
||||
const updateDirectoryMutation = useMutation(backendMutationOptions(backend, 'updateDirectory'))
|
||||
@ -98,7 +99,7 @@ export default function DirectoryNameColumn(props: DirectoryNameColumnProps) {
|
||||
isExpanded && 'rotate-90',
|
||||
)}
|
||||
onPress={() => {
|
||||
doToggleDirectoryExpansion(item.id, item.id)
|
||||
toggleDirectoryExpansion(item.id)
|
||||
}}
|
||||
/>
|
||||
<SvgMask src={FolderIcon} className="m-name-column-icon size-4 group-hover:hidden" />
|
||||
|
@ -100,11 +100,13 @@ const MODIFIER_TO_TEXT_ID: Readonly<Record<ModifierKey, text.TextId>> = {
|
||||
/** Props for a {@link KeyboardShortcut}, specifying the keyboard action. */
|
||||
export interface KeyboardShortcutActionProps {
|
||||
readonly action: DashboardBindingKey
|
||||
readonly className?: string
|
||||
}
|
||||
|
||||
/** Props for a {@link KeyboardShortcut}, specifying the shortcut string. */
|
||||
export interface KeyboardShortcutShortcutProps {
|
||||
readonly shortcut: string
|
||||
readonly className?: string
|
||||
}
|
||||
|
||||
/** Props for a {@link KeyboardShortcut}. */
|
||||
@ -112,10 +114,12 @@ export type KeyboardShortcutProps = KeyboardShortcutActionProps | KeyboardShortc
|
||||
|
||||
/** A visual representation of a keyboard shortcut. */
|
||||
export default function KeyboardShortcut(props: KeyboardShortcutProps) {
|
||||
const { className } = props
|
||||
const { getText } = useText()
|
||||
const inputBindings = useInputBindings()
|
||||
const shortcutString =
|
||||
'shortcut' in props ? props.shortcut : inputBindings.metadata[props.action].bindings[0]
|
||||
|
||||
if (shortcutString == null) {
|
||||
return null
|
||||
} else {
|
||||
@ -125,7 +129,8 @@ export default function KeyboardShortcut(props: KeyboardShortcutProps) {
|
||||
<div
|
||||
className={twMerge(
|
||||
'flex items-center',
|
||||
detect.isOnMacOS() ? 'gap-modifiers-macos' : 'gap-modifiers',
|
||||
className,
|
||||
detect.isOnMacOS() ? 'gap-[3px]' : 'gap-0.5',
|
||||
)}
|
||||
>
|
||||
{modifiers.map(
|
||||
|
@ -117,9 +117,9 @@
|
||||
"libraryName": { "const": "Standard.Base" },
|
||||
"path": {
|
||||
"title": "Path",
|
||||
"description": "Must start with \"enso://<organization-name>/\".",
|
||||
"description": "Must start with \"enso://Users/<username>/\" or \"enso://Teams/<team name>/\".",
|
||||
"type": "string",
|
||||
"pattern": "^enso://.+/.*$",
|
||||
"pattern": "^enso://(?:Users|Teams)/.*/.*$",
|
||||
"format": "enso-file"
|
||||
},
|
||||
"format": { "title": "Format", "$ref": "#/$defs/Format" }
|
||||
|
@ -2,13 +2,7 @@
|
||||
|
||||
/** Possible types of changes to the file list. */
|
||||
enum AssetListEventType {
|
||||
newFolder = 'new-folder',
|
||||
newProject = 'new-project',
|
||||
uploadFiles = 'upload-files',
|
||||
newDatalink = 'new-datalink',
|
||||
newSecret = 'new-secret',
|
||||
duplicateProject = 'duplicate-project',
|
||||
closeFolder = 'close-folder',
|
||||
copy = 'copy',
|
||||
move = 'move',
|
||||
delete = 'delete',
|
||||
|
@ -20,13 +20,7 @@ interface AssetListBaseEvent<Type extends AssetListEventType> {
|
||||
|
||||
/** All possible events. */
|
||||
interface AssetListEvents {
|
||||
readonly newFolder: AssetListNewFolderEvent
|
||||
readonly newProject: AssetListNewProjectEvent
|
||||
readonly uploadFiles: AssetListUploadFilesEvent
|
||||
readonly newSecret: AssetListNewSecretEvent
|
||||
readonly newDatalink: AssetListNewDatalinkEvent
|
||||
readonly duplicateProject: AssetListDuplicateProjectEvent
|
||||
readonly closeFolder: AssetListCloseFolderEvent
|
||||
readonly copy: AssetListCopyEvent
|
||||
readonly move: AssetListMoveEvent
|
||||
readonly delete: AssetListDeleteEvent
|
||||
@ -45,46 +39,6 @@ type SanityCheck<
|
||||
} = AssetListEvents,
|
||||
> = [T]
|
||||
|
||||
/** A signal to create a new directory. */
|
||||
interface AssetListNewFolderEvent extends AssetListBaseEvent<AssetListEventType.newFolder> {
|
||||
readonly parentKey: backend.DirectoryId
|
||||
readonly parentId: backend.DirectoryId
|
||||
}
|
||||
|
||||
/** A signal to create a new project. */
|
||||
interface AssetListNewProjectEvent extends AssetListBaseEvent<AssetListEventType.newProject> {
|
||||
readonly parentKey: backend.DirectoryId
|
||||
readonly parentId: backend.DirectoryId
|
||||
readonly templateId: string | null
|
||||
readonly datalinkId: backend.DatalinkId | null
|
||||
readonly preferredName: string | null
|
||||
readonly onCreated?: (project: backend.CreatedProject, parentId: backend.DirectoryId) => void
|
||||
readonly onError?: () => void
|
||||
}
|
||||
|
||||
/** A signal to upload files. */
|
||||
interface AssetListUploadFilesEvent extends AssetListBaseEvent<AssetListEventType.uploadFiles> {
|
||||
readonly parentKey: backend.DirectoryId
|
||||
readonly parentId: backend.DirectoryId
|
||||
readonly files: File[]
|
||||
}
|
||||
|
||||
/** A signal to create a new secret. */
|
||||
interface AssetListNewDatalinkEvent extends AssetListBaseEvent<AssetListEventType.newDatalink> {
|
||||
readonly parentKey: backend.DirectoryId
|
||||
readonly parentId: backend.DirectoryId
|
||||
readonly name: string
|
||||
readonly value: unknown
|
||||
}
|
||||
|
||||
/** A signal to create a new secret. */
|
||||
interface AssetListNewSecretEvent extends AssetListBaseEvent<AssetListEventType.newSecret> {
|
||||
readonly parentKey: backend.DirectoryId
|
||||
readonly parentId: backend.DirectoryId
|
||||
readonly name: string
|
||||
readonly value: string
|
||||
}
|
||||
|
||||
/** A signal to duplicate a project. */
|
||||
interface AssetListDuplicateProjectEvent
|
||||
extends AssetListBaseEvent<AssetListEventType.duplicateProject> {
|
||||
@ -94,12 +48,6 @@ interface AssetListDuplicateProjectEvent
|
||||
readonly versionId: backend.S3ObjectVersionId
|
||||
}
|
||||
|
||||
/** A signal to close (collapse) a folder. */
|
||||
interface AssetListCloseFolderEvent extends AssetListBaseEvent<AssetListEventType.closeFolder> {
|
||||
readonly id: backend.DirectoryId
|
||||
readonly key: backend.DirectoryId
|
||||
}
|
||||
|
||||
/** A signal that files should be copied. */
|
||||
interface AssetListCopyEvent extends AssetListBaseEvent<AssetListEventType.copy> {
|
||||
readonly newParentKey: backend.DirectoryId
|
||||
|
@ -1,646 +0,0 @@
|
||||
/** @file Hooks for interacting with the backend. */
|
||||
import { useId, useMemo, useState } from 'react'
|
||||
|
||||
import {
|
||||
queryOptions,
|
||||
useMutation,
|
||||
useMutationState,
|
||||
useQuery,
|
||||
type Mutation,
|
||||
type MutationKey,
|
||||
type UseMutationOptions,
|
||||
type UseQueryOptions,
|
||||
type UseQueryResult,
|
||||
} from '@tanstack/react-query'
|
||||
import { toast } from 'react-toastify'
|
||||
import invariant from 'tiny-invariant'
|
||||
|
||||
import {
|
||||
backendQueryOptions as backendQueryOptionsBase,
|
||||
type BackendMethods,
|
||||
} from 'enso-common/src/backendQuery'
|
||||
|
||||
import { useEventCallback } from '#/hooks/eventCallbackHooks'
|
||||
import { useToastAndLog, useToastAndLogWithId } from '#/hooks/toastAndLogHooks'
|
||||
import { CATEGORY_TO_FILTER_BY, type Category } from '#/layouts/CategorySwitcher/Category'
|
||||
import { useText } from '#/providers/TextProvider'
|
||||
import type Backend from '#/services/Backend'
|
||||
import * as backendModule from '#/services/Backend'
|
||||
import {
|
||||
AssetType,
|
||||
BackendType,
|
||||
type AssetId,
|
||||
type DirectoryAsset,
|
||||
type DirectoryId,
|
||||
type User,
|
||||
type UserGroupInfo,
|
||||
} from '#/services/Backend'
|
||||
import { TEAMS_DIRECTORY_ID, USERS_DIRECTORY_ID } from '#/services/remoteBackendPaths'
|
||||
import { usePreventNavigation } from '#/utilities/preventNavigation'
|
||||
import { toRfc3339 } from '../utilities/dateTime'
|
||||
|
||||
// The number of bytes in 1 megabyte.
|
||||
const MB_BYTES = 1_000_000
|
||||
const S3_CHUNK_SIZE_MB = Math.round(backendModule.S3_CHUNK_SIZE_BYTES / MB_BYTES)
|
||||
|
||||
// ============================
|
||||
// === DefineBackendMethods ===
|
||||
// ============================
|
||||
|
||||
/** Ensure that the given type contains only names of backend methods. */
|
||||
type DefineBackendMethods<T extends keyof Backend> = T
|
||||
|
||||
// ======================
|
||||
// === MutationMethod ===
|
||||
// ======================
|
||||
|
||||
/** Names of methods corresponding to mutations. */
|
||||
export type MutationMethod = DefineBackendMethods<
|
||||
| 'acceptInvitation'
|
||||
| 'associateTag'
|
||||
| 'changeUserGroup'
|
||||
| 'closeProject'
|
||||
| 'copyAsset'
|
||||
| 'createCheckoutSession'
|
||||
| 'createDatalink'
|
||||
| 'createDirectory'
|
||||
| 'createPermission'
|
||||
| 'createProject'
|
||||
| 'createSecret'
|
||||
| 'createTag'
|
||||
| 'createUser'
|
||||
| 'createUserGroup'
|
||||
| 'declineInvitation'
|
||||
  | 'deleteAsset'
  | 'deleteDatalink'
  | 'deleteInvitation'
  | 'deleteTag'
  | 'deleteUser'
  | 'deleteUserGroup'
  | 'duplicateProject'
  | 'inviteUser'
  | 'logEvent'
  | 'openProject'
  | 'removeUser'
  | 'resendInvitation'
  | 'restoreUser'
  | 'undoDeleteAsset'
  | 'updateAsset'
  | 'updateDirectory'
  | 'updateFile'
  | 'updateOrganization'
  | 'updateProject'
  | 'updateSecret'
  | 'updateUser'
  | 'uploadFileChunk'
  | 'uploadFileEnd'
  | 'uploadFileStart'
  | 'uploadOrganizationPicture'
  | 'uploadUserPicture'
>

// =======================
// === useBackendQuery ===
// =======================

export function backendQueryOptions<Method extends BackendMethods>(
  backend: Backend,
  method: Method,
  args: Parameters<Backend[Method]>,
  options?: Omit<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryFn' | 'queryKey'> &
    Partial<Pick<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryKey'>>,
): UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>
export function backendQueryOptions<Method extends BackendMethods>(
  backend: Backend | null,
  method: Method,
  args: Parameters<Backend[Method]>,
  options?: Omit<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryFn' | 'queryKey'> &
    Partial<Pick<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryKey'>>,
): UseQueryOptions<Awaited<ReturnType<Backend[Method]>> | undefined>
/** Wrap a backend method call in a React Query. */
export function backendQueryOptions<Method extends BackendMethods>(
  backend: Backend | null,
  method: Method,
  args: Parameters<Backend[Method]>,
  options?: Omit<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryFn' | 'queryKey'> &
    Partial<Pick<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryKey'>>,
) {
  // @ts-expect-error This call is generic over the presence or absence of `inputData`.
  return queryOptions<Awaited<ReturnType<Backend[Method]>>>({
    ...options,
    ...backendQueryOptionsBase(backend, method, args, options?.queryKey),
    // eslint-disable-next-line no-restricted-syntax, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-return
    queryFn: () => (backend?.[method] as any)?.(...args),
  })
}

export function useBackendQuery<Method extends BackendMethods>(
  backend: Backend,
  method: Method,
  args: Parameters<Backend[Method]>,
  options?: Omit<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryFn' | 'queryKey'> &
    Partial<Pick<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryKey'>>,
): UseQueryResult<Awaited<ReturnType<Backend[Method]>>>
export function useBackendQuery<Method extends BackendMethods>(
  backend: Backend | null,
  method: Method,
  args: Parameters<Backend[Method]>,
  options?: Omit<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryFn' | 'queryKey'> &
    Partial<Pick<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryKey'>>,
): UseQueryResult<Awaited<ReturnType<Backend[Method]>> | undefined>
/** Wrap a backend method call in a React Query. */
export function useBackendQuery<Method extends BackendMethods>(
  backend: Backend | null,
  method: Method,
  args: Parameters<Backend[Method]>,
  options?: Omit<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryFn' | 'queryKey'> &
    Partial<Pick<UseQueryOptions<Awaited<ReturnType<Backend[Method]>>>, 'queryKey'>>,
) {
  return useQuery(backendQueryOptions(backend, method, args, options))
}

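// Illustrative usage sketch for the query helpers above (the `backend` value is
// assumed to come from the surrounding backend provider; `staleTime` is an
// ordinary React Query option):
//
//   const { data: users } = useBackendQuery(backend, 'listUsers', [])
//   const { data: userGroups } = useQuery(
//     backendQueryOptions(backend, 'listUserGroups', [], { staleTime: 30_000 }),
//   )
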
// ==========================
// === useBackendMutation ===
// ==========================

const INVALIDATE_ALL_QUERIES = Symbol('invalidate all queries')
const INVALIDATION_MAP: Partial<
  Record<MutationMethod, readonly (BackendMethods | typeof INVALIDATE_ALL_QUERIES)[]>
> = {
  createUser: ['usersMe'],
  updateUser: ['usersMe'],
  deleteUser: ['usersMe'],
  restoreUser: ['usersMe'],
  uploadUserPicture: ['usersMe'],
  updateOrganization: ['getOrganization'],
  uploadOrganizationPicture: ['getOrganization'],
  createUserGroup: ['listUserGroups'],
  deleteUserGroup: ['listUserGroups'],
  changeUserGroup: ['listUsers'],
  createTag: ['listTags'],
  deleteTag: ['listTags'],
  associateTag: ['listDirectory'],
  acceptInvitation: [INVALIDATE_ALL_QUERIES],
  declineInvitation: ['usersMe'],
  createProject: ['listDirectory'],
  duplicateProject: ['listDirectory'],
  createDirectory: ['listDirectory'],
  createSecret: ['listDirectory'],
  updateSecret: ['listDirectory'],
  createDatalink: ['listDirectory', 'getDatalink'],
  uploadFileEnd: ['listDirectory'],
  copyAsset: ['listDirectory', 'listAssetVersions'],
  deleteAsset: ['listDirectory', 'listAssetVersions'],
  undoDeleteAsset: ['listDirectory'],
  updateAsset: ['listDirectory', 'listAssetVersions'],
  closeProject: ['listDirectory', 'listAssetVersions'],
  updateDirectory: ['listDirectory'],
}

/** The type of the corresponding mutation for the given backend method. */
export type BackendMutation<Method extends MutationMethod> = Mutation<
  Awaited<ReturnType<Backend[Method]>>,
  Error,
  Parameters<Backend[Method]>
>

export function backendMutationOptions<Method extends MutationMethod>(
  backend: Backend,
  method: Method,
  options?: Omit<
    UseMutationOptions<Awaited<ReturnType<Backend[Method]>>, Error, Parameters<Backend[Method]>>,
    'mutationFn'
  >,
): UseMutationOptions<Awaited<ReturnType<Backend[Method]>>, Error, Parameters<Backend[Method]>>
export function backendMutationOptions<Method extends MutationMethod>(
  backend: Backend | null,
  method: Method,
  options?: Omit<
    UseMutationOptions<Awaited<ReturnType<Backend[Method]>>, Error, Parameters<Backend[Method]>>,
    'mutationFn'
  >,
): UseMutationOptions<
  Awaited<ReturnType<Backend[Method]>> | undefined,
  Error,
  Parameters<Backend[Method]>
>
/** Wrap a backend method call in a React Query Mutation. */
export function backendMutationOptions<Method extends MutationMethod>(
  backend: Backend | null,
  method: Method,
  options?: Omit<
    UseMutationOptions<Awaited<ReturnType<Backend[Method]>>, Error, Parameters<Backend[Method]>>,
    'mutationFn'
  >,
): UseMutationOptions<Awaited<ReturnType<Backend[Method]>>, Error, Parameters<Backend[Method]>> {
  return {
    ...options,
    mutationKey: [backend?.type, method, ...(options?.mutationKey ?? [])],
    // eslint-disable-next-line no-restricted-syntax, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-return
    mutationFn: (args) => (backend?.[method] as any)?.(...args),
    networkMode: backend?.type === BackendType.local ? 'always' : 'online',
    meta: {
      invalidates: [
        ...(options?.meta?.invalidates ?? []),
        ...(INVALIDATION_MAP[method]?.map((queryMethod) =>
          queryMethod === INVALIDATE_ALL_QUERIES ? [backend?.type] : [backend?.type, queryMethod],
        ) ?? []),
      ],
      awaitInvalidates: options?.meta?.awaitInvalidates ?? true,
    },
  }
}

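// Illustrative usage sketch (the parent directory id and folder title are
// placeholders). Queries named in `INVALIDATION_MAP` for the method (here
// 'listDirectory' for 'createDirectory') are invalidated automatically via the
// `meta.invalidates` entries built above:
//
//   const createDirectory = useMutation(backendMutationOptions(backend, 'createDirectory'))
//   await createDirectory.mutateAsync([{ parentId: rootDirectoryId, title: 'New Folder 1' }])
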
// ==================================
// === useListUserGroupsWithUsers ===
// ==================================

/** A user group, as well as the users that are a part of the user group. */
export interface UserGroupInfoWithUsers extends UserGroupInfo {
  readonly users: readonly User[]
}

/** A list of user groups, taking into account optimistic state. */
export function useListUserGroupsWithUsers(
  backend: Backend,
): readonly UserGroupInfoWithUsers[] | null {
  const listUserGroupsQuery = useBackendQuery(backend, 'listUserGroups', [])
  const listUsersQuery = useBackendQuery(backend, 'listUsers', [])
  return useMemo(() => {
    if (listUserGroupsQuery.data == null || listUsersQuery.data == null) {
      return null
    } else {
      const result = listUserGroupsQuery.data.map((userGroup) => {
        const usersInGroup: readonly User[] = listUsersQuery.data.filter((user) =>
          user.userGroups?.includes(userGroup.id),
        )
        return { ...userGroup, users: usersInGroup }
      })
      return result
    }
  }, [listUserGroupsQuery.data, listUsersQuery.data])
}

/** Options for {@link listDirectoryQueryOptions}. */
export interface ListDirectoryQueryOptions {
  readonly backend: Backend
  readonly parentId: DirectoryId
  readonly category: Category
}

/**
 * Builds a query options object to fetch the children of a directory.
 */
export function listDirectoryQueryOptions(options: ListDirectoryQueryOptions) {
  const { backend, parentId, category } = options

  return queryOptions({
    queryKey: [
      backend.type,
      'listDirectory',
      parentId,
      {
        labels: null,
        filterBy: CATEGORY_TO_FILTER_BY[category.type],
        recentProjects: category.type === 'recent',
      },
    ] as const,
    queryFn: async () => {
      try {
        return await backend.listDirectory(
          {
            parentId,
            filterBy: CATEGORY_TO_FILTER_BY[category.type],
            labels: null,
            recentProjects: category.type === 'recent',
          },
          parentId,
        )
      } catch {
        throw Object.assign(new Error(), { parentId })
      }
    },

    meta: { persist: false },
  })
}

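// Illustrative usage sketch (the `category` value stands for one of the drive
// categories understood by `CATEGORY_TO_FILTER_BY`):
//
//   const { data: children } = useQuery(
//     listDirectoryQueryOptions({ backend, parentId: rootDirectoryId, category }),
//   )
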
/**
 * Upload progress for {@link useUploadFileMutation}.
 */
export interface UploadFileMutationProgress {
  /**
   * The stage of the upload that this progress update corresponds to.
   * Useful to determine whether to create a new toast or to update an existing toast.
   */
  readonly event: 'begin' | 'chunk' | 'end'
  readonly sentMb: number
  readonly totalMb: number
}

/** Options for {@link useAsset}. */
export interface UseAssetOptions extends ListDirectoryQueryOptions {
  readonly assetId: AssetId
}

/** Data for a specific asset. */
export function useAsset(options: UseAssetOptions) {
  const { parentId, assetId } = options

  const { data: asset } = useQuery({
    ...listDirectoryQueryOptions(options),
    select: (data) => data.find((child) => child.id === assetId),
  })

  if (asset) {
    return asset
  }

  const shared = {
    parentId,
    projectState: null,
    extension: null,
    description: '',
    modifiedAt: toRfc3339(new Date()),
    permissions: [],
    labels: [],
    parentsPath: '',
    virtualParentsPath: '',
  }
  switch (true) {
    case assetId === USERS_DIRECTORY_ID: {
      return {
        ...shared,
        id: assetId,
        title: 'Users',
        type: AssetType.directory,
      } satisfies DirectoryAsset
    }
    case assetId === TEAMS_DIRECTORY_ID: {
      return {
        ...shared,
        id: assetId,
        title: 'Teams',
        type: AssetType.directory,
      } satisfies DirectoryAsset
    }
    case backendModule.isLoadingAssetId(assetId): {
      return {
        ...shared,
        id: assetId,
        title: '',
        type: AssetType.specialLoading,
      } satisfies backendModule.SpecialLoadingAsset
    }
    case backendModule.isEmptyAssetId(assetId): {
      return {
        ...shared,
        id: assetId,
        title: '',
        type: AssetType.specialEmpty,
      } satisfies backendModule.SpecialEmptyAsset
    }
    case backendModule.isErrorAssetId(assetId): {
      return {
        ...shared,
        id: assetId,
        title: '',
        type: AssetType.specialError,
      } satisfies backendModule.SpecialErrorAsset
    }
    default: {
      return
    }
  }
}

/** Data for a specific asset, throwing an error if the asset is not found. */
export function useAssetStrict(options: UseAssetOptions) {
  const asset = useAsset(options)

  invariant(
    asset,
    `Expected asset to be defined, but got undefined, Asset ID: ${JSON.stringify(options.assetId)}`,
  )

  return asset
}

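// Illustrative usage sketch (assumes the asset is expected to exist in the
// listed directory, so the strict variant is appropriate):
//
//   const asset = useAssetStrict({ backend, parentId, category, assetId })
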
/** Return matching in-flight mutations */
export function useBackendMutationState<Method extends MutationMethod, Result>(
  backend: Backend,
  method: Method,
  options: {
    mutationKey?: MutationKey
    predicate?: (mutation: BackendMutation<Method>) => boolean
    select?: (mutation: BackendMutation<Method>) => Result
  } = {},
) {
  const { mutationKey, predicate, select } = options
  return useMutationState({
    filters: {
      ...backendMutationOptions(backend, method, mutationKey ? { mutationKey } : {}),
      predicate: (mutation: BackendMutation<Method>) =>
        mutation.state.status === 'pending' && (predicate?.(mutation) ?? true),
    },
    // This is UNSAFE when the `Result` parameter is explicitly specified in the
    // generic parameter list.
    // eslint-disable-next-line no-restricted-syntax
    select: select as (mutation: Mutation<unknown, Error, unknown, unknown>) => Result,
  })
}

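// Illustrative usage sketch: read the variables of every pending `updateAsset`
// mutation, e.g. to render optimistic state while the requests are in flight.
//
//   const pendingUpdates = useBackendMutationState(backend, 'updateAsset', {
//     select: (mutation) => mutation.state.variables,
//   })
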
/** Options for {@link useUploadFileMutation}. */
export interface UploadFileMutationOptions {
  /**
   * Defaults to 3.
   * Controls the default value of {@link UploadFileMutationOptions['chunkRetries']}
   * and {@link UploadFileMutationOptions['endRetries']}.
   */
  readonly retries?: number
  /** Defaults to {@link UploadFileMutationOptions['retries']}. */
  readonly chunkRetries?: number
  /** Defaults to {@link UploadFileMutationOptions['retries']}. */
  readonly endRetries?: number
  /** Called for all progress updates (`onBegin`, `onChunkSuccess` and `onSuccess`). */
  readonly onProgress?: (progress: UploadFileMutationProgress) => void
  /** Called before any mutations are sent. */
  readonly onBegin?: (progress: UploadFileMutationProgress) => void
  /** Called after each successful chunk upload mutation. */
  readonly onChunkSuccess?: (progress: UploadFileMutationProgress) => void
  /** Called after the entire mutation succeeds. */
  readonly onSuccess?: (progress: UploadFileMutationProgress) => void
  /** Called after any mutations fail. */
  readonly onError?: (error: unknown) => void
  /** Called after `onSuccess` or `onError`, depending on whether the mutation succeeded. */
  readonly onSettled?: (progress: UploadFileMutationProgress | null, error: unknown) => void
}

/**
 * Call "upload file" mutations for a file.
 * Always uses multipart upload for Cloud backend.
 * Shows toasts to update progress.
 */
export function useUploadFileWithToastMutation(
  backend: Backend,
  options: UploadFileMutationOptions = {},
) {
  const toastId = useId()
  const { getText } = useText()
  const toastAndLog = useToastAndLogWithId()
  const { onBegin, onChunkSuccess, onSuccess, onError } = options

  const mutation = useUploadFileMutation(backend, {
    ...options,
    onBegin: (progress) => {
      onBegin?.(progress)
      const { sentMb, totalMb } = progress
      toast.loading(getText('uploadLargeFileStatus', sentMb, totalMb), {
        toastId,
        position: 'bottom-right',
      })
    },
    onChunkSuccess: (progress) => {
      onChunkSuccess?.(progress)
      const { sentMb, totalMb } = progress
      const text = getText('uploadLargeFileStatus', sentMb, totalMb)
      toast.update(toastId, { render: text })
    },
    onSuccess: (progress) => {
      onSuccess?.(progress)
      toast.update(toastId, {
        type: 'success',
        render: getText('uploadLargeFileSuccess'),
        isLoading: false,
        autoClose: null,
      })
    },
    onError: (error) => {
      onError?.(error)
      toastAndLog(toastId, 'uploadLargeFileError', error)
    },
  })

  usePreventNavigation({ message: getText('anUploadIsInProgress'), isEnabled: mutation.isPending })

  return mutation
}

/**
 * Call "upload file" mutations for a file.
 * Always uses multipart upload for Cloud backend.
 */
export function useUploadFileMutation(backend: Backend, options: UploadFileMutationOptions = {}) {
  const toastAndLog = useToastAndLog()
  const {
    retries = 3,
    chunkRetries = retries,
    endRetries = retries,
    onError = (error) => {
      toastAndLog('uploadLargeFileError', error)
    },
  } = options
  const uploadFileStartMutation = useMutation(backendMutationOptions(backend, 'uploadFileStart'))
  const uploadFileChunkMutation = useMutation(
    backendMutationOptions(backend, 'uploadFileChunk', { retry: chunkRetries }),
  )
  const uploadFileEndMutation = useMutation(
    backendMutationOptions(backend, 'uploadFileEnd', { retry: endRetries }),
  )
  const [variables, setVariables] =
    useState<[params: backendModule.UploadFileRequestParams, file: File]>()
  const [sentMb, setSentMb] = useState(0)
  const [totalMb, setTotalMb] = useState(0)
  const mutateAsync = useEventCallback(
    async (body: backendModule.UploadFileRequestParams, file: File) => {
      setVariables([body, file])
      const fileSizeMb = Math.ceil(file.size / MB_BYTES)
      options.onBegin?.({ event: 'begin', sentMb: 0, totalMb: fileSizeMb })
      setSentMb(0)
      setTotalMb(fileSizeMb)
      try {
        const { sourcePath, uploadId, presignedUrls } = await uploadFileStartMutation.mutateAsync([
          body,
          file,
        ])
        const parts: backendModule.S3MultipartPart[] = []
        for (const [url, i] of Array.from(
          presignedUrls,
          (presignedUrl, index) => [presignedUrl, index] as const,
        )) {
          parts.push(await uploadFileChunkMutation.mutateAsync([url, file, i]))
          const newSentMb = Math.min((i + 1) * S3_CHUNK_SIZE_MB, fileSizeMb)
          setSentMb(newSentMb)
          options.onChunkSuccess?.({
            event: 'chunk',
            sentMb: newSentMb,
            totalMb: fileSizeMb,
          })
        }
        const result = await uploadFileEndMutation.mutateAsync([
          {
            parentDirectoryId: body.parentDirectoryId,
            parts,
            sourcePath: sourcePath,
            uploadId: uploadId,
            assetId: body.fileId,
            fileName: body.fileName,
          },
        ])
        setSentMb(fileSizeMb)
        const progress: UploadFileMutationProgress = {
          event: 'end',
          sentMb: fileSizeMb,
          totalMb: fileSizeMb,
        }
        options.onSuccess?.(progress)
        options.onSettled?.(progress, null)
        return result
      } catch (error) {
        onError(error)
        options.onSettled?.(null, error)
        throw error
      }
    },
  )
  const mutate = useEventCallback((params: backendModule.UploadFileRequestParams, file: File) => {
    void mutateAsync(params, file)
  })

  return {
    sentMb,
    totalMb,
    variables,
    mutate,
    mutateAsync,
    context: uploadFileEndMutation.context,
    data: uploadFileEndMutation.data,
    failureCount:
      uploadFileEndMutation.failureCount +
      uploadFileChunkMutation.failureCount +
      uploadFileStartMutation.failureCount,
    failureReason:
      uploadFileEndMutation.failureReason ??
      uploadFileChunkMutation.failureReason ??
      uploadFileStartMutation.failureReason,
    isError:
      uploadFileStartMutation.isError ||
      uploadFileChunkMutation.isError ||
      uploadFileEndMutation.isError,
    error:
      uploadFileEndMutation.error ?? uploadFileChunkMutation.error ?? uploadFileStartMutation.error,
    isPaused:
      uploadFileStartMutation.isPaused ||
      uploadFileChunkMutation.isPaused ||
      uploadFileEndMutation.isPaused,
    isPending:
      uploadFileStartMutation.isPending ||
      uploadFileChunkMutation.isPending ||
      uploadFileEndMutation.isPending,
    isSuccess: uploadFileEndMutation.isSuccess,
  }
}
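
// Illustrative usage sketch (the `file` and parent directory id are placeholders;
// the chunked S3 upload and progress toasts are handled by the hooks above):
//
//   const uploadFile = useUploadFileWithToastMutation(backend)
//   await uploadFile.mutateAsync(
//     { fileId: null, fileName: file.name, parentDirectoryId },
//     file,
//   )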
1276
app/gui/src/dashboard/hooks/backendHooks.tsx
Normal file
1276
app/gui/src/dashboard/hooks/backendHooks.tsx
Normal file
File diff suppressed because it is too large
@ -35,7 +35,7 @@ import ManagePermissionsModal from '#/modals/ManagePermissionsModal'
import * as backendModule from '#/services/Backend'
import * as localBackendModule from '#/services/LocalBackend'

import { useUploadFileWithToastMutation } from '#/hooks/backendHooks'
import { useNewProject, useUploadFileWithToastMutation } from '#/hooks/backendHooks'
import {
  usePasteData,
  useSetAssetPanelProps,
@ -105,6 +105,8 @@ export default function AssetContextMenu(props: AssetContextMenuProps) {
|
||||
const { isFeatureUnderPaywall } = billingHooks.usePaywall({ plan: user.plan })
|
||||
const isUnderPaywall = isFeatureUnderPaywall('share')
|
||||
|
||||
const newProject = useNewProject(backend, category)
|
||||
|
||||
const systemApi = window.systemApi
|
||||
const ownsThisAsset = !isCloud || self?.permission === permissions.PermissionAction.own
|
||||
const managesThisAsset = ownsThisAsset || self?.permission === permissions.PermissionAction.admin
|
||||
@ -225,14 +227,11 @@ export default function AssetContextMenu(props: AssetContextMenuProps) {
|
||||
hidden={hidden}
|
||||
action="useInNewProject"
|
||||
doAction={() => {
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.newProject,
|
||||
parentId: asset.parentId,
|
||||
parentKey: asset.parentId,
|
||||
templateId: null,
|
||||
datalinkId: asset.id,
|
||||
preferredName: asset.title,
|
||||
})
|
||||
void newProject(
|
||||
{ templateName: asset.title, datalinkId: asset.id },
|
||||
asset.parentId,
|
||||
path,
|
||||
)
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
@ -513,9 +512,11 @@ export default function AssetContextMenu(props: AssetContextMenuProps) {
|
||||
<GlobalContextMenu
|
||||
hidden={hidden}
|
||||
backend={backend}
|
||||
category={category}
|
||||
rootDirectoryId={rootDirectoryId}
|
||||
directoryKey={asset.id}
|
||||
directoryId={asset.id}
|
||||
path={path}
|
||||
doPaste={doPaste}
|
||||
/>
|
||||
)}
|
||||
|
@ -16,15 +16,8 @@ import {
|
||||
type SetStateAction,
|
||||
} from 'react'
|
||||
|
||||
import {
|
||||
useMutation,
|
||||
useQueries,
|
||||
useQuery,
|
||||
useQueryClient,
|
||||
useSuspenseQuery,
|
||||
} from '@tanstack/react-query'
|
||||
import { useMutation, useQueries, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { toast } from 'react-toastify'
|
||||
import invariant from 'tiny-invariant'
|
||||
import * as z from 'zod'
|
||||
|
||||
import { uniqueString } from 'enso-common/src/utilities/uniqueString'
|
||||
@ -61,7 +54,8 @@ import {
|
||||
backendMutationOptions,
|
||||
listDirectoryQueryOptions,
|
||||
useBackendQuery,
|
||||
useUploadFileWithToastMutation,
|
||||
useRootDirectoryId,
|
||||
useUploadFiles,
|
||||
} from '#/hooks/backendHooks'
|
||||
import { useEventCallback } from '#/hooks/eventCallbackHooks'
|
||||
import { useIntersectionRatio } from '#/hooks/intersectionHooks'
|
||||
@ -76,7 +70,6 @@ import {
|
||||
type Category,
|
||||
} from '#/layouts/CategorySwitcher/Category'
|
||||
import DragModal from '#/modals/DragModal'
|
||||
import DuplicateAssetsModal from '#/modals/DuplicateAssetsModal'
|
||||
import UpsertSecretModal from '#/modals/UpsertSecretModal'
|
||||
import { useFullUserSession } from '#/providers/AuthProvider'
|
||||
import {
|
||||
@ -86,6 +79,7 @@ import {
|
||||
} from '#/providers/BackendProvider'
|
||||
import {
|
||||
useDriveStore,
|
||||
useExpandedDirectoryIds,
|
||||
useResetAssetPanelProps,
|
||||
useSetAssetPanelProps,
|
||||
useSetCanCreateAssets,
|
||||
@ -97,10 +91,11 @@ import {
|
||||
useSetSuggestions,
|
||||
useSetTargetDirectory,
|
||||
useSetVisuallySelectedKeys,
|
||||
useToggleDirectoryExpansion,
|
||||
} from '#/providers/DriveProvider'
|
||||
import { useFeatureFlag } from '#/providers/FeatureFlagsProvider'
|
||||
import { useInputBindings } from '#/providers/InputBindingsProvider'
|
||||
import { useLocalStorage, useLocalStorageState } from '#/providers/LocalStorageProvider'
|
||||
import { useLocalStorage } from '#/providers/LocalStorageProvider'
|
||||
import { useSetModal } from '#/providers/ModalProvider'
|
||||
import { useNavigator2D } from '#/providers/Navigator2DProvider'
|
||||
import { useLaunchedProjects } from '#/providers/ProjectsProvider'
|
||||
@ -108,39 +103,24 @@ import { useText } from '#/providers/TextProvider'
|
||||
import type Backend from '#/services/Backend'
|
||||
import {
|
||||
assetIsDirectory,
|
||||
assetIsFile,
|
||||
assetIsProject,
|
||||
AssetType,
|
||||
BackendType,
|
||||
createPlaceholderAssetId,
|
||||
createPlaceholderFileAsset,
|
||||
createPlaceholderProjectAsset,
|
||||
createRootDirectoryAsset,
|
||||
createSpecialEmptyAsset,
|
||||
createSpecialErrorAsset,
|
||||
createSpecialLoadingAsset,
|
||||
DatalinkId,
|
||||
DirectoryId,
|
||||
escapeSpecialCharacters,
|
||||
extractProjectExtension,
|
||||
fileIsNotProject,
|
||||
fileIsProject,
|
||||
getAssetPermissionName,
|
||||
Path,
|
||||
Plan,
|
||||
ProjectId,
|
||||
ProjectState,
|
||||
SecretId,
|
||||
stripProjectExtension,
|
||||
type AnyAsset,
|
||||
type AssetId,
|
||||
type DatalinkAsset,
|
||||
type DirectoryAsset,
|
||||
type DirectoryId,
|
||||
type LabelName,
|
||||
type ProjectAsset,
|
||||
type SecretAsset,
|
||||
} from '#/services/Backend'
|
||||
import LocalBackend from '#/services/LocalBackend'
|
||||
import { isSpecialReadonlyDirectoryId } from '#/services/RemoteBackend'
|
||||
import { ROOT_PARENT_DIRECTORY_ID } from '#/services/remoteBackendPaths'
|
||||
import type { AssetQueryKey } from '#/utilities/AssetQuery'
|
||||
@ -302,11 +282,6 @@ export interface AssetsTableState {
|
||||
readonly setQuery: Dispatch<SetStateAction<AssetQuery>>
|
||||
readonly nodeMap: Readonly<MutableRefObject<ReadonlyMap<AssetId, AnyAssetTreeNode>>>
|
||||
readonly hideColumn: (column: Column) => void
|
||||
readonly doToggleDirectoryExpansion: (
|
||||
directoryId: DirectoryId,
|
||||
key: DirectoryId,
|
||||
override?: boolean,
|
||||
) => void
|
||||
readonly doCopy: () => void
|
||||
readonly doCut: () => void
|
||||
readonly doPaste: (newParentKey: DirectoryId, newParentId: DirectoryId) => void
|
||||
@ -380,24 +355,9 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
|
||||
const { data: users } = useBackendQuery(backend, 'listUsers', [])
|
||||
const { data: userGroups } = useBackendQuery(backend, 'listUserGroups', [])
|
||||
const organizationQuery = useSuspenseQuery({
|
||||
queryKey: [backend.type, 'getOrganization'],
|
||||
queryFn: () => backend.getOrganization(),
|
||||
})
|
||||
|
||||
const organization = organizationQuery.data
|
||||
|
||||
const nameOfProjectToImmediatelyOpenRef = useRef(initialProjectName)
|
||||
const [localRootDirectory] = useLocalStorageState('localRootDirectory')
|
||||
const rootDirectoryId = useMemo(() => {
|
||||
const localRootPath = localRootDirectory != null ? Path(localRootDirectory) : null
|
||||
const id =
|
||||
'homeDirectoryId' in category ?
|
||||
category.homeDirectoryId
|
||||
: backend.rootDirectoryId(user, organization, localRootPath)
|
||||
invariant(id, 'Missing root directory')
|
||||
return id
|
||||
}, [category, backend, user, organization, localRootDirectory])
|
||||
const rootDirectoryId = useRootDirectoryId(backend, category)
|
||||
|
||||
const rootDirectory = useMemo(() => createRootDirectoryAsset(rootDirectoryId), [rootDirectoryId])
|
||||
|
||||
@ -405,16 +365,12 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
const assetsTableBackgroundRefreshInterval = useFeatureFlag(
|
||||
'assetsTableBackgroundRefreshInterval',
|
||||
)
|
||||
/**
|
||||
* The expanded directories in the asset tree.
|
||||
* We don't include the root directory as it might change when a user switches
|
||||
* between items in sidebar and we don't want to reset the expanded state using useEffect.
|
||||
*/
|
||||
const [privateExpandedDirectoryIds, setExpandedDirectoryIds] = useState<DirectoryId[]>(() => [])
|
||||
const expandedDirectoryIdsRaw = useExpandedDirectoryIds()
|
||||
const toggleDirectoryExpansion = useToggleDirectoryExpansion()
|
||||
|
||||
const expandedDirectoryIds = useMemo(
|
||||
() => [rootDirectoryId].concat(privateExpandedDirectoryIds),
|
||||
[privateExpandedDirectoryIds, rootDirectoryId],
|
||||
() => [rootDirectoryId].concat(expandedDirectoryIdsRaw),
|
||||
[expandedDirectoryIdsRaw, rootDirectoryId],
|
||||
)
|
||||
|
||||
const expandedDirectoryIdsSet = useMemo(
|
||||
@ -422,13 +378,9 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
[expandedDirectoryIds],
|
||||
)
|
||||
|
||||
const createProjectMutation = useMutation(backendMutationOptions(backend, 'createProject'))
|
||||
const uploadFiles = useUploadFiles(backend, category)
|
||||
const duplicateProjectMutation = useMutation(backendMutationOptions(backend, 'duplicateProject'))
|
||||
const createDirectoryMutation = useMutation(backendMutationOptions(backend, 'createDirectory'))
|
||||
const createSecretMutation = useMutation(backendMutationOptions(backend, 'createSecret'))
|
||||
const updateSecretMutation = useMutation(backendMutationOptions(backend, 'updateSecret'))
|
||||
const createDatalinkMutation = useMutation(backendMutationOptions(backend, 'createDatalink'))
|
||||
const uploadFileMutation = useUploadFileWithToastMutation(backend)
|
||||
const copyAssetMutation = useMutation(backendMutationOptions(backend, 'copyAsset'))
|
||||
const deleteAssetMutation = useMutation(backendMutationOptions(backend, 'deleteAsset'))
|
||||
const undoDeleteAssetMutation = useMutation(backendMutationOptions(backend, 'undoDeleteAsset'))
|
||||
@ -1193,28 +1145,6 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
[driveStore, resetAssetPanelProps, setIsAssetPanelTemporarilyVisible],
|
||||
)
|
||||
|
||||
const doToggleDirectoryExpansion = useEventCallback(
|
||||
(directoryId: DirectoryId, _key: DirectoryId, override?: boolean) => {
|
||||
const isExpanded = expandedDirectoryIdsSet.has(directoryId)
|
||||
const shouldExpand = override ?? !isExpanded
|
||||
|
||||
if (shouldExpand !== isExpanded) {
|
||||
startTransition(() => {
|
||||
if (shouldExpand) {
|
||||
setExpandedDirectoryIds((currentExpandedDirectoryIds) => [
|
||||
...currentExpandedDirectoryIds,
|
||||
directoryId,
|
||||
])
|
||||
} else {
|
||||
setExpandedDirectoryIds((currentExpandedDirectoryIds) =>
|
||||
currentExpandedDirectoryIds.filter((id) => id !== directoryId),
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
const doCopyOnBackend = useEventCallback(
|
||||
async (newParentId: DirectoryId | null, asset: AnyAsset) => {
|
||||
try {
|
||||
@ -1252,11 +1182,7 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
resetAssetPanelProps()
|
||||
}
|
||||
if (asset.type === AssetType.directory) {
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.closeFolder,
|
||||
id: asset.id,
|
||||
key: asset.id,
|
||||
})
|
||||
toggleDirectoryExpansion(asset.id, false)
|
||||
}
|
||||
try {
|
||||
if (asset.type === AssetType.project && backend.type === BackendType.local) {
|
||||
@ -1326,7 +1252,7 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
case AssetType.directory: {
|
||||
event.preventDefault()
|
||||
event.stopPropagation()
|
||||
doToggleDirectoryExpansion(item.item.id, item.key)
|
||||
toggleDirectoryExpansion(item.item.id)
|
||||
break
|
||||
}
|
||||
case AssetType.project: {
|
||||
@ -1378,7 +1304,7 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
// The folder is expanded; collapse it.
|
||||
event.preventDefault()
|
||||
event.stopPropagation()
|
||||
doToggleDirectoryExpansion(item.item.id, item.key, false)
|
||||
toggleDirectoryExpansion(item.item.id, false)
|
||||
} else if (prevIndex != null) {
|
||||
// Focus parent if there is one.
|
||||
let index = prevIndex - 1
|
||||
@ -1403,7 +1329,7 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
// The folder is collapsed; expand it.
|
||||
event.preventDefault()
|
||||
event.stopPropagation()
|
||||
doToggleDirectoryExpansion(item.item.id, item.key, true)
|
||||
toggleDirectoryExpansion(item.item.id, true)
|
||||
}
|
||||
break
|
||||
}
|
||||
@ -1493,23 +1419,6 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
}
|
||||
}, [setMostRecentlySelectedIndex])
|
||||
|
||||
const getNewProjectName = useEventCallback(
|
||||
(templateName: string | null, parentKey: DirectoryId | null) => {
|
||||
const prefix = `${templateName ?? 'New Project'} `
|
||||
const projectNameTemplate = new RegExp(`^${prefix}(?<projectIndex>\\d+)$`)
|
||||
const siblings =
|
||||
parentKey == null ?
|
||||
assetTree.children ?? []
|
||||
: nodeMapRef.current.get(parentKey)?.children ?? []
|
||||
const projectIndices = siblings
|
||||
.map((node) => node.item)
|
||||
.filter(assetIsProject)
|
||||
.map((item) => projectNameTemplate.exec(item.title)?.groups?.projectIndex)
|
||||
.map((maybeIndex) => (maybeIndex != null ? parseInt(maybeIndex, 10) : 0))
|
||||
return `${prefix}${Math.max(0, ...projectIndices) + 1}`
|
||||
},
|
||||
)
|
||||
|
||||
const deleteAsset = useEventCallback((assetId: AssetId) => {
|
||||
const asset = nodeMapRef.current.get(assetId)?.item
|
||||
|
||||
@ -1529,379 +1438,6 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
|
||||
const onAssetListEvent = useEventCallback((event: AssetListEvent) => {
|
||||
switch (event.type) {
|
||||
case AssetListEventType.newFolder: {
|
||||
const parent = nodeMapRef.current.get(event.parentKey)
|
||||
const siblings = parent?.children ?? []
|
||||
const directoryIndices = siblings
|
||||
.map((node) => node.item)
|
||||
.filter(assetIsDirectory)
|
||||
.map((item) => /^New Folder (?<directoryIndex>\d+)$/.exec(item.title))
|
||||
.map((match) => match?.groups?.directoryIndex)
|
||||
.map((maybeIndex) => (maybeIndex != null ? parseInt(maybeIndex, 10) : 0))
|
||||
const title = `New Folder ${Math.max(0, ...directoryIndices) + 1}`
|
||||
const placeholderItem: DirectoryAsset = {
|
||||
type: AssetType.directory,
|
||||
id: DirectoryId(uniqueString()),
|
||||
title,
|
||||
modifiedAt: toRfc3339(new Date()),
|
||||
parentId: event.parentId,
|
||||
permissions: tryCreateOwnerPermission(
|
||||
`${parent?.path ?? ''}/${title}`,
|
||||
category,
|
||||
user,
|
||||
users ?? [],
|
||||
userGroups ?? [],
|
||||
),
|
||||
projectState: null,
|
||||
extension: null,
|
||||
labels: [],
|
||||
description: null,
|
||||
parentsPath: '',
|
||||
virtualParentsPath: '',
|
||||
}
|
||||
|
||||
doToggleDirectoryExpansion(event.parentId, event.parentKey, true)
|
||||
|
||||
void createDirectoryMutation
|
||||
.mutateAsync([{ parentId: placeholderItem.parentId, title: placeholderItem.title }])
|
||||
.then(({ id }) => {
|
||||
setNewestFolderId(id)
|
||||
setSelectedKeys(new Set([id]))
|
||||
})
|
||||
|
||||
break
|
||||
}
|
||||
case AssetListEventType.newProject: {
|
||||
const parent = nodeMapRef.current.get(event.parentKey)
|
||||
const projectName = getNewProjectName(event.preferredName, event.parentId)
|
||||
const dummyId = createPlaceholderAssetId(AssetType.project)
|
||||
const path =
|
||||
backend instanceof LocalBackend ? backend.joinPath(event.parentId, projectName) : null
|
||||
const placeholderItem: ProjectAsset = {
|
||||
type: AssetType.project,
|
||||
id: dummyId,
|
||||
title: projectName,
|
||||
modifiedAt: toRfc3339(new Date()),
|
||||
parentId: event.parentId,
|
||||
permissions: tryCreateOwnerPermission(
|
||||
`${parent?.path ?? ''}/${projectName}`,
|
||||
category,
|
||||
user,
|
||||
users ?? [],
|
||||
userGroups ?? [],
|
||||
),
|
||||
projectState: {
|
||||
type: ProjectState.placeholder,
|
||||
volumeId: '',
|
||||
openedBy: user.email,
|
||||
...(path != null ? { path } : {}),
|
||||
},
|
||||
extension: null,
|
||||
labels: [],
|
||||
description: null,
|
||||
parentsPath: '',
|
||||
virtualParentsPath: '',
|
||||
}
|
||||
|
||||
doToggleDirectoryExpansion(event.parentId, event.parentKey, true)
|
||||
|
||||
void createProjectMutation
|
||||
.mutateAsync([
|
||||
{
|
||||
parentDirectoryId: placeholderItem.parentId,
|
||||
projectName: placeholderItem.title,
|
||||
...(event.templateId == null ? {} : { projectTemplateName: event.templateId }),
|
||||
...(event.datalinkId == null ? {} : { datalinkId: event.datalinkId }),
|
||||
},
|
||||
])
|
||||
.catch((error) => {
|
||||
event.onError?.()
|
||||
|
||||
deleteAsset(placeholderItem.id)
|
||||
toastAndLog('createProjectError', error)
|
||||
|
||||
throw error
|
||||
})
|
||||
.then((createdProject) => {
|
||||
event.onCreated?.(createdProject, placeholderItem.parentId)
|
||||
|
||||
doOpenProject({
|
||||
id: createdProject.projectId,
|
||||
type: backend.type,
|
||||
parentId: placeholderItem.parentId,
|
||||
title: placeholderItem.title,
|
||||
})
|
||||
})
|
||||
|
||||
break
|
||||
}
|
||||
case AssetListEventType.uploadFiles: {
|
||||
const localBackend = backend instanceof LocalBackend ? backend : null
|
||||
const reversedFiles = Array.from(event.files).reverse()
|
||||
const parent = nodeMapRef.current.get(event.parentKey)
|
||||
const siblingNodes = parent?.children ?? []
|
||||
const siblings = siblingNodes.map((node) => node.item)
|
||||
const siblingFiles = siblings.filter(assetIsFile)
|
||||
const siblingProjects = siblings.filter(assetIsProject)
|
||||
const siblingFileTitles = new Set(siblingFiles.map((asset) => asset.title))
|
||||
const siblingProjectTitles = new Set(siblingProjects.map((asset) => asset.title))
|
||||
|
||||
const ownerPermission = tryCreateOwnerPermission(
|
||||
parent?.path ?? '',
|
||||
category,
|
||||
user,
|
||||
users ?? [],
|
||||
userGroups ?? [],
|
||||
)
|
||||
|
||||
const files = reversedFiles.filter(fileIsNotProject).map((file) => {
|
||||
const asset = createPlaceholderFileAsset(
|
||||
escapeSpecialCharacters(file.name),
|
||||
event.parentId,
|
||||
ownerPermission,
|
||||
)
|
||||
return { asset, file }
|
||||
})
|
||||
const projects = reversedFiles.filter(fileIsProject).map((file) => {
|
||||
const basename = escapeSpecialCharacters(stripProjectExtension(file.name))
|
||||
const asset = createPlaceholderProjectAsset(
|
||||
basename,
|
||||
event.parentId,
|
||||
ownerPermission,
|
||||
user,
|
||||
localBackend?.joinPath(event.parentId, basename) ?? null,
|
||||
)
|
||||
|
||||
return { asset, file }
|
||||
})
|
||||
const duplicateFiles = files.filter((file) => siblingFileTitles.has(file.asset.title))
|
||||
const duplicateProjects = projects.filter((project) =>
|
||||
siblingProjectTitles.has(stripProjectExtension(project.asset.title)),
|
||||
)
|
||||
const fileMap = new Map<AssetId, File>([
|
||||
...files.map(({ asset, file }) => [asset.id, file] as const),
|
||||
...projects.map(({ asset, file }) => [asset.id, file] as const),
|
||||
])
|
||||
const uploadedFileIds: AssetId[] = []
|
||||
const addIdToSelection = (id: AssetId) => {
|
||||
uploadedFileIds.push(id)
|
||||
const newIds = new Set(uploadedFileIds)
|
||||
setSelectedKeys(newIds)
|
||||
}
|
||||
|
||||
const doUploadFile = async (asset: AnyAsset, method: 'new' | 'update') => {
|
||||
const file = fileMap.get(asset.id)
|
||||
|
||||
if (file != null) {
|
||||
const fileId = method === 'new' ? null : asset.id
|
||||
|
||||
switch (true) {
|
||||
case assetIsProject(asset): {
|
||||
const { extension } = extractProjectExtension(file.name)
|
||||
const title = escapeSpecialCharacters(stripProjectExtension(asset.title))
|
||||
|
||||
await uploadFileMutation
|
||||
.mutateAsync(
|
||||
{
|
||||
fileId,
|
||||
fileName: `${title}.${extension}`,
|
||||
parentDirectoryId: asset.parentId,
|
||||
},
|
||||
file,
|
||||
)
|
||||
.then(({ id }) => {
|
||||
addIdToSelection(id)
|
||||
})
|
||||
.catch((error) => {
|
||||
toastAndLog('uploadProjectError', error)
|
||||
})
|
||||
|
||||
break
|
||||
}
|
||||
case assetIsFile(asset): {
|
||||
const title = escapeSpecialCharacters(asset.title)
|
||||
await uploadFileMutation
|
||||
.mutateAsync({ fileId, fileName: title, parentDirectoryId: asset.parentId }, file)
|
||||
.then(({ id }) => {
|
||||
addIdToSelection(id)
|
||||
})
|
||||
|
||||
break
|
||||
}
|
||||
default:
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (duplicateFiles.length === 0 && duplicateProjects.length === 0) {
|
||||
const assets = [...files, ...projects].map(({ asset }) => asset)
|
||||
|
||||
doToggleDirectoryExpansion(event.parentId, event.parentKey, true)
|
||||
|
||||
void Promise.all(assets.map((asset) => doUploadFile(asset, 'new')))
|
||||
} else {
|
||||
const siblingFilesByName = new Map(siblingFiles.map((file) => [file.title, file]))
|
||||
const siblingProjectsByName = new Map(
|
||||
siblingProjects.map((project) => [project.title, project]),
|
||||
)
|
||||
const conflictingFiles = duplicateFiles.map((file) => ({
|
||||
// This is SAFE, as `duplicateFiles` only contains files that have siblings
|
||||
// with the same name.
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
current: siblingFilesByName.get(file.asset.title)!,
|
||||
new: createPlaceholderFileAsset(file.asset.title, event.parentId, ownerPermission),
|
||||
file: file.file,
|
||||
}))
|
||||
const conflictingProjects = duplicateProjects.map((project) => {
|
||||
const basename = stripProjectExtension(project.asset.title)
|
||||
return {
|
||||
// This is SAFE, as `duplicateProjects` only contains projects that have
|
||||
// siblings with the same name.
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
current: siblingProjectsByName.get(basename)!,
|
||||
new: createPlaceholderProjectAsset(
|
||||
basename,
|
||||
event.parentId,
|
||||
ownerPermission,
|
||||
user,
|
||||
localBackend?.joinPath(event.parentId, basename) ?? null,
|
||||
),
|
||||
file: project.file,
|
||||
}
|
||||
})
|
||||
setModal(
|
||||
<DuplicateAssetsModal
|
||||
parentKey={event.parentKey}
|
||||
parentId={event.parentId}
|
||||
conflictingFiles={conflictingFiles}
|
||||
conflictingProjects={conflictingProjects}
|
||||
siblingFileNames={siblingFilesByName.keys()}
|
||||
siblingProjectNames={siblingProjectsByName.keys()}
|
||||
nonConflictingFileCount={files.length - conflictingFiles.length}
|
||||
nonConflictingProjectCount={projects.length - conflictingProjects.length}
|
||||
doUpdateConflicting={async (resolvedConflicts) => {
|
||||
await Promise.allSettled(
|
||||
resolvedConflicts.map((conflict) => {
|
||||
const isUpdating = conflict.current.title === conflict.new.title
|
||||
const asset = isUpdating ? conflict.current : conflict.new
|
||||
fileMap.set(asset.id, conflict.file)
|
||||
return doUploadFile(asset, isUpdating ? 'update' : 'new')
|
||||
}),
|
||||
)
|
||||
}}
|
||||
doUploadNonConflicting={async () => {
|
||||
doToggleDirectoryExpansion(event.parentId, event.parentKey, true)
|
||||
|
||||
const newFiles = files
|
||||
.filter((file) => !siblingFileTitles.has(file.asset.title))
|
||||
.map((file) => {
|
||||
const asset = createPlaceholderFileAsset(
|
||||
file.asset.title,
|
||||
event.parentId,
|
||||
ownerPermission,
|
||||
)
|
||||
fileMap.set(asset.id, file.file)
|
||||
return asset
|
||||
})
|
||||
|
||||
const newProjects = projects
|
||||
.filter(
|
||||
(project) =>
|
||||
!siblingProjectTitles.has(stripProjectExtension(project.asset.title)),
|
||||
)
|
||||
.map((project) => {
|
||||
const basename = stripProjectExtension(project.asset.title)
|
||||
const asset = createPlaceholderProjectAsset(
|
||||
basename,
|
||||
event.parentId,
|
||||
ownerPermission,
|
||||
user,
|
||||
localBackend?.joinPath(event.parentId, basename) ?? null,
|
||||
)
|
||||
fileMap.set(asset.id, project.file)
|
||||
return asset
|
||||
})
|
||||
|
||||
const assets = [...newFiles, ...newProjects]
|
||||
|
||||
await Promise.allSettled(assets.map((asset) => doUploadFile(asset, 'new')))
|
||||
}}
|
||||
/>,
|
||||
)
|
||||
}
|
||||
break
|
||||
}
|
||||
case AssetListEventType.newDatalink: {
|
||||
const parent = nodeMapRef.current.get(event.parentKey)
|
||||
const placeholderItem: DatalinkAsset = {
|
||||
type: AssetType.datalink,
|
||||
id: DatalinkId(uniqueString()),
|
||||
title: event.name,
|
||||
modifiedAt: toRfc3339(new Date()),
|
||||
parentId: event.parentId,
|
||||
permissions: tryCreateOwnerPermission(
|
||||
`${parent?.path ?? ''}/${event.name}`,
|
||||
category,
|
||||
user,
|
||||
users ?? [],
|
||||
userGroups ?? [],
|
||||
),
|
||||
projectState: null,
|
||||
extension: null,
|
||||
labels: [],
|
||||
description: null,
|
||||
parentsPath: '',
|
||||
virtualParentsPath: '',
|
||||
}
|
||||
doToggleDirectoryExpansion(event.parentId, event.parentKey, true)
|
||||
|
||||
createDatalinkMutation.mutate([
|
||||
{
|
||||
parentDirectoryId: placeholderItem.parentId,
|
||||
datalinkId: null,
|
||||
name: placeholderItem.title,
|
||||
value: event.value,
|
||||
},
|
||||
])
|
||||
|
||||
break
|
||||
}
|
||||
case AssetListEventType.newSecret: {
|
||||
const parent = nodeMapRef.current.get(event.parentKey)
|
||||
const placeholderItem: SecretAsset = {
|
||||
type: AssetType.secret,
|
||||
id: SecretId(uniqueString()),
|
||||
title: event.name,
|
||||
modifiedAt: toRfc3339(new Date()),
|
||||
parentId: event.parentId,
|
||||
permissions: tryCreateOwnerPermission(
|
||||
`${parent?.path ?? ''}/${event.name}`,
|
||||
category,
|
||||
user,
|
||||
users ?? [],
|
||||
userGroups ?? [],
|
||||
),
|
||||
projectState: null,
|
||||
extension: null,
|
||||
labels: [],
|
||||
description: null,
|
||||
parentsPath: '',
|
||||
virtualParentsPath: '',
|
||||
}
|
||||
|
||||
doToggleDirectoryExpansion(event.parentId, event.parentKey, true)
|
||||
|
||||
createSecretMutation.mutate([
|
||||
{
|
||||
parentDirectoryId: placeholderItem.parentId,
|
||||
name: placeholderItem.title,
|
||||
value: event.value,
|
||||
},
|
||||
])
|
||||
|
||||
break
|
||||
}
|
||||
case AssetListEventType.duplicateProject: {
|
||||
const parent = nodeMapRef.current.get(event.parentKey)
|
||||
const siblings = parent?.children ?? []
|
||||
@ -1998,10 +1534,6 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
dispatchAssetEvent({ type: AssetEventType.removeSelf, id: event.id })
|
||||
break
|
||||
}
|
||||
case AssetListEventType.closeFolder: {
|
||||
doToggleDirectoryExpansion(event.id, event.key, false)
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
@ -2050,7 +1582,7 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
if (pasteData.data.ids.has(newParentKey)) {
|
||||
toast.error('Cannot paste a folder into itself.')
|
||||
} else {
|
||||
doToggleDirectoryExpansion(newParentId, newParentKey, true)
|
||||
toggleDirectoryExpansion(newParentId, true)
|
||||
if (pasteData.type === 'copy') {
|
||||
const assets = Array.from(pasteData.data.ids, (id) => nodeMapRef.current.get(id)).flatMap(
|
||||
(asset) => (asset ? [asset.item] : []),
|
||||
@ -2124,12 +1656,7 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
if (event.dataTransfer.types.includes('Files')) {
|
||||
event.preventDefault()
|
||||
event.stopPropagation()
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.uploadFiles,
|
||||
parentKey: rootDirectoryId,
|
||||
parentId: rootDirectoryId,
|
||||
files: Array.from(event.dataTransfer.files),
|
||||
})
|
||||
void uploadFiles(Array.from(event.dataTransfer.files), rootDirectoryId, rootDirectoryId)
|
||||
}
|
||||
}
|
||||
|
||||
@ -2147,7 +1674,6 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
setQuery,
|
||||
nodeMap: nodeMapRef,
|
||||
hideColumn,
|
||||
doToggleDirectoryExpansion,
|
||||
doCopy,
|
||||
doCut,
|
||||
doPaste,
|
||||
@ -2162,7 +1688,6 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
category,
|
||||
sortInfo,
|
||||
query,
|
||||
doToggleDirectoryExpansion,
|
||||
doCopy,
|
||||
doCut,
|
||||
doPaste,
|
||||
@ -2702,12 +2227,7 @@ export default function AssetsTable(props: AssetsTableProps) {
|
||||
>
|
||||
<FileTrigger
|
||||
onSelect={(event) => {
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.uploadFiles,
|
||||
parentKey: rootDirectoryId,
|
||||
parentId: rootDirectoryId,
|
||||
files: Array.from(event ?? []),
|
||||
})
|
||||
void uploadFiles(Array.from(event ?? []), rootDirectoryId, rootDirectoryId)
|
||||
}}
|
||||
>
|
||||
<Button
|
||||
|
@ -245,9 +245,11 @@ export default function AssetsTableContextMenu(props: AssetsTableContextMenuProp
|
||||
<GlobalContextMenu
|
||||
hidden={hidden}
|
||||
backend={backend}
|
||||
category={category}
|
||||
rootDirectoryId={rootDirectoryId}
|
||||
directoryKey={null}
|
||||
directoryId={null}
|
||||
path={null}
|
||||
doPaste={doPaste}
|
||||
/>
|
||||
)}
|
||||
|
@ -1,8 +1,6 @@
|
||||
/** @file The directory header bar and directory item listing. */
|
||||
import * as React from 'react'
|
||||
|
||||
import invariant from 'tiny-invariant'
|
||||
|
||||
import * as appUtils from '#/appUtils'
|
||||
|
||||
import * as offlineHooks from '#/hooks/offlineHooks'
|
||||
@ -26,16 +24,10 @@ import Labels from '#/layouts/Labels'
|
||||
import * as ariaComponents from '#/components/AriaComponents'
|
||||
import * as result from '#/components/Result'
|
||||
|
||||
import * as backendModule from '#/services/Backend'
|
||||
|
||||
import { useEventCallback } from '#/hooks/eventCallbackHooks'
|
||||
import { useDriveStore } from '#/providers/DriveProvider'
|
||||
import { useLocalStorageState } from '#/providers/LocalStorageProvider'
|
||||
import AssetQuery from '#/utilities/AssetQuery'
|
||||
import * as download from '#/utilities/download'
|
||||
import * as github from '#/utilities/github'
|
||||
import * as tailwindMerge from '#/utilities/tailwindMerge'
|
||||
import { useSuspenseQuery } from '@tanstack/react-query'
|
||||
|
||||
// =============
|
||||
// === Drive ===
|
||||
@ -62,27 +54,6 @@ export default function Drive(props: DriveProps) {
|
||||
const { getText } = textProvider.useText()
|
||||
const dispatchAssetListEvent = eventListProvider.useDispatchAssetListEvent()
|
||||
const [query, setQuery] = React.useState(() => AssetQuery.fromString(''))
|
||||
const organizationQuery = useSuspenseQuery({
|
||||
queryKey: [backend.type, 'getOrganization'],
|
||||
queryFn: () => backend.getOrganization(),
|
||||
})
|
||||
const organization = organizationQuery.data ?? null
|
||||
const [localRootDirectory] = useLocalStorageState('localRootDirectory')
|
||||
const rootDirectoryId = React.useMemo(() => {
|
||||
switch (category.type) {
|
||||
case 'user':
|
||||
case 'team': {
|
||||
return category.homeDirectoryId
|
||||
}
|
||||
default: {
|
||||
const localRootPath =
|
||||
localRootDirectory != null ? backendModule.Path(localRootDirectory) : null
|
||||
const id = backend.rootDirectoryId(user, organization, localRootPath)
|
||||
invariant(id, 'Missing root directory')
|
||||
return id
|
||||
}
|
||||
}
|
||||
}, [category, backend, user, organization, localRootDirectory])
|
||||
const isCloud = categoryModule.isCloudCategory(category)
|
||||
const supportLocalBackend = localBackend != null
|
||||
|
||||
@ -91,82 +62,10 @@ export default function Drive(props: DriveProps) {
|
||||
: isCloud && !user.isEnabled ? 'not-enabled'
|
||||
: 'ok'
|
||||
|
||||
const driveStore = useDriveStore()
|
||||
|
||||
const getTargetDirectory = React.useCallback(
|
||||
() => driveStore.getState().targetDirectory,
|
||||
[driveStore],
|
||||
)
|
||||
|
||||
const doUploadFiles = useEventCallback((files: File[]) => {
|
||||
if (isCloud && isOffline) {
|
||||
// This should never happen, however display a nice error message in case it does.
|
||||
toastAndLog('offlineUploadFilesError')
|
||||
} else {
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.uploadFiles,
|
||||
parentKey: getTargetDirectory()?.key ?? rootDirectoryId,
|
||||
parentId: getTargetDirectory()?.item.id ?? rootDirectoryId,
|
||||
files,
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
const doEmptyTrash = React.useCallback(() => {
|
||||
dispatchAssetListEvent({ type: AssetListEventType.emptyTrash })
|
||||
}, [dispatchAssetListEvent])
|
||||
|
||||
const doCreateProject = useEventCallback(
|
||||
(
|
||||
templateId: string | null = null,
|
||||
templateName: string | null = null,
|
||||
onCreated?: (
|
||||
project: backendModule.CreatedProject,
|
||||
parentId: backendModule.DirectoryId,
|
||||
) => void,
|
||||
onError?: () => void,
|
||||
) => {
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.newProject,
|
||||
parentKey: getTargetDirectory()?.key ?? rootDirectoryId,
|
||||
parentId: getTargetDirectory()?.item.id ?? rootDirectoryId,
|
||||
templateId,
|
||||
datalinkId: null,
|
||||
preferredName: templateName,
|
||||
...(onCreated ? { onCreated } : {}),
|
||||
...(onError ? { onError } : {}),
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
const doCreateDirectory = useEventCallback(() => {
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.newFolder,
|
||||
parentKey: getTargetDirectory()?.key ?? rootDirectoryId,
|
||||
parentId: getTargetDirectory()?.item.id ?? rootDirectoryId,
|
||||
})
|
||||
})
|
||||
|
||||
const doCreateSecret = useEventCallback((name: string, value: string) => {
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.newSecret,
|
||||
parentKey: getTargetDirectory()?.key ?? rootDirectoryId,
|
||||
parentId: getTargetDirectory()?.item.id ?? rootDirectoryId,
|
||||
name,
|
||||
value,
|
||||
})
|
||||
})
|
||||
|
||||
const doCreateDatalink = useEventCallback((name: string, value: unknown) => {
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.newDatalink,
|
||||
parentKey: getTargetDirectory()?.key ?? rootDirectoryId,
|
||||
parentId: getTargetDirectory()?.item.id ?? rootDirectoryId,
|
||||
name,
|
||||
value,
|
||||
})
|
||||
})
|
||||
|
||||
switch (status) {
|
||||
case 'not-enabled': {
|
||||
return (
|
||||
@ -216,11 +115,6 @@ export default function Drive(props: DriveProps) {
|
||||
setQuery={setQuery}
|
||||
category={category}
|
||||
doEmptyTrash={doEmptyTrash}
|
||||
doCreateProject={doCreateProject}
|
||||
doUploadFiles={doUploadFiles}
|
||||
doCreateDirectory={doCreateDirectory}
|
||||
doCreateSecret={doCreateSecret}
|
||||
doCreateDatalink={doCreateDatalink}
|
||||
/>
|
||||
|
||||
<div className="flex flex-1 gap-drive overflow-hidden">
|
||||
|
@ -4,7 +4,7 @@
|
||||
*/
|
||||
import * as React from 'react'
|
||||
|
||||
import { useQuery } from '@tanstack/react-query'
|
||||
import { useMutation } from '@tanstack/react-query'
|
||||
|
||||
import AddDatalinkIcon from '#/assets/add_datalink.svg'
|
||||
import AddFolderIcon from '#/assets/add_folder.svg'
|
||||
@ -12,7 +12,6 @@ import AddKeyIcon from '#/assets/add_key.svg'
|
||||
import DataDownloadIcon from '#/assets/data_download.svg'
|
||||
import DataUploadIcon from '#/assets/data_upload.svg'
|
||||
import Plus2Icon from '#/assets/plus2.svg'
|
||||
import { Input as AriaInput } from '#/components/aria'
|
||||
import {
|
||||
Button,
|
||||
ButtonGroup,
|
||||
@ -21,8 +20,16 @@ import {
|
||||
useVisualTooltip,
|
||||
} from '#/components/AriaComponents'
|
||||
import AssetEventType from '#/events/AssetEventType'
|
||||
import {
|
||||
useNewDatalink,
|
||||
useNewFolder,
|
||||
useNewProject,
|
||||
useNewSecret,
|
||||
useRootDirectoryId,
|
||||
useUploadFiles,
|
||||
} from '#/hooks/backendHooks'
|
||||
import { useEventCallback } from '#/hooks/eventCallbackHooks'
|
||||
import { useOffline } from '#/hooks/offlineHooks'
|
||||
import { createGetProjectDetailsQuery } from '#/hooks/projectHooks'
|
||||
import { useSearchParamsState } from '#/hooks/searchParamsStateHooks'
|
||||
import AssetSearchBar from '#/layouts/AssetSearchBar'
|
||||
import { useDispatchAssetEvent } from '#/layouts/AssetsTable/EventListProvider'
|
||||
@ -35,13 +42,16 @@ import StartModal from '#/layouts/StartModal'
|
||||
import ConfirmDeleteModal from '#/modals/ConfirmDeleteModal'
|
||||
import UpsertDatalinkModal from '#/modals/UpsertDatalinkModal'
|
||||
import UpsertSecretModal from '#/modals/UpsertSecretModal'
|
||||
import { useCanCreateAssets, useCanDownload, usePasteData } from '#/providers/DriveProvider'
|
||||
import {
|
||||
useCanCreateAssets,
|
||||
useCanDownload,
|
||||
useDriveStore,
|
||||
usePasteData,
|
||||
} from '#/providers/DriveProvider'
|
||||
import { useInputBindings } from '#/providers/InputBindingsProvider'
|
||||
import { useSetModal } from '#/providers/ModalProvider'
|
||||
import { useText } from '#/providers/TextProvider'
|
||||
import type Backend from '#/services/Backend'
|
||||
import type { DirectoryId } from '#/services/Backend'
|
||||
import { ProjectState, type CreatedProject, type ProjectId } from '#/services/Backend'
|
||||
import type AssetQuery from '#/utilities/AssetQuery'
|
||||
import { inputFiles } from '#/utilities/input'
|
||||
import * as sanitizedEventTargets from '#/utilities/sanitizedEventTargets'
|
||||
@ -58,16 +68,6 @@ export interface DriveBarProps {
|
||||
readonly setQuery: React.Dispatch<React.SetStateAction<AssetQuery>>
|
||||
readonly category: Category
|
||||
readonly doEmptyTrash: () => void
|
||||
readonly doCreateProject: (
|
||||
templateId?: string | null,
|
||||
templateName?: string | null,
|
||||
onCreated?: (project: CreatedProject, parentId: DirectoryId) => void,
|
||||
onError?: () => void,
|
||||
) => void
|
||||
readonly doCreateDirectory: () => void
|
||||
readonly doCreateSecret: (name: string, value: string) => void
|
||||
readonly doCreateDatalink: (name: string, value: unknown) => void
|
||||
readonly doUploadFiles: (files: File[]) => void
|
||||
}
|
||||
|
||||
/**
|
||||
@ -75,9 +75,7 @@ export interface DriveBarProps {
|
||||
* and a column display mode switcher.
|
||||
*/
|
||||
export default function DriveBar(props: DriveBarProps) {
|
||||
const { backend, query, setQuery, category } = props
|
||||
const { doEmptyTrash, doCreateProject, doCreateDirectory } = props
|
||||
const { doCreateSecret, doCreateDatalink, doUploadFiles } = props
|
||||
const { backend, query, setQuery, category, doEmptyTrash } = props
|
||||
|
||||
const [startModalDefaultOpen, , resetStartModalDefaultOpen] = useSearchParamsState(
|
||||
'startModalDefaultOpen',
|
||||
@ -86,11 +84,11 @@ export default function DriveBar(props: DriveBarProps) {
|
||||
|
||||
const { unsetModal } = useSetModal()
|
||||
const { getText } = useText()
|
||||
const driveStore = useDriveStore()
|
||||
const inputBindings = useInputBindings()
|
||||
const dispatchAssetEvent = useDispatchAssetEvent()
|
||||
const canCreateAssets = useCanCreateAssets()
|
||||
const createAssetButtonsRef = React.useRef<HTMLDivElement>(null)
|
||||
const uploadFilesRef = React.useRef<HTMLInputElement>(null)
|
||||
const isCloud = isCloudCategory(category)
|
||||
const { isOffline } = useOffline()
|
||||
const canDownload = useCanDownload()
|
||||
@ -105,12 +103,6 @@ export default function DriveBar(props: DriveBarProps) {
|
||||
targetRef: createAssetButtonsRef,
|
||||
overlayPositionProps: { placement: 'top' },
|
||||
})
|
||||
const [isCreatingProjectFromTemplate, setIsCreatingProjectFromTemplate] = React.useState(false)
|
||||
const [isCreatingProject, setIsCreatingProject] = React.useState(false)
|
||||
const [createdProjectId, setCreatedProjectId] = React.useState<{
|
||||
projectId: ProjectId
|
||||
parentId: DirectoryId
|
||||
} | null>(null)
|
||||
const pasteData = usePasteData()
|
||||
const effectivePasteData =
|
||||
(
|
||||
@ -120,60 +112,64 @@ export default function DriveBar(props: DriveBarProps) {
|
||||
pasteData
|
||||
: null
|
||||
|
||||
const getTargetDirectory = useEventCallback(() => driveStore.getState().targetDirectory)
|
||||
const rootDirectoryId = useRootDirectoryId(backend, category)
|
||||
|
||||
const newFolderRaw = useNewFolder(backend, category)
|
||||
const newFolder = useEventCallback(async () => {
|
||||
const parent = getTargetDirectory()
|
||||
return await newFolderRaw(parent?.directoryId ?? rootDirectoryId, parent?.path)
|
||||
})
|
||||
const uploadFilesRaw = useUploadFiles(backend, category)
|
||||
const uploadFiles = useEventCallback(async (files: readonly File[]) => {
|
||||
const parent = getTargetDirectory()
|
||||
await uploadFilesRaw(files, parent?.directoryId ?? rootDirectoryId, parent?.path)
|
||||
})
|
||||
const newSecretRaw = useNewSecret(backend, category)
|
||||
const newSecret = useEventCallback(async (name: string, value: string) => {
|
||||
const parent = getTargetDirectory()
|
||||
return await newSecretRaw(name, value, parent?.directoryId ?? rootDirectoryId, parent?.path)
|
||||
})
|
||||
const newDatalinkRaw = useNewDatalink(backend, category)
|
||||
const newDatalink = useEventCallback(async (name: string, value: unknown) => {
|
||||
const parent = getTargetDirectory()
|
||||
return await newDatalinkRaw(name, value, parent?.directoryId ?? rootDirectoryId, parent?.path)
|
||||
})
|
||||
const newProjectRaw = useNewProject(backend, category)
|
||||
const newProjectMutation = useMutation({
|
||||
mutationKey: ['newProject'],
|
||||
mutationFn: async ([templateId, templateName]: [
|
||||
templateId: string | null | undefined,
|
||||
templateName: string | null | undefined,
|
||||
]) => {
|
||||
const parent = getTargetDirectory()
|
||||
return await newProjectRaw(
|
||||
{ templateName, templateId },
|
||||
parent?.directoryId ?? rootDirectoryId,
|
||||
parent?.path,
|
||||
)
|
||||
},
|
||||
})
|
||||
const newProject = newProjectMutation.mutateAsync
|
||||
const isCreatingProject = newProjectMutation.isPending
|
||||
|
||||
React.useEffect(() => {
|
||||
return inputBindings.attach(sanitizedEventTargets.document.body, 'keydown', {
|
||||
...(isCloud ?
|
||||
{
|
||||
newFolder: () => {
|
||||
doCreateDirectory()
|
||||
void newFolder()
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
newProject: () => {
|
||||
setIsCreatingProject(true)
|
||||
doCreateProject(
|
||||
null,
|
||||
null,
|
||||
(project, parentId) => {
|
||||
setCreatedProjectId({ projectId: project.projectId, parentId })
|
||||
},
|
||||
() => {
|
||||
setIsCreatingProject(false)
|
||||
},
|
||||
)
|
||||
void newProject([null, null])
|
||||
},
|
||||
uploadFiles: () => {
|
||||
uploadFilesRef.current?.click()
|
||||
void inputFiles().then((files) => uploadFiles(Array.from(files)))
|
||||
},
|
||||
})
|
||||
}, [isCloud, doCreateDirectory, doCreateProject, inputBindings])
|
||||
|
||||
const createdProjectQuery = useQuery({
|
||||
...createGetProjectDetailsQuery({
|
||||
// This is safe because we disable the query when `createdProjectId` is `null`.
|
||||
// see `enabled` property below.
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
assetId: createdProjectId?.projectId as ProjectId,
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
parentId: createdProjectId?.parentId as DirectoryId,
|
||||
backend,
|
||||
}),
|
||||
enabled: createdProjectId != null,
|
||||
})
|
||||
|
||||
const isFetching =
|
||||
(createdProjectQuery.isLoading ||
|
||||
(createdProjectQuery.data &&
|
||||
createdProjectQuery.data.state.type !== ProjectState.opened &&
|
||||
createdProjectQuery.data.state.type !== ProjectState.closing)) ??
|
||||
false
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!isFetching) {
|
||||
setIsCreatingProject(false)
|
||||
setIsCreatingProjectFromTemplate(false)
|
||||
}
|
||||
}, [isFetching])
|
||||
}, [inputBindings, isCloud, newFolder, newProject, uploadFiles])
|
||||
|
||||
const searchBar = (
|
||||
<AssetSearchBar backend={backend} isCloud={isCloud} query={query} setQuery={setQuery} />
|
||||
@ -246,9 +242,8 @@ export default function DriveBar(props: DriveBarProps) {
|
||||
<Button
|
||||
size="medium"
|
||||
variant="accent"
|
||||
isDisabled={shouldBeDisabled || isCreatingProject || isCreatingProjectFromTemplate}
|
||||
isDisabled={shouldBeDisabled || isCreatingProject}
|
||||
icon={Plus2Icon}
|
||||
loading={isCreatingProjectFromTemplate}
|
||||
loaderPosition="icon"
|
||||
>
|
||||
{getText('startWithATemplate')}
|
||||
@ -256,40 +251,18 @@ export default function DriveBar(props: DriveBarProps) {
|
||||
|
||||
<StartModal
|
||||
createProject={(templateId, templateName) => {
|
||||
setIsCreatingProjectFromTemplate(true)
|
||||
doCreateProject(
|
||||
templateId,
|
||||
templateName,
|
||||
({ projectId }, parentId) => {
|
||||
setCreatedProjectId({ projectId, parentId })
|
||||
},
|
||||
() => {
|
||||
setIsCreatingProjectFromTemplate(false)
|
||||
},
|
||||
)
|
||||
void newProject([templateId, templateName])
|
||||
}}
|
||||
/>
|
||||
</DialogTrigger>
|
||||
<Button
|
||||
size="medium"
|
||||
variant="outline"
|
||||
isDisabled={shouldBeDisabled || isCreatingProject || isCreatingProjectFromTemplate}
|
||||
isDisabled={shouldBeDisabled || isCreatingProject}
|
||||
icon={Plus2Icon}
|
||||
loading={isCreatingProject}
|
||||
loaderPosition="icon"
|
||||
onPress={() => {
|
||||
setIsCreatingProject(true)
|
||||
doCreateProject(
|
||||
null,
|
||||
null,
|
||||
({ projectId }, parentId) => {
|
||||
setCreatedProjectId({ projectId, parentId })
|
||||
setIsCreatingProject(false)
|
||||
},
|
||||
() => {
|
||||
setIsCreatingProject(false)
|
||||
},
|
||||
)
|
||||
onPress={async () => {
|
||||
await newProject([null, null])
|
||||
}}
|
||||
>
|
||||
{getText('newEmptyProject')}
|
||||
@ -301,8 +274,8 @@ export default function DriveBar(props: DriveBarProps) {
|
||||
icon={AddFolderIcon}
|
||||
isDisabled={shouldBeDisabled}
|
||||
aria-label={getText('newFolder')}
|
||||
onPress={() => {
|
||||
doCreateDirectory()
|
||||
onPress={async () => {
|
||||
await newFolder()
|
||||
}}
|
||||
/>
|
||||
{isCloud && (
|
||||
@ -314,7 +287,13 @@ export default function DriveBar(props: DriveBarProps) {
|
||||
isDisabled={shouldBeDisabled}
|
||||
aria-label={getText('newSecret')}
|
||||
/>
|
||||
<UpsertSecretModal id={null} name={null} doCreate={doCreateSecret} />
|
||||
<UpsertSecretModal
|
||||
id={null}
|
||||
name={null}
|
||||
doCreate={async (name, value) => {
|
||||
await newSecret(name, value)
|
||||
}}
|
||||
/>
|
||||
</DialogTrigger>
|
||||
)}
|
||||
|
||||
@ -327,23 +306,13 @@ export default function DriveBar(props: DriveBarProps) {
|
||||
isDisabled={shouldBeDisabled}
|
||||
aria-label={getText('newDatalink')}
|
||||
/>
|
||||
<UpsertDatalinkModal doCreate={doCreateDatalink} />
|
||||
<UpsertDatalinkModal
|
||||
doCreate={async (name, value) => {
|
||||
await newDatalink(name, value)
|
||||
}}
|
||||
/>
|
||||
</DialogTrigger>
|
||||
)}
|
||||
<AriaInput
|
||||
ref={uploadFilesRef}
|
||||
type="file"
|
||||
multiple
|
||||
className="hidden"
|
||||
onInput={(event) => {
|
||||
if (event.currentTarget.files != null) {
|
||||
doUploadFiles(Array.from(event.currentTarget.files))
|
||||
}
|
||||
// Clear the list of selected files, otherwise `onInput` will not be
|
||||
// dispatched again if the same file is selected.
|
||||
event.currentTarget.value = ''
|
||||
}}
|
||||
/>
|
||||
<Button
|
||||
variant="icon"
|
||||
size="medium"
|
||||
@ -352,7 +321,7 @@ export default function DriveBar(props: DriveBarProps) {
|
||||
aria-label={getText('uploadFiles')}
|
||||
onPress={async () => {
|
||||
const files = await inputFiles()
|
||||
doUploadFiles(Array.from(files))
|
||||
await uploadFiles(Array.from(files))
|
||||
}}
|
||||
/>
|
||||
<Button
|
||||
|
@ -1,34 +1,38 @@
|
||||
/** @file A context menu available everywhere in the directory. */
|
||||
import { useStore } from 'zustand'
|
||||
|
||||
import AssetListEventType from '#/events/AssetListEventType'
|
||||
|
||||
import ContextMenu from '#/components/ContextMenu'
|
||||
import ContextMenuEntry from '#/components/ContextMenuEntry'
|
||||
|
||||
import UpsertDatalinkModal from '#/modals/UpsertDatalinkModal'
|
||||
import UpsertSecretModal from '#/modals/UpsertSecretModal'
|
||||
|
||||
import { useDispatchAssetListEvent } from '#/layouts/AssetsTable/EventListProvider'
|
||||
import {
|
||||
useNewDatalink,
|
||||
useNewFolder,
|
||||
useNewProject,
|
||||
useNewSecret,
|
||||
useUploadFiles,
|
||||
} from '#/hooks/backendHooks'
|
||||
import { useEventCallback } from '#/hooks/eventCallbackHooks'
|
||||
import type { Category } from '#/layouts/CategorySwitcher/Category'
|
||||
import { useDriveStore } from '#/providers/DriveProvider'
|
||||
import { useSetModal } from '#/providers/ModalProvider'
|
||||
import { useText } from '#/providers/TextProvider'
|
||||
import type * as backendModule from '#/services/Backend'
|
||||
import type Backend from '#/services/Backend'
|
||||
import { BackendType } from '#/services/Backend'
|
||||
import { BackendType, type DirectoryId } from '#/services/Backend'
|
||||
import { inputFiles } from '#/utilities/input'
|
||||
|
||||
/** Props for a {@link GlobalContextMenu}. */
|
||||
export interface GlobalContextMenuProps {
|
||||
readonly hidden?: boolean
|
||||
readonly backend: Backend
|
||||
readonly rootDirectoryId: backendModule.DirectoryId
|
||||
readonly directoryKey: backendModule.DirectoryId | null
|
||||
readonly directoryId: backendModule.DirectoryId | null
|
||||
readonly doPaste: (
|
||||
newParentKey: backendModule.DirectoryId,
|
||||
newParentId: backendModule.DirectoryId,
|
||||
) => void
|
||||
readonly category: Category
|
||||
readonly rootDirectoryId: DirectoryId
|
||||
readonly directoryKey: DirectoryId | null
|
||||
readonly directoryId: DirectoryId | null
|
||||
readonly path: string | null
|
||||
readonly doPaste: (newParentKey: DirectoryId, newParentId: DirectoryId) => void
|
||||
}
|
||||
|
||||
/** A context menu available everywhere in the directory. */
|
||||
@ -40,15 +44,17 @@ export const GlobalContextMenu = function GlobalContextMenu(props: GlobalContext
|
||||
const {
|
||||
hidden = false,
|
||||
backend,
|
||||
category,
|
||||
directoryKey = null,
|
||||
directoryId = null,
|
||||
path,
|
||||
rootDirectoryId,
|
||||
} = props
|
||||
const { doPaste } = props
|
||||
|
||||
const { getText } = useText()
|
||||
const { setModal, unsetModal } = useSetModal()
|
||||
const dispatchAssetListEvent = useDispatchAssetListEvent()
|
||||
const isCloud = backend.type === BackendType.remote
|
||||
|
||||
const driveStore = useDriveStore()
|
||||
const hasPasteData = useStore(
|
||||
@ -56,7 +62,28 @@ export const GlobalContextMenu = function GlobalContextMenu(props: GlobalContext
|
||||
(storeState) => (storeState.pasteData?.data.ids.size ?? 0) > 0,
|
||||
)
|
||||
|
||||
const isCloud = backend.type === BackendType.remote
|
||||
const newFolderRaw = useNewFolder(backend, category)
|
||||
const newFolder = useEventCallback(async () => {
|
||||
return await newFolderRaw(directoryId ?? rootDirectoryId, path)
|
||||
})
|
||||
const newSecretRaw = useNewSecret(backend, category)
|
||||
const newSecret = useEventCallback(async (name: string, value: string) => {
|
||||
return await newSecretRaw(name, value, directoryId ?? rootDirectoryId, path)
|
||||
})
|
||||
const newProjectRaw = useNewProject(backend, category)
|
||||
const newProject = useEventCallback(
|
||||
async (templateId: string | null | undefined, templateName: string | null | undefined) => {
|
||||
return await newProjectRaw({ templateName, templateId }, directoryId ?? rootDirectoryId, path)
|
||||
},
|
||||
)
|
||||
const newDatalinkRaw = useNewDatalink(backend, category)
|
||||
const newDatalink = useEventCallback(async (name: string, value: unknown) => {
|
||||
return await newDatalinkRaw(name, value, directoryId ?? rootDirectoryId, path)
|
||||
})
|
||||
const uploadFilesRaw = useUploadFiles(backend, category)
|
||||
const uploadFiles = useEventCallback(async (files: readonly File[]) => {
|
||||
await uploadFilesRaw(files, directoryId ?? rootDirectoryId, path)
|
||||
})
|
||||
|
||||
return (
|
||||
<ContextMenu aria-label={getText('globalContextMenuLabel')} hidden={hidden}>
|
||||
@ -65,12 +92,7 @@ export const GlobalContextMenu = function GlobalContextMenu(props: GlobalContext
|
||||
action="uploadFiles"
|
||||
doAction={async () => {
|
||||
const files = await inputFiles()
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.uploadFiles,
|
||||
parentKey: directoryKey ?? rootDirectoryId,
|
||||
parentId: directoryId ?? rootDirectoryId,
|
||||
files: Array.from(files),
|
||||
})
|
||||
await uploadFiles(Array.from(files))
|
||||
}}
|
||||
/>
|
||||
<ContextMenuEntry
|
||||
@ -78,14 +100,7 @@ export const GlobalContextMenu = function GlobalContextMenu(props: GlobalContext
|
||||
action="newProject"
|
||||
doAction={() => {
|
||||
unsetModal()
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.newProject,
|
||||
parentKey: directoryKey ?? rootDirectoryId,
|
||||
parentId: directoryId ?? rootDirectoryId,
|
||||
templateId: null,
|
||||
datalinkId: null,
|
||||
preferredName: null,
|
||||
})
|
||||
void newProject(null, null)
|
||||
}}
|
||||
/>
|
||||
<ContextMenuEntry
|
||||
@ -93,11 +108,7 @@ export const GlobalContextMenu = function GlobalContextMenu(props: GlobalContext
|
||||
action="newFolder"
|
||||
doAction={() => {
|
||||
unsetModal()
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.newFolder,
|
||||
parentKey: directoryKey ?? rootDirectoryId,
|
||||
parentId: directoryId ?? rootDirectoryId,
|
||||
})
|
||||
void newFolder()
|
||||
}}
|
||||
/>
|
||||
{isCloud && (
|
||||
@ -109,14 +120,8 @@ export const GlobalContextMenu = function GlobalContextMenu(props: GlobalContext
|
||||
<UpsertSecretModal
|
||||
id={null}
|
||||
name={null}
|
||||
doCreate={(name, value) => {
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.newSecret,
|
||||
parentKey: directoryKey ?? rootDirectoryId,
|
||||
parentId: directoryId ?? rootDirectoryId,
|
||||
name,
|
||||
value,
|
||||
})
|
||||
doCreate={async (name, value) => {
|
||||
await newSecret(name, value)
|
||||
}}
|
||||
/>,
|
||||
)
|
||||
@ -130,14 +135,8 @@ export const GlobalContextMenu = function GlobalContextMenu(props: GlobalContext
|
||||
doAction={() => {
|
||||
setModal(
|
||||
<UpsertDatalinkModal
|
||||
doCreate={(name, value) => {
|
||||
dispatchAssetListEvent({
|
||||
type: AssetListEventType.newDatalink,
|
||||
parentKey: directoryKey ?? rootDirectoryId,
|
||||
parentId: directoryId ?? rootDirectoryId,
|
||||
name,
|
||||
value,
|
||||
})
|
||||
doCreate={async (name, value) => {
|
||||
await newDatalink(name, value)
|
||||
}}
|
||||
/>,
|
||||
)
|
||||
|
@ -45,7 +45,7 @@ export default function KeyboardShortcutsSettingsSection() {
|
||||

return (
<>
<ButtonGroup>
<ButtonGroup className="grow-0">
<DialogTrigger>
<Button size="medium" variant="outline">
{getText('resetAll')}
@ -68,32 +68,25 @@ export default function KeyboardShortcutsSettingsSection() {
|
||||
<div
{...mergeProps<JSX.IntrinsicElements['div']>()(innerProps, {
ref: rootRef,
// There is a horizontal scrollbar for some reason without `px-px`.
className: 'overflow-auto px-px',
className: 'flex-1 min-h-0 overflow-auto',
onScroll,
})}
>
<table className="table-fixed border-collapse rounded-rows">
<thead className="sticky top-0">
<tr className="h-row text-left text-sm font-semibold">
<th className="pr-keyboard-shortcuts-icon-column-r min-w-keyboard-shortcuts-icon-column pl-cell-x">
{/* Icon */}
</th>
<th className="min-w-keyboard-shortcuts-name-column px-cell-x">
{getText('name')}
</th>
<th className="min-w-8 pl-cell-x pr-1.5">{/* Icon */}</th>
<th className="min-w-36 px-cell-x">{getText('name')}</th>
<th className="px-cell-x">{getText('shortcuts')}</th>
<th className="w-full min-w-keyboard-shortcuts-description-column px-cell-x">
{getText('description')}
</th>
<th className="w-full min-w-64 px-cell-x">{getText('description')}</th>
</tr>
</thead>
<tbody ref={bodyRef}>
{visibleBindings.map((kv) => {
const [action, info] = kv
return (
<tr key={action}>
<td className="flex h-row items-center rounded-l-full bg-clip-padding pl-cell-x pr-icon-column-r">
<tr key={action} className="rounded-rows-child">
<td className="flex h-row items-center rounded-l-full bg-clip-padding pl-cell-x pr-1.5">
<SvgMask
src={info.icon ?? BlankIcon}
color={info.color}
@ -112,11 +105,11 @@ export default function KeyboardShortcutsSettingsSection() {
|
||||
{}
<div className="gap-buttons flex items-center pr-4">
{info.bindings.map((binding, j) => (
<div
key={j}
className="inline-flex shrink-0 items-center gap-keyboard-shortcuts-button"
>
<KeyboardShortcut shortcut={binding} />
<div key={j} className="inline-flex shrink-0 items-center gap-1">
<KeyboardShortcut
shortcut={binding}
className="rounded-lg border-0.5 border-primary/10 px-1"
/>
<Button
variant="ghost"
size="medium"
@ -132,7 +125,7 @@ export default function KeyboardShortcutsSettingsSection() {
|
||||
</div>
))}
<div className="grow" />
<div className="gap-keyboard-shortcuts-buttons flex shrink-0 items-center">
<div className="flex shrink-0 items-center gap-1">
<DialogTrigger>
<Button
variant="ghost"
|
@ -140,7 +140,7 @@ export default function MembersTable(props: MembersTableProps) {
|
||||
{getText('email')}
|
||||
</Column>
|
||||
{/* Delete button. */}
|
||||
{allowDelete && <Column className="w border-0" />}
|
||||
{allowDelete && <Column className="w-0 border-0" />}
|
||||
</TableHeader>
|
||||
<TableBody ref={bodyRef} items={users ?? []} dependencies={[users]} className="select-text">
|
||||
{(member) => (
|
||||
|
@ -31,7 +31,7 @@ export default function SettingsSection(props: SettingsSectionProps) {
|
||||
{getText(nameId)}
|
||||
</Text.Heading>
|
||||
)}
|
||||
<div className="flex flex-col">
|
||||
<div className="flex flex-col overflow-auto">
|
||||
{entries.map((entry, i) => (
|
||||
<SettingsEntry key={i} context={context} data={entry} />
|
||||
))}
|
||||
|
@ -64,7 +64,10 @@ export default function SettingsTab(props: SettingsTabProps) {
|
||||
} else {
const content =
columns.length === 1 ?
<div className="flex grow flex-col gap-settings-subsection" {...contentProps}>
<div
className={twMerge('flex grow flex-col gap-settings-subsection', classes[0])}
{...contentProps}
>
{sections.map((section) => (
<SettingsSection key={section.nameId} context={context} data={section} />
))}
|
@ -385,6 +385,7 @@ export const SETTINGS_TAB_DATA: Readonly<Record<SettingsTabType, SettingsTabData
|
||||
sections: [
{
nameId: 'keyboardShortcutsSettingsSection',
columnClassName: 'h-full *:flex-1 *:min-h-0',
entries: [
{
type: SettingsEntryType.custom,
|
@ -4,6 +4,7 @@ import * as React from 'react'
|
||||
import * as zustand from '#/utilities/zustand'
import invariant from 'tiny-invariant'

import { useEventCallback } from '#/hooks/eventCallbackHooks'
import type { AssetPanelContextProps } from '#/layouts/AssetPanel'
import type { Suggestion } from '#/layouts/AssetSearchBar'
import type { Category } from '#/layouts/CategorySwitcher/Category'
@ -18,7 +19,6 @@ import type {
|
||||
DirectoryId,
} from 'enso-common/src/services/Backend'
import { EMPTY_ARRAY } from 'enso-common/src/utilities/data/array'
import { useEventCallback } from '../hooks/eventCallbackHooks'

// ==================
// === DriveStore ===
@ -45,6 +45,8 @@ interface DriveStore {
|
||||
readonly setCanDownload: (canDownload: boolean) => void
readonly pasteData: PasteData<DrivePastePayload> | null
readonly setPasteData: (pasteData: PasteData<DrivePastePayload> | null) => void
readonly expandedDirectoryIds: readonly DirectoryId[]
readonly setExpandedDirectoryIds: (selectedKeys: readonly DirectoryId[]) => void
readonly selectedKeys: ReadonlySet<AssetId>
readonly setSelectedKeys: (selectedKeys: ReadonlySet<AssetId>) => void
readonly visuallySelectedKeys: ReadonlySet<AssetId> | null
@ -137,6 +139,12 @@ export default function DriveProvider(props: ProjectsProviderProps) {
|
||||
set({ pasteData })
}
},
expandedDirectoryIds: EMPTY_ARRAY,
setExpandedDirectoryIds: (expandedDirectoryIds) => {
if (get().expandedDirectoryIds !== expandedDirectoryIds) {
set({ expandedDirectoryIds })
}
},
selectedKeys: EMPTY_SET,
setSelectedKeys: (selectedKeys) => {
if (get().selectedKeys !== selectedKeys) {
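Note: the setters added above only call `set` when the incoming value is a different reference from the current one, so store subscribers are not notified for no-op updates. A minimal standalone sketch of the same guard (illustrative only, not part of this commit), assuming plain zustand:

import { createStore } from 'zustand/vanilla'

interface SelectionStore {
  readonly selectedKeys: ReadonlySet<string>
  readonly setSelectedKeys: (selectedKeys: ReadonlySet<string>) => void
}

const selectionStore = createStore<SelectionStore>((set, get) => ({
  selectedKeys: new Set<string>(),
  setSelectedKeys: (selectedKeys) => {
    // Skip the update (and subscriber notifications) when the same reference is passed again.
    if (get().selectedKeys !== selectedKeys) {
      set({ selectedKeys })
    }
  },
}))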
@ -299,13 +307,25 @@ export function useSetPasteData() {
|
||||
return zustand.useStore(store, (state) => state.setPasteData)
}

/** The expanded directories in the Asset Table. */
export function useExpandedDirectoryIds() {
const store = useDriveStore()
return zustand.useStore(store, (state) => state.expandedDirectoryIds)
}

/** A function to set the expanded directoryIds in the Asset Table. */
export function useSetExpandedDirectoryIds() {
const store = useDriveStore()
return zustand.useStore(store, (state) => state.setExpandedDirectoryIds)
}

/** The selected keys in the Asset Table. */
export function useSelectedKeys() {
const store = useDriveStore()
return zustand.useStore(store, (state) => state.selectedKeys)
}

/** A function to set the selected keys of the Asset Table selection. */
/** A function to set the selected keys in the Asset Table. */
export function useSetSelectedKeys() {
const store = useDriveStore()
return zustand.useStore(store, (state) => state.setSelectedKeys)
@ -482,3 +502,25 @@ export function useSetIsAssetPanelHidden() {
|
||||
const store = useDriveStore()
return zustand.useStore(store, (state) => state.setIsAssetPanelHidden)
}

/** Toggle whether a specific directory is expanded. */
export function useToggleDirectoryExpansion() {
const driveStore = useDriveStore()
const setExpandedDirectoryIds = useSetExpandedDirectoryIds()

return useEventCallback((directoryId: DirectoryId, override?: boolean) => {
const expandedDirectoryIds = driveStore.getState().expandedDirectoryIds
const isExpanded = expandedDirectoryIds.includes(directoryId)
const shouldExpand = override ?? !isExpanded

if (shouldExpand !== isExpanded) {
React.startTransition(() => {
if (shouldExpand) {
setExpandedDirectoryIds([...expandedDirectoryIds, directoryId])
} else {
setExpandedDirectoryIds(expandedDirectoryIds.filter((id) => id !== directoryId))
}
})
}
})
}
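Note: an illustrative usage sketch of the new hook (not part of this commit; `DirectoryRow` and its props are hypothetical):

import { useToggleDirectoryExpansion } from '#/providers/DriveProvider'
import type { DirectoryId } from '#/services/Backend'

function DirectoryRow(props: { readonly directoryId: DirectoryId }) {
  const toggleDirectoryExpansion = useToggleDirectoryExpansion()
  return (
    <button
      onClick={() => {
        // With no override the directory flips between expanded and collapsed;
        // passing `true` or `false` forces a specific state.
        toggleDirectoryExpansion(props.directoryId)
      }}
    >
      Toggle
    </button>
  )
}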
|
@ -97,8 +97,6 @@
|
||||
--spinner-fast-transition-duration: 1s;
--spinner-medium-transition-duration: 5s;
--spinner-slow-transition-duration: 90s;
--modifiers-gap: 0.1875rem;
--modifiers-macos-gap: 0.125rem;
--cell-padding-x: 0.5rem;
--plus-icon-size: 1.125rem;
/* The horizontal gap between adjacent context menus. */
@ -322,12 +320,6 @@
|
||||

--members-name-column-width: 12rem;
--members-email-column-width: 12rem;
--keyboard-shortcuts-icon-column-width: 2rem;
--keyboard-shortcuts-name-column-width: 9rem;
--keyboard-shortcuts-description-column-width: 16rem;
--icon-column-padding-right: 0.375rem;
/* The horizontal gap between each icon for modifying the shortcuts for a particular action. */
--keyboard-shortcuts-button-gap: 0.25rem;

/***********************\
|* Authentication flow *|
|
@ -21,9 +21,10 @@
|
||||
--color-frame-selected-bg: rgb(255 255 255 / 0.7);
--color-widget-slight: rgb(255 255 255 / 0.06);
--color-widget: rgb(255 255 255 / 0.12);
--color-widget-focus: rgb(255 255 255 / 0.25);
--color-widget-focus: rgb(255 255 255 / 1);
--color-widget-unfocus: rgb(255 255 255 / 0.6);
--color-widget-selected: rgb(255 255 255 / 0.58);
--color-widget-selection: rgba(255 255 255 / 0.2);
--color-widget-selection: rgba(0 0 0 / 0.2);
--color-port-connected: rgb(255 255 255 / 0.15);

/* colors for specific icons */
|
@ -12,6 +12,7 @@ export const codeEditorBindings = defineKeybinds('code-editor', {
|
||||
export const documentationEditorBindings = defineKeybinds('documentation-editor', {
toggle: ['Mod+D'],
openLink: ['Mod+PointerMain'],
paste: ['Mod+V'],
})

export const interactionBindings = defineKeybinds('current-interaction', {
|
@ -1,380 +1,13 @@
|
||||
<script setup lang="ts">
|
||||
import type { ChangeSet, Diagnostic, Highlighter } from '@/components/CodeEditor/codemirror'
|
||||
import EditorRoot from '@/components/EditorRoot.vue'
|
||||
import { useGraphStore, type NodeId } from '@/stores/graph'
|
||||
import { useProjectStore } from '@/stores/project'
|
||||
import { useSuggestionDbStore } from '@/stores/suggestionDatabase'
|
||||
import { useAutoBlur } from '@/util/autoBlur'
|
||||
import { unwrap } from '@/util/data/result'
|
||||
import { qnJoin, tryQualifiedName } from '@/util/qualifiedName'
|
||||
import { EditorSelection } from '@codemirror/state'
|
||||
import * as iter from 'enso-common/src/utilities/data/iter'
|
||||
import { createDebouncer } from 'lib0/eventloop'
|
||||
import type { ComponentInstance } from 'vue'
|
||||
import { computed, onMounted, onUnmounted, ref, shallowRef, watch, watchEffect } from 'vue'
|
||||
import { MutableModule } from 'ydoc-shared/ast'
|
||||
import { textChangeToEdits, type SourceRangeEdit } from 'ydoc-shared/util/data/text'
|
||||
import { rangeEncloses, type Origin } from 'ydoc-shared/yjsModel'
|
||||
import { defineAsyncComponent } from 'vue'
|
||||
|
||||
// Use dynamic imports to aid code splitting. The codemirror dependency is quite large.
|
||||
const {
|
||||
Annotation,
|
||||
StateEffect,
|
||||
StateField,
|
||||
bracketMatching,
|
||||
foldGutter,
|
||||
lintGutter,
|
||||
highlightSelectionMatches,
|
||||
minimalSetup,
|
||||
EditorState,
|
||||
EditorView,
|
||||
syntaxHighlighting,
|
||||
defaultHighlightStyle,
|
||||
tooltips,
|
||||
enso,
|
||||
linter,
|
||||
forceLinting,
|
||||
lsDiagnosticsToCMDiagnostics,
|
||||
hoverTooltip,
|
||||
textEditToChangeSpec,
|
||||
} = await import('@/components/CodeEditor/codemirror')
|
||||
|
||||
const projectStore = useProjectStore()
|
||||
const graphStore = useGraphStore()
|
||||
const suggestionDbStore = useSuggestionDbStore()
|
||||
const editorRoot = ref<ComponentInstance<typeof EditorRoot>>()
|
||||
const rootElement = computed(() => editorRoot.value?.rootElement)
|
||||
useAutoBlur(rootElement)
|
||||
|
||||
const executionContextDiagnostics = shallowRef<Diagnostic[]>([])
|
||||
|
||||
// Effect that can be applied to the document to invalidate the linter state.
|
||||
const diagnosticsUpdated = StateEffect.define()
|
||||
// State value that is perturbed by any `diagnosticsUpdated` effect.
|
||||
const diagnosticsVersion = StateField.define({
|
||||
create: (_state) => 0,
|
||||
update: (value, transaction) => {
|
||||
for (const effect of transaction.effects) {
|
||||
if (effect.is(diagnosticsUpdated)) value += 1
|
||||
}
|
||||
return value
|
||||
},
|
||||
})
|
||||
|
||||
const expressionUpdatesDiagnostics = computed(() => {
|
||||
const updates = projectStore.computedValueRegistry.db
|
||||
const panics = updates.type.reverseLookup('Panic')
|
||||
const errors = updates.type.reverseLookup('DataflowError')
|
||||
const diagnostics: Diagnostic[] = []
|
||||
for (const externalId of iter.chain(panics, errors)) {
|
||||
const update = updates.get(externalId)
|
||||
if (!update) continue
|
||||
const astId = graphStore.db.idFromExternal(externalId)
|
||||
if (!astId) continue
|
||||
const span = graphStore.moduleSource.getSpan(astId)
|
||||
if (!span) continue
|
||||
const [from, to] = span
|
||||
switch (update.payload.type) {
|
||||
case 'Panic': {
|
||||
diagnostics.push({ from, to, message: update.payload.message, severity: 'error' })
|
||||
break
|
||||
}
|
||||
case 'DataflowError': {
|
||||
const error = projectStore.dataflowErrors.lookup(externalId)
|
||||
if (error?.value?.message) {
|
||||
diagnostics.push({ from, to, message: error.value.message, severity: 'error' })
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return diagnostics
|
||||
})
|
||||
|
||||
// == CodeMirror editor setup ==
|
||||
|
||||
const editorView = new EditorView()
|
||||
const viewInitialized = ref(false)
|
||||
watchEffect(() => {
|
||||
const module = projectStore.module
|
||||
if (!module) return
|
||||
editorView.setState(
|
||||
EditorState.create({
|
||||
extensions: [
|
||||
minimalSetup,
|
||||
updateListener(),
|
||||
diagnosticsVersion,
|
||||
syntaxHighlighting(defaultHighlightStyle as Highlighter),
|
||||
bracketMatching(),
|
||||
foldGutter(),
|
||||
lintGutter(),
|
||||
highlightSelectionMatches(),
|
||||
tooltips({ position: 'absolute' }),
|
||||
hoverTooltip((ast, syn) => {
|
||||
const dom = document.createElement('div')
|
||||
const astSpan = ast.span()
|
||||
let foundNode: NodeId | undefined
|
||||
for (const [id, node] of graphStore.db.nodeIdToNode.entries()) {
|
||||
const rootSpan = graphStore.moduleSource.getSpan(node.rootExpr.id)
|
||||
if (rootSpan && rangeEncloses(rootSpan, astSpan)) {
|
||||
foundNode = id
|
||||
break
|
||||
}
|
||||
}
|
||||
const expressionInfo = foundNode && graphStore.db.getExpressionInfo(foundNode)
|
||||
const nodeColor = foundNode && graphStore.db.getNodeColorStyle(foundNode)
|
||||
|
||||
if (foundNode != null) {
|
||||
dom
|
||||
.appendChild(document.createElement('div'))
|
||||
.appendChild(document.createTextNode(`AST ID: ${foundNode}`))
|
||||
}
|
||||
if (expressionInfo != null) {
|
||||
dom
|
||||
.appendChild(document.createElement('div'))
|
||||
.appendChild(document.createTextNode(`Type: ${expressionInfo.typename ?? 'Unknown'}`))
|
||||
}
|
||||
if (expressionInfo?.profilingInfo[0] != null) {
|
||||
const profile = expressionInfo.profilingInfo[0]
|
||||
const executionTime = (profile.ExecutionTime.nanoTime / 1_000_000).toFixed(3)
|
||||
const text = `Execution Time: ${executionTime}ms`
|
||||
dom
|
||||
.appendChild(document.createElement('div'))
|
||||
.appendChild(document.createTextNode(text))
|
||||
}
|
||||
|
||||
dom
|
||||
.appendChild(document.createElement('div'))
|
||||
.appendChild(document.createTextNode(`Syntax: ${syn.toString()}`))
|
||||
const method = expressionInfo?.methodCall?.methodPointer
|
||||
if (method != null) {
|
||||
const moduleName = tryQualifiedName(method.module)
|
||||
const methodName = tryQualifiedName(method.name)
|
||||
const qualifiedName = qnJoin(unwrap(moduleName), unwrap(methodName))
|
||||
const [id] = suggestionDbStore.entries.nameToId.lookup(qualifiedName)
|
||||
const suggestionEntry = id != null ? suggestionDbStore.entries.get(id) : undefined
|
||||
if (suggestionEntry != null) {
|
||||
const groupNode = dom.appendChild(document.createElement('div'))
|
||||
groupNode.appendChild(document.createTextNode('Group: '))
|
||||
const groupNameNode = groupNode.appendChild(document.createElement('span'))
|
||||
groupNameNode.appendChild(document.createTextNode(`${method.module}.${method.name}`))
|
||||
if (nodeColor) {
|
||||
groupNameNode.style.color = nodeColor
|
||||
}
|
||||
}
|
||||
}
|
||||
return { dom }
|
||||
}),
|
||||
enso(),
|
||||
linter(
|
||||
() => [...executionContextDiagnostics.value, ...expressionUpdatesDiagnostics.value],
|
||||
{
|
||||
needsRefresh(update) {
|
||||
return (
|
||||
update.state.field(diagnosticsVersion) !==
|
||||
update.startState.field(diagnosticsVersion)
|
||||
)
|
||||
},
|
||||
},
|
||||
),
|
||||
],
|
||||
}),
|
||||
)
|
||||
viewInitialized.value = true
|
||||
})
|
||||
|
||||
function changeSetToTextEdits(changes: ChangeSet) {
|
||||
const textEdits = new Array<SourceRangeEdit>()
|
||||
changes.iterChanges((from, to, _fromB, _toB, insert) =>
|
||||
textEdits.push({ range: [from, to], insert: insert.toString() }),
|
||||
)
|
||||
return textEdits
|
||||
}
|
||||
|
||||
let pendingChanges: ChangeSet | undefined
|
||||
let currentModule: MutableModule | undefined
|
||||
/** Set the editor contents to the current module state, discarding any pending editor-initiated changes. */
|
||||
function resetView() {
|
||||
console.info(`Resetting the editor to the module code.`)
|
||||
pendingChanges = undefined
|
||||
currentModule = undefined
|
||||
const viewText = editorView.state.doc.toString()
|
||||
const code = graphStore.moduleSource.text
|
||||
editorView.dispatch({
|
||||
changes: textChangeToEdits(viewText, code).map(textEditToChangeSpec),
|
||||
annotations: synchronizedModule.of(graphStore.startEdit()),
|
||||
})
|
||||
}
|
||||
|
||||
/** Apply any pending changes to the currently-synchronized module, clearing the set of pending changes. */
|
||||
function commitPendingChanges() {
|
||||
if (!pendingChanges || !currentModule) return
|
||||
try {
|
||||
currentModule.applyTextEdits(changeSetToTextEdits(pendingChanges), graphStore.viewModule)
|
||||
graphStore.commitEdit(currentModule, undefined, 'local:userAction:CodeEditor')
|
||||
} catch (error) {
|
||||
console.error(`Code Editor failed to modify module`, error)
|
||||
resetView()
|
||||
}
|
||||
pendingChanges = undefined
|
||||
}
|
||||
|
||||
function updateListener() {
|
||||
const debouncer = createDebouncer(0)
|
||||
return EditorView.updateListener.of((update) => {
|
||||
for (const transaction of update.transactions) {
|
||||
const newModule = transaction.annotation(synchronizedModule)
|
||||
if (newModule) {
|
||||
// Flush the pipeline of edits that were based on the old module.
|
||||
commitPendingChanges()
|
||||
currentModule = newModule
|
||||
} else if (transaction.docChanged && currentModule) {
|
||||
pendingChanges =
|
||||
pendingChanges ? pendingChanges.compose(transaction.changes) : transaction.changes
|
||||
// Defer the update until after pending events have been processed, so that if changes are arriving faster than
|
||||
// we would be able to apply them individually we coalesce them to keep up.
|
||||
debouncer(commitPendingChanges)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
let needResync = false
|
||||
// Indicates a change updating the text to correspond to the given module state.
|
||||
const synchronizedModule = Annotation.define<MutableModule>()
|
||||
watch(
|
||||
viewInitialized,
|
||||
(ready) => {
|
||||
if (ready) graphStore.moduleSource.observe(observeSourceChange)
|
||||
},
|
||||
{ immediate: true },
|
||||
const LazyCodeEditor = defineAsyncComponent(
|
||||
() => import('@/components/CodeEditor/CodeEditorImpl.vue'),
|
||||
)
|
||||
onUnmounted(() => graphStore.moduleSource.unobserve(observeSourceChange))
|
||||
|
||||
function observeSourceChange(textEdits: readonly SourceRangeEdit[], origin: Origin | undefined) {
|
||||
// If we received an update from outside the Code Editor while the editor contained uncommitted changes, we cannot
|
||||
// proceed incrementally; we wait for the changes to be merged as Y.Js AST updates, and then set the view to the
|
||||
// resulting code.
|
||||
if (needResync) {
|
||||
if (!pendingChanges) {
|
||||
resetView()
|
||||
needResync = false
|
||||
}
|
||||
return
|
||||
}
|
||||
// When we aren't in the `needResync` state, we can ignore updates that originated in the Code Editor.
|
||||
if (origin === 'local:userAction:CodeEditor') return
|
||||
if (pendingChanges) {
|
||||
console.info(`Deferring update (editor dirty).`)
|
||||
needResync = true
|
||||
return
|
||||
}
|
||||
|
||||
// If none of the above exit-conditions were reached, the transaction is applicable to our current state.
|
||||
editorView.dispatch({
|
||||
changes: textEdits.map(textEditToChangeSpec),
|
||||
annotations: synchronizedModule.of(graphStore.startEdit()),
|
||||
})
|
||||
}
|
||||
|
||||
// The LS protocol doesn't identify what version of the file updates are in reference to. When diagnostics are received
|
||||
// from the LS, we map them to the text assuming that they are applicable to the current version of the module. This
|
||||
// will be correct if there is no one else editing, and we aren't editing faster than the LS can send updates. Typing
|
||||
// too quickly can result in incorrect ranges, but at idle it should correct itself when we receive new diagnostics.
|
||||
watch([viewInitialized, () => projectStore.diagnostics], ([ready, diagnostics]) => {
|
||||
if (!ready) return
|
||||
executionContextDiagnostics.value =
|
||||
graphStore.moduleSource.text ?
|
||||
lsDiagnosticsToCMDiagnostics(graphStore.moduleSource.text, diagnostics)
|
||||
: []
|
||||
})
|
||||
|
||||
watch([executionContextDiagnostics, expressionUpdatesDiagnostics], () => {
|
||||
editorView.dispatch({ effects: diagnosticsUpdated.of(null) })
|
||||
forceLinting(editorView)
|
||||
})
|
||||
|
||||
onMounted(() => {
|
||||
editorView.focus()
|
||||
rootElement.value?.prepend(editorView.dom)
|
||||
|
||||
// API for e2e tests.
|
||||
;(window as any).__codeEditorApi = {
|
||||
textContent: () => editorView.state.doc.toString(),
|
||||
textLength: () => editorView.state.doc.length,
|
||||
indexOf: (substring: string, position?: number) =>
|
||||
editorView.state.doc.toString().indexOf(substring, position),
|
||||
placeCursor: (at: number) => {
|
||||
editorView.dispatch({ selection: EditorSelection.create([EditorSelection.cursor(at)]) })
|
||||
},
|
||||
select: (from: number, to: number) => {
|
||||
editorView.dispatch({ selection: EditorSelection.create([EditorSelection.range(from, to)]) })
|
||||
},
|
||||
selectAndReplace: (from: number, to: number, replaceWith: string) => {
|
||||
editorView.dispatch({ selection: EditorSelection.create([EditorSelection.range(from, to)]) })
|
||||
editorView.dispatch(editorView.state.update(editorView.state.replaceSelection(replaceWith)))
|
||||
},
|
||||
writeText: (text: string, from: number) => {
|
||||
editorView.dispatch({
|
||||
changes: [{ from: from, insert: text }],
|
||||
selection: { anchor: from + text.length },
|
||||
})
|
||||
},
|
||||
}
|
||||
})
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<EditorRoot ref="editorRoot" class="CodeEditor" />
|
||||
<Suspense>
|
||||
<LazyCodeEditor />
|
||||
</Suspense>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
.CodeEditor {
|
||||
font-family: var(--font-mono);
|
||||
backdrop-filter: var(--blur-app-bg);
|
||||
background-color: rgba(255, 255, 255, 0.9);
|
||||
box-shadow: 0 4px 30px rgba(0, 0, 0, 0.1);
|
||||
border: 1px solid rgba(255, 255, 255, 0.4);
|
||||
}
|
||||
|
||||
:deep(.cm-scroller) {
|
||||
font-family: var(--font-mono);
|
||||
/* Prevent touchpad back gesture, which can be triggered while panning. */
|
||||
overscroll-behavior: none;
|
||||
}
|
||||
|
||||
:deep(.cm-editor) {
|
||||
position: relative;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
opacity: 1;
|
||||
color: black;
|
||||
text-shadow: 0 0 2px rgba(255, 255, 255, 0.4);
|
||||
font-size: 12px;
|
||||
outline: 1px solid transparent;
|
||||
transition: outline 0.1s ease-in-out;
|
||||
}
|
||||
|
||||
:deep(.cm-focused) {
|
||||
outline: 1px solid rgba(0, 0, 0, 0.5);
|
||||
}
|
||||
|
||||
:deep(.cm-tooltip-hover) {
|
||||
padding: 4px;
|
||||
border-radius: 4px;
|
||||
border: 1px solid rgba(0, 0, 0, 0.4);
|
||||
text-shadow: 0 0 2px rgba(255, 255, 255, 0.4);
|
||||
|
||||
&::before {
|
||||
content: '';
|
||||
background-color: rgba(255, 255, 255, 0.9);
|
||||
backdrop-filter: blur(64px);
|
||||
border-radius: 4px;
|
||||
}
|
||||
}
|
||||
|
||||
:deep(.cm-gutters) {
|
||||
border-radius: 3px 0 0 3px;
|
||||
min-width: 32px;
|
||||
}
|
||||
</style>
|
||||
|
@ -0,0 +1,123 @@
|
||||
<script setup lang="ts">
|
||||
import { useEnsoDiagnostics } from '@/components/CodeEditor/diagnostics'
|
||||
import { ensoSyntax } from '@/components/CodeEditor/ensoSyntax'
|
||||
import { useEnsoSourceSync } from '@/components/CodeEditor/sync'
|
||||
import { ensoHoverTooltip } from '@/components/CodeEditor/tooltips'
|
||||
import EditorRoot from '@/components/codemirror/EditorRoot.vue'
|
||||
import { testSupport } from '@/components/codemirror/testSupport'
|
||||
import { useGraphStore } from '@/stores/graph'
|
||||
import { useProjectStore } from '@/stores/project'
|
||||
import { useSuggestionDbStore } from '@/stores/suggestionDatabase'
|
||||
import { useAutoBlur } from '@/util/autoBlur'
|
||||
import {
|
||||
bracketMatching,
|
||||
defaultHighlightStyle,
|
||||
foldGutter,
|
||||
syntaxHighlighting,
|
||||
} from '@codemirror/language'
|
||||
import { lintGutter } from '@codemirror/lint'
|
||||
import { highlightSelectionMatches } from '@codemirror/search'
|
||||
import { EditorState } from '@codemirror/state'
|
||||
import { EditorView } from '@codemirror/view'
|
||||
import { type Highlighter } from '@lezer/highlight'
|
||||
import { minimalSetup } from 'codemirror'
|
||||
import { computed, onMounted, ref, watch, type ComponentInstance } from 'vue'
|
||||
|
||||
const projectStore = useProjectStore()
|
||||
const graphStore = useGraphStore()
|
||||
const suggestionDbStore = useSuggestionDbStore()
|
||||
const editorRoot = ref<ComponentInstance<typeof EditorRoot>>()
|
||||
const rootElement = computed(() => editorRoot.value?.rootElement)
|
||||
useAutoBlur(rootElement)
|
||||
|
||||
const editorView = new EditorView()
|
||||
;(window as any).__codeEditorApi = testSupport(editorView)
|
||||
|
||||
const { updateListener, connectModuleListener } = useEnsoSourceSync(graphStore, editorView)
|
||||
const ensoDiagnostics = useEnsoDiagnostics(projectStore, graphStore, editorView)
|
||||
|
||||
watch(
|
||||
() => projectStore.module,
|
||||
(module) => {
|
||||
if (!module) return
|
||||
editorView.setState(
|
||||
EditorState.create({
|
||||
extensions: [
|
||||
minimalSetup,
|
||||
syntaxHighlighting(defaultHighlightStyle as Highlighter),
|
||||
bracketMatching(),
|
||||
foldGutter(),
|
||||
lintGutter(),
|
||||
highlightSelectionMatches(),
|
||||
ensoSyntax(),
|
||||
updateListener,
|
||||
ensoHoverTooltip(graphStore, suggestionDbStore),
|
||||
ensoDiagnostics,
|
||||
],
|
||||
}),
|
||||
)
|
||||
connectModuleListener()
|
||||
},
|
||||
{ immediate: true },
|
||||
)
|
||||
|
||||
onMounted(() => {
|
||||
editorView.focus()
|
||||
rootElement.value?.prepend(editorView.dom)
|
||||
})
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<EditorRoot ref="editorRoot" class="CodeEditor" />
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
.CodeEditor {
|
||||
font-family: var(--font-mono);
|
||||
backdrop-filter: var(--blur-app-bg);
|
||||
background-color: rgba(255, 255, 255, 0.9);
|
||||
box-shadow: 0 4px 30px rgba(0, 0, 0, 0.1);
|
||||
border: 1px solid rgba(255, 255, 255, 0.4);
|
||||
}
|
||||
|
||||
:deep(.cm-scroller) {
|
||||
font-family: var(--font-mono);
|
||||
/* Prevent touchpad back gesture, which can be triggered while panning. */
|
||||
overscroll-behavior: none;
|
||||
}
|
||||
|
||||
:deep(.cm-editor) {
|
||||
position: relative;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
opacity: 1;
|
||||
color: black;
|
||||
text-shadow: 0 0 2px rgba(255, 255, 255, 0.4);
|
||||
font-size: 12px;
|
||||
outline: 1px solid transparent;
|
||||
transition: outline 0.1s ease-in-out;
|
||||
}
|
||||
|
||||
:deep(.cm-focused) {
|
||||
outline: 1px solid rgba(0, 0, 0, 0.5);
|
||||
}
|
||||
|
||||
:deep(.cm-tooltip-hover) {
|
||||
padding: 4px;
|
||||
border-radius: 4px;
|
||||
border: 1px solid rgba(0, 0, 0, 0.4);
|
||||
text-shadow: 0 0 2px rgba(255, 255, 255, 0.4);
|
||||
|
||||
&::before {
|
||||
content: '';
|
||||
background-color: rgba(255, 255, 255, 0.9);
|
||||
backdrop-filter: blur(64px);
|
||||
border-radius: 4px;
|
||||
}
|
||||
}
|
||||
|
||||
:deep(.cm-gutters) {
|
||||
border-radius: 3px 0 0 3px;
|
||||
min-width: 32px;
|
||||
}
|
||||
</style>
|
@ -1,207 +0,0 @@
|
||||
/**
|
||||
* @file This module is a collection of codemirror related imports that are intended to be loaded
|
||||
* asynchronously using a single dynamic import, allowing for code splitting.
|
||||
*/
|
||||
|
||||
export { defaultKeymap } from '@codemirror/commands'
|
||||
export {
|
||||
bracketMatching,
|
||||
defaultHighlightStyle,
|
||||
foldGutter,
|
||||
foldNodeProp,
|
||||
syntaxHighlighting,
|
||||
} from '@codemirror/language'
|
||||
export { forceLinting, lintGutter, linter, type Diagnostic } from '@codemirror/lint'
|
||||
export { highlightSelectionMatches } from '@codemirror/search'
|
||||
export { Annotation, EditorState, StateEffect, StateField, type ChangeSet } from '@codemirror/state'
|
||||
export { EditorView, tooltips, type TooltipView } from '@codemirror/view'
|
||||
export { type Highlighter } from '@lezer/highlight'
|
||||
export { minimalSetup } from 'codemirror'
|
||||
export { yCollab } from 'y-codemirror.next'
|
||||
import { RawAstExtended } from '@/util/ast/extended'
|
||||
import { RawAst } from '@/util/ast/raw'
|
||||
import {
|
||||
Language,
|
||||
LanguageSupport,
|
||||
defineLanguageFacet,
|
||||
foldNodeProp,
|
||||
languageDataProp,
|
||||
syntaxTree,
|
||||
} from '@codemirror/language'
|
||||
import { type Diagnostic } from '@codemirror/lint'
|
||||
import type { ChangeSpec } from '@codemirror/state'
|
||||
import { hoverTooltip as originalHoverTooltip, type TooltipView } from '@codemirror/view'
|
||||
import {
|
||||
NodeProp,
|
||||
NodeSet,
|
||||
NodeType,
|
||||
Parser,
|
||||
Tree,
|
||||
type Input,
|
||||
type PartialParse,
|
||||
type SyntaxNode,
|
||||
} from '@lezer/common'
|
||||
import { styleTags, tags } from '@lezer/highlight'
|
||||
import { EditorView } from 'codemirror'
|
||||
import * as iter from 'enso-common/src/utilities/data/iter'
|
||||
import type { Diagnostic as LSDiagnostic } from 'ydoc-shared/languageServerTypes'
|
||||
import type { SourceRangeEdit } from 'ydoc-shared/util/data/text'
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function lsDiagnosticsToCMDiagnostics(
|
||||
source: string,
|
||||
diagnostics: LSDiagnostic[],
|
||||
): Diagnostic[] {
|
||||
if (!diagnostics.length) return []
|
||||
const results: Diagnostic[] = []
|
||||
let pos = 0
|
||||
const lineStartIndices = []
|
||||
for (const line of source.split('\n')) {
|
||||
lineStartIndices.push(pos)
|
||||
pos += line.length + 1
|
||||
}
|
||||
for (const diagnostic of diagnostics) {
|
||||
if (!diagnostic.location) continue
|
||||
const from =
|
||||
(lineStartIndices[diagnostic.location.start.line] ?? 0) + diagnostic.location.start.character
|
||||
const to =
|
||||
(lineStartIndices[diagnostic.location.end.line] ?? 0) + diagnostic.location.end.character
|
||||
if (to > source.length || from > source.length) {
|
||||
// Suppress temporary errors if the source is not the version of the document the LS is reporting diagnostics for.
|
||||
continue
|
||||
}
|
||||
const severity =
|
||||
diagnostic.kind === 'Error' ? 'error'
|
||||
: diagnostic.kind === 'Warning' ? 'warning'
|
||||
: 'info'
|
||||
results.push({ from, to, message: diagnostic.message, severity })
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
type AstNode = RawAstExtended<RawAst.Tree | RawAst.Token, false>
|
||||
|
||||
const nodeTypes: NodeType[] = [
|
||||
...RawAst.Tree.typeNames.map((name, id) => NodeType.define({ id, name })),
|
||||
...RawAst.Token.typeNames.map((name, id) =>
|
||||
NodeType.define({ id: id + RawAst.Tree.typeNames.length, name: 'Token' + name }),
|
||||
),
|
||||
]
|
||||
|
||||
const nodeSet = new NodeSet(nodeTypes).extend(
|
||||
styleTags({
|
||||
Ident: tags.variableName,
|
||||
'Private!': tags.variableName,
|
||||
Number: tags.number,
|
||||
'Wildcard!': tags.variableName,
|
||||
'TextLiteral!': tags.string,
|
||||
OprApp: tags.operator,
|
||||
TokenOperator: tags.operator,
|
||||
'Assignment/TokenOperator': tags.definitionOperator,
|
||||
UnaryOprApp: tags.operator,
|
||||
'Function/Ident': tags.function(tags.variableName),
|
||||
ForeignFunction: tags.function(tags.variableName),
|
||||
'Import/TokenIdent': tags.function(tags.moduleKeyword),
|
||||
Export: tags.function(tags.moduleKeyword),
|
||||
Lambda: tags.function(tags.variableName),
|
||||
Documented: tags.docComment,
|
||||
ConstructorDefinition: tags.function(tags.variableName),
|
||||
}),
|
||||
foldNodeProp.add({
|
||||
Function: (node) => node.lastChild,
|
||||
ArgumentBlockApplication: (node) => node,
|
||||
OperatorBlockApplication: (node) => node,
|
||||
}),
|
||||
)
|
||||
|
||||
export const astProp = new NodeProp<AstNode>({ perNode: true })
|
||||
|
||||
function astToCodeMirrorTree(
|
||||
nodeSet: NodeSet,
|
||||
ast: AstNode,
|
||||
props?: readonly [number | NodeProp<any>, any][] | undefined,
|
||||
): Tree {
|
||||
const [start, end] = ast.span()
|
||||
const children = ast.children()
|
||||
|
||||
const childrenToConvert = iter.tryGetSoleValue(children)?.isToken() ? [] : children
|
||||
|
||||
const tree = new Tree(
|
||||
nodeSet.types[ast.inner.type + (ast.isToken() ? RawAst.Tree.typeNames.length : 0)]!,
|
||||
childrenToConvert.map((child) => astToCodeMirrorTree(nodeSet, child)),
|
||||
childrenToConvert.map((child) => child.span()[0] - start),
|
||||
end - start,
|
||||
[...(props ?? []), [astProp, ast]],
|
||||
)
|
||||
return tree
|
||||
}
|
||||
|
||||
const facet = defineLanguageFacet()
|
||||
|
||||
class EnsoParser extends Parser {
|
||||
nodeSet
|
||||
constructor() {
|
||||
super()
|
||||
this.nodeSet = nodeSet
|
||||
}
|
||||
cachedCode: string | undefined
|
||||
cachedTree: Tree | undefined
|
||||
createParse(input: Input): PartialParse {
|
||||
return {
|
||||
parsedPos: input.length,
|
||||
stopAt: () => {},
|
||||
stoppedAt: null,
|
||||
advance: () => {
|
||||
const code = input.read(0, input.length)
|
||||
if (code !== this.cachedCode || this.cachedTree == null) {
|
||||
this.cachedCode = code
|
||||
const ast = RawAstExtended.parse(code)
|
||||
this.cachedTree = astToCodeMirrorTree(this.nodeSet, ast, [[languageDataProp, facet]])
|
||||
}
|
||||
return this.cachedTree
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class EnsoLanguage extends Language {
|
||||
constructor() {
|
||||
super(facet, new EnsoParser())
|
||||
}
|
||||
}
|
||||
|
||||
const ensoLanguage = new EnsoLanguage()
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function enso() {
|
||||
return new LanguageSupport(ensoLanguage)
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function hoverTooltip(
|
||||
create: (
|
||||
ast: AstNode,
|
||||
syntax: SyntaxNode,
|
||||
) => TooltipView | ((view: EditorView) => TooltipView) | null | undefined,
|
||||
) {
|
||||
return originalHoverTooltip((view, pos, side) => {
|
||||
const syntaxNode = syntaxTree(view.state).resolveInner(pos, side)
|
||||
const astNode = syntaxNode.tree?.prop(astProp)
|
||||
if (astNode == null) return null
|
||||
const domOrCreate = create(astNode, syntaxNode)
|
||||
if (domOrCreate == null) return null
|
||||
|
||||
return {
|
||||
pos: syntaxNode.from,
|
||||
end: syntaxNode.to,
|
||||
above: true,
|
||||
arrow: true,
|
||||
create: typeof domOrCreate !== 'function' ? () => domOrCreate : domOrCreate,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function textEditToChangeSpec({ range: [from, to], insert }: SourceRangeEdit): ChangeSpec {
|
||||
return { from, to, insert }
|
||||
}
|
139
app/gui/src/project-view/components/CodeEditor/diagnostics.ts
Normal file
139
app/gui/src/project-view/components/CodeEditor/diagnostics.ts
Normal file
@ -0,0 +1,139 @@
|
||||
import { type GraphStore } from '@/stores/graph'
|
||||
import { type ProjectStore } from '@/stores/project'
|
||||
import { type Diagnostic, forceLinting, linter } from '@codemirror/lint'
|
||||
import { type Extension, StateEffect, StateField } from '@codemirror/state'
|
||||
import { type EditorView } from '@codemirror/view'
|
||||
import * as iter from 'enso-common/src/utilities/data/iter'
|
||||
import { computed, shallowRef, watch } from 'vue'
|
||||
import { type Diagnostic as LSDiagnostic, type Position } from 'ydoc-shared/languageServerTypes'
|
||||
|
||||
const executionContextDiagnostics = shallowRef<Diagnostic[]>([])
|
||||
|
||||
// Effect that can be applied to the document to invalidate the linter state.
|
||||
const diagnosticsUpdated = StateEffect.define()
|
||||
// State value that is perturbed by any `diagnosticsUpdated` effect.
|
||||
const diagnosticsVersion = StateField.define({
|
||||
create: (_state) => 0,
|
||||
update: (value, transaction) => {
|
||||
for (const effect of transaction.effects) {
|
||||
if (effect.is(diagnosticsUpdated)) value += 1
|
||||
}
|
||||
return value
|
||||
},
|
||||
})
|
||||
|
||||
/** Given a text, indexes it and returns a function for converting between different ways of identifying positions. */
|
||||
function stringPosConverter(text: string) {
|
||||
let pos = 0
|
||||
const lineStartIndex: number[] = []
|
||||
for (const line of text.split('\n')) {
|
||||
lineStartIndex.push(pos)
|
||||
pos += line.length + 1
|
||||
}
|
||||
const length = text.length
|
||||
|
||||
function lineColToIndex({
|
||||
line,
|
||||
character,
|
||||
}: {
|
||||
line: number
|
||||
character: number
|
||||
}): number | undefined {
|
||||
const startIx = lineStartIndex[line]
|
||||
if (startIx == null) return
|
||||
const ix = startIx + character
|
||||
if (ix > length) return
|
||||
return ix
|
||||
}
|
||||
|
||||
return { lineColToIndex }
|
||||
}
|
||||
|
||||
/** Convert the Language Server's diagnostics to CodeMirror diagnostics. */
function lsDiagnosticsToCMDiagnostics(
  diagnostics: LSDiagnostic[],
  lineColToIndex: (lineCol: Position) => number | undefined,
) {
  const results: Diagnostic[] = []
  for (const diagnostic of diagnostics) {
    if (!diagnostic.location) continue
    const from = lineColToIndex(diagnostic.location.start)
    const to = lineColToIndex(diagnostic.location.end)
    if (to == null || from == null) {
      // Suppress temporary errors if the source is not the version of the document the LS is reporting diagnostics for.
      continue
    }
    const severity =
      diagnostic.kind === 'Error' ? 'error'
      : diagnostic.kind === 'Warning' ? 'warning'
      : 'info'
    results.push({ from, to, message: diagnostic.message, severity })
  }
  return results
}

/**
 * CodeMirror extension providing diagnostics for an Enso module. Provides CodeMirror diagnostics based on dataflow
 * errors, and diagnostics the LS provided in an `executionStatus` message.
 */
export function useEnsoDiagnostics(
  projectStore: Pick<ProjectStore, 'computedValueRegistry' | 'dataflowErrors' | 'diagnostics'>,
  graphStore: Pick<GraphStore, 'moduleSource' | 'db'>,
  editorView: EditorView,
): Extension {
  const expressionUpdatesDiagnostics = computed(() => {
    const updates = projectStore.computedValueRegistry.db
    const panics = updates.type.reverseLookup('Panic')
    const errors = updates.type.reverseLookup('DataflowError')
    const diagnostics: Diagnostic[] = []
    for (const externalId of iter.chain(panics, errors)) {
      const update = updates.get(externalId)
      if (!update) continue
      const astId = graphStore.db.idFromExternal(externalId)
      if (!astId) continue
      const span = graphStore.moduleSource.getSpan(astId)
      if (!span) continue
      const [from, to] = span
      switch (update.payload.type) {
        case 'Panic': {
          diagnostics.push({ from, to, message: update.payload.message, severity: 'error' })
          break
        }
        case 'DataflowError': {
          const error = projectStore.dataflowErrors.lookup(externalId)
          if (error?.value?.message) {
            diagnostics.push({ from, to, message: error.value.message, severity: 'error' })
          }
          break
        }
      }
    }
    return diagnostics
  })
  watch([executionContextDiagnostics, expressionUpdatesDiagnostics], () => {
    editorView.dispatch({ effects: diagnosticsUpdated.of(null) })
    forceLinting(editorView)
  })
  // The LS protocol doesn't identify what version of the file updates are in reference to. When diagnostics are
  // received from the LS, we map them to the text assuming that they are applicable to the current version of the
  // module. This will be correct if there is no one else editing, and we aren't editing faster than the LS can send
  // updates. Typing too quickly can result in incorrect ranges, but at idle it should correct itself when we receive
  // new diagnostics.
  watch(
    () => projectStore.diagnostics,
    (diagnostics) => {
      const { lineColToIndex } = stringPosConverter(graphStore.moduleSource.text)
      executionContextDiagnostics.value = lsDiagnosticsToCMDiagnostics(diagnostics, lineColToIndex)
    },
  )
  return [
    diagnosticsVersion,
    linter(() => [...executionContextDiagnostics.value, ...expressionUpdatesDiagnostics.value], {
      needsRefresh(update) {
        return (
          update.state.field(diagnosticsVersion) !== update.startState.field(diagnosticsVersion)
        )
      },
    }),
  ]
}
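// Illustrative sketch (not part of this commit): how the extension above could be wired into a
// CodeMirror view. The `projectStore`, `graphStore`, and `editorView` values are assumed to be
// provided by the surrounding component, as elsewhere in this file.
declare const projectStore: ProjectStore
declare const graphStore: GraphStore
declare const editorView: EditorView
const diagnosticsExtension: Extension = useEnsoDiagnostics(projectStore, graphStore, editorView)
// The result is passed to the editor together with the other CodeEditor extensions.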
116
app/gui/src/project-view/components/CodeEditor/ensoSyntax.ts
Normal file
@ -0,0 +1,116 @@
import { RawAstExtended } from '@/util/ast/extended'
import { RawAst } from '@/util/ast/raw'
import {
  defineLanguageFacet,
  foldNodeProp,
  Language,
  languageDataProp,
  LanguageSupport,
} from '@codemirror/language'
import {
  type Input,
  NodeProp,
  NodeSet,
  NodeType,
  Parser,
  type PartialParse,
  Tree,
} from '@lezer/common'
import { styleTags, tags } from '@lezer/highlight'
import * as iter from 'enso-common/src/utilities/data/iter'

const nodeTypes: NodeType[] = [
  ...RawAst.Tree.typeNames.map((name, id) => NodeType.define({ id, name })),
  ...RawAst.Token.typeNames.map((name, id) =>
    NodeType.define({ id: id + RawAst.Tree.typeNames.length, name: 'Token' + name }),
  ),
]

const nodeSet = new NodeSet(nodeTypes).extend(
  styleTags({
    Ident: tags.variableName,
    'Private!': tags.variableName,
    Number: tags.number,
    'Wildcard!': tags.variableName,
    'TextLiteral!': tags.string,
    OprApp: tags.operator,
    TokenOperator: tags.operator,
    'Assignment/TokenOperator': tags.definitionOperator,
    UnaryOprApp: tags.operator,
    'Function/Ident': tags.function(tags.variableName),
    ForeignFunction: tags.function(tags.variableName),
    'Import/TokenIdent': tags.function(tags.moduleKeyword),
    Export: tags.function(tags.moduleKeyword),
    Lambda: tags.function(tags.variableName),
    Documented: tags.docComment,
    ConstructorDefinition: tags.function(tags.variableName),
  }),
  foldNodeProp.add({
    Function: (node) => node.lastChild,
    ArgumentBlockApplication: (node) => node,
    OperatorBlockApplication: (node) => node,
  }),
)

type AstNode = RawAstExtended<RawAst.Tree | RawAst.Token, false>
const astProp = new NodeProp<AstNode>({ perNode: true })

function astToCodeMirrorTree(
  nodeSet: NodeSet,
  ast: AstNode,
  props?: readonly [number | NodeProp<any>, any][] | undefined,
): Tree {
  const [start, end] = ast.span()
  const children = ast.children()

  const childrenToConvert = iter.tryGetSoleValue(children)?.isToken() ? [] : children

  return new Tree(
    nodeSet.types[ast.inner.type + (ast.isToken() ? RawAst.Tree.typeNames.length : 0)]!,
    childrenToConvert.map((child) => astToCodeMirrorTree(nodeSet, child)),
    childrenToConvert.map((child) => child.span()[0] - start),
    end - start,
    [...(props ?? []), [astProp, ast]],
  )
}

const facet = defineLanguageFacet()

class EnsoParser extends Parser {
  nodeSet
  constructor() {
    super()
    this.nodeSet = nodeSet
  }
  cachedCode: string | undefined
  cachedTree: Tree | undefined
  createParse(input: Input): PartialParse {
    return {
      parsedPos: input.length,
      stopAt: () => {},
      stoppedAt: null,
      advance: () => {
        const code = input.read(0, input.length)
        if (code !== this.cachedCode || this.cachedTree == null) {
          this.cachedCode = code
          const ast = RawAstExtended.parse(code)
          this.cachedTree = astToCodeMirrorTree(this.nodeSet, ast, [[languageDataProp, facet]])
        }
        return this.cachedTree
      },
    }
  }
}

class EnsoLanguage extends Language {
  constructor() {
    super(facet, new EnsoParser())
  }
}

const ensoLanguage = new EnsoLanguage()

/** TODO: Add docs */
export function ensoSyntax() {
  return new LanguageSupport(ensoLanguage)
}
123
app/gui/src/project-view/components/CodeEditor/sync.ts
Normal file
@ -0,0 +1,123 @@
import type { GraphStore } from '@/stores/graph'
import { Annotation, ChangeSet, type ChangeSpec } from '@codemirror/state'
import { EditorView } from '@codemirror/view'
import { createDebouncer } from 'lib0/eventloop'
import { onUnmounted } from 'vue'
import { MutableModule } from 'ydoc-shared/ast'
import { SourceRangeEdit, textChangeToEdits } from 'ydoc-shared/util/data/text'
import type { Origin } from 'ydoc-shared/yjsModel'

function changeSetToTextEdits(changes: ChangeSet) {
  const textEdits = new Array<SourceRangeEdit>()
  changes.iterChanges((from, to, _fromB, _toB, insert) =>
    textEdits.push({ range: [from, to], insert: insert.toString() }),
  )
  return textEdits
}

function textEditToChangeSpec({ range: [from, to], insert }: SourceRangeEdit): ChangeSpec {
  return { from, to, insert }
}

// Indicates a change updating the text to correspond to the given module state.
const synchronizedModule = Annotation.define<MutableModule>()

/** @returns A CodeMirror Extension that synchronizes the editor state with the AST of an Enso module. */
export function useEnsoSourceSync(
  graphStore: Pick<GraphStore, 'moduleSource' | 'viewModule' | 'startEdit' | 'commitEdit'>,
  editorView: EditorView,
) {
  let pendingChanges: ChangeSet | undefined
  let currentModule: MutableModule | undefined

  const debounceUpdates = createDebouncer(0)
  const updateListener = EditorView.updateListener.of((update) => {
    for (const transaction of update.transactions) {
      const newModule = transaction.annotation(synchronizedModule)
      if (newModule) {
        // Flush the pipeline of edits that were based on the old module.
        commitPendingChanges()
        currentModule = newModule
      } else if (transaction.docChanged && currentModule) {
        pendingChanges =
          pendingChanges ? pendingChanges.compose(transaction.changes) : transaction.changes
        // Defer the update until after pending events have been processed, so that if changes are arriving faster
        // than we would be able to apply them individually we coalesce them to keep up.
        debounceUpdates(commitPendingChanges)
      }
    }
  })

  /** Set the editor contents to the current module state, discarding any pending editor-initiated changes. */
  function resetView() {
    pendingChanges = undefined
    currentModule = undefined
    const viewText = editorView.state.doc.toString()
    const code = graphStore.moduleSource.text
    const changes = textChangeToEdits(viewText, code).map(textEditToChangeSpec)
    console.info('Resetting the editor to the module code.', changes)
    editorView.dispatch({
      changes,
      annotations: synchronizedModule.of(graphStore.startEdit()),
    })
  }

  function checkSync() {
    const code = graphStore.viewModule.root()?.code() ?? ''
    const viewText = editorView.state.doc.toString()
    const uncommitted = textChangeToEdits(code, viewText).map(textEditToChangeSpec)
    if (uncommitted.length > 0) {
      console.warn(`Module source was not synced to editor content\n${code}`, uncommitted)
    }
  }

  /** Apply any pending changes to the currently-synchronized module, clearing the set of pending changes. */
  function commitPendingChanges() {
    if (!pendingChanges || !currentModule) return
    const changes = pendingChanges
    pendingChanges = undefined
    const edits = changeSetToTextEdits(changes)
    try {
      currentModule.applyTextEdits(edits, graphStore.viewModule)
      graphStore.commitEdit(currentModule, undefined, 'local:userAction:CodeEditor')
      checkSync()
    } catch (error) {
      console.error(`Code Editor failed to modify module`, error)
      resetView()
    }
  }

  let needResync = false
  function observeSourceChange(textEdits: readonly SourceRangeEdit[], origin: Origin | undefined) {
    // If we received an update from outside the Code Editor while the editor contained uncommitted changes, we cannot
    // proceed incrementally; we wait for the changes to be merged as Y.Js AST updates, and then set the view to the
    // resulting code.
    if (needResync) {
      if (!pendingChanges) {
        resetView()
        needResync = false
      }
      return
    }
    // When we aren't in the `needResync` state, we can ignore updates that originated in the Code Editor.
    if (origin === 'local:userAction:CodeEditor') {
      return
    }
    if (pendingChanges) {
      console.info(`Deferring update (editor dirty).`)
      needResync = true
      return
    }

    // If none of the above exit-conditions were reached, the transaction is applicable to our current state.
    editorView.dispatch({
      changes: textEdits.map(textEditToChangeSpec),
      annotations: synchronizedModule.of(graphStore.startEdit()),
    })
  }
  onUnmounted(() => graphStore.moduleSource.unobserve(observeSourceChange))
  return {
    updateListener,
    connectModuleListener: () => graphStore.moduleSource.observe(observeSourceChange),
  }
}
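// Illustrative sketch (not part of this commit): the two values returned by `useEnsoSourceSync`
// are used together. `updateListener` becomes one of the editor's extensions, and the module
// listener is connected once the initial content has been set. Variable names are assumptions.
declare const graphStore: GraphStore
declare const editorView: EditorView
const { updateListener, connectModuleListener } = useEnsoSourceSync(graphStore, editorView)
const syncExtensions = [updateListener]
connectModuleListener()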
106
app/gui/src/project-view/components/CodeEditor/tooltips.ts
Normal file
@ -0,0 +1,106 @@
import type { GraphStore, NodeId } from '@/stores/graph'
import { type SuggestionDbStore } from '@/stores/suggestionDatabase'
import { type RawAstExtended } from '@/util/ast/extended'
import { RawAst } from '@/util/ast/raw'
import { qnJoin, tryQualifiedName } from '@/util/qualifiedName'
import { syntaxTree } from '@codemirror/language'
import { type Extension } from '@codemirror/state'
import {
  type EditorView,
  hoverTooltip as originalHoverTooltip,
  tooltips,
  type TooltipView,
} from '@codemirror/view'
import { NodeProp, type SyntaxNode } from '@lezer/common'
import { unwrap } from 'ydoc-shared/util/data/result'
import { rangeEncloses } from 'ydoc-shared/yjsModel'

type AstNode = RawAstExtended<RawAst.Tree | RawAst.Token, false>
const astProp = new NodeProp<AstNode>({ perNode: true })

/** TODO: Add docs */
function hoverTooltip(
  create: (
    ast: AstNode,
    syntax: SyntaxNode,
  ) => TooltipView | ((view: EditorView) => TooltipView) | null | undefined,
): Extension {
  return [
    tooltips({ position: 'absolute' }),
    originalHoverTooltip((view, pos, side) => {
      const syntaxNode = syntaxTree(view.state).resolveInner(pos, side)
      const astNode = syntaxNode.tree?.prop(astProp)
      if (astNode == null) return null
      const domOrCreate = create(astNode, syntaxNode)
      if (domOrCreate == null) return null

      return {
        pos: syntaxNode.from,
        end: syntaxNode.to,
        above: true,
        arrow: true,
        create: typeof domOrCreate !== 'function' ? () => domOrCreate : domOrCreate,
      }
    }),
  ]
}

/** @returns A CodeMirror extension that creates tooltips containing type and syntax information for Enso code. */
export function ensoHoverTooltip(
  graphStore: Pick<GraphStore, 'moduleSource' | 'db'>,
  suggestionDbStore: Pick<SuggestionDbStore, 'entries'>,
) {
  return hoverTooltip((ast, syn) => {
    const dom = document.createElement('div')
    const astSpan = ast.span()
    let foundNode: NodeId | undefined
    for (const [id, node] of graphStore.db.nodeIdToNode.entries()) {
      const rootSpan = graphStore.moduleSource.getSpan(node.rootExpr.id)
      if (rootSpan && rangeEncloses(rootSpan, astSpan)) {
        foundNode = id
        break
      }
    }
    const expressionInfo = foundNode && graphStore.db.getExpressionInfo(foundNode)
    const nodeColor = foundNode && graphStore.db.getNodeColorStyle(foundNode)

    if (foundNode != null) {
      dom
        .appendChild(document.createElement('div'))
        .appendChild(document.createTextNode(`AST ID: ${foundNode}`))
    }
    if (expressionInfo != null) {
      dom
        .appendChild(document.createElement('div'))
        .appendChild(document.createTextNode(`Type: ${expressionInfo.typename ?? 'Unknown'}`))
    }
    if (expressionInfo?.profilingInfo[0] != null) {
      const profile = expressionInfo.profilingInfo[0]
      const executionTime = (profile.ExecutionTime.nanoTime / 1_000_000).toFixed(3)
      const text = `Execution Time: ${executionTime}ms`
      dom.appendChild(document.createElement('div')).appendChild(document.createTextNode(text))
    }

    dom
      .appendChild(document.createElement('div'))
      .appendChild(document.createTextNode(`Syntax: ${syn.toString()}`))
    const method = expressionInfo?.methodCall?.methodPointer
    if (method != null) {
      const moduleName = tryQualifiedName(method.module)
      const methodName = tryQualifiedName(method.name)
      const qualifiedName = qnJoin(unwrap(moduleName), unwrap(methodName))
      const [id] = suggestionDbStore.entries.nameToId.lookup(qualifiedName)
      const suggestionEntry = id != null ? suggestionDbStore.entries.get(id) : undefined
      if (suggestionEntry != null) {
        const groupNode = dom.appendChild(document.createElement('div'))
        groupNode.appendChild(document.createTextNode('Group: '))
        const groupNameNode = groupNode.appendChild(document.createElement('span'))
        groupNameNode.appendChild(document.createTextNode(`${method.module}.${method.name}`))
        if (nodeColor) {
          groupNameNode.style.color = nodeColor
        }
      }
    }
    return { dom }
  })
}
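// Illustrative sketch (not part of this commit): the hover tooltip resolves positions through the
// syntax tree, so it is combined with the `ensoSyntax()` language support defined in ensoSyntax.ts.
// The store variables are assumptions.
declare const graphStore: GraphStore
declare const suggestionDbStore: SuggestionDbStore
const codeEditorTooltips = [ensoSyntax(), ensoHoverTooltip(graphStore, suggestionDbStore)]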
@ -41,6 +41,8 @@ const PAN_MARGINS = {
|
||||
}
|
||||
const COMPONENT_EDITOR_PADDING = 12
|
||||
const ICON_WIDTH = 16
|
||||
// Component editor is larger than a typical node, so the edge should touch it a bit higher.
|
||||
const EDGE_Y_OFFSET = -6
|
||||
|
||||
const cssComponentEditorPadding = `${COMPONENT_EDITOR_PADDING}px`
|
||||
|
||||
@ -199,7 +201,9 @@ watchEffect(() => {
|
||||
return
|
||||
}
|
||||
const scenePos = originScenePos.value.add(
|
||||
new Vec2(COMPONENT_EDITOR_PADDING + ICON_WIDTH / 2, 0).scale(clientToSceneFactor.value),
|
||||
new Vec2(COMPONENT_EDITOR_PADDING + ICON_WIDTH / 2, 0)
|
||||
.scale(clientToSceneFactor.value)
|
||||
.add(new Vec2(0, EDGE_Y_OFFSET)),
|
||||
)
|
||||
graphStore.cbEditedEdge = {
|
||||
source,
|
||||
|
@ -42,7 +42,7 @@ defineExpose({
|
||||
|
||||
const rootStyle = computed(() => {
|
||||
return {
|
||||
'--node-color-primary': props.nodeColor,
|
||||
'--color-node-primary': props.nodeColor,
|
||||
'--port-edge-width': `${4 * props.navigator.scale}px`,
|
||||
}
|
||||
})
|
||||
@ -72,7 +72,7 @@ const rootStyle = computed(() => {
|
||||
|
||||
<style scoped>
|
||||
.ComponentEditor {
|
||||
--node-color-port: color-mix(in oklab, var(--node-color-primary) 85%, white 15%);
|
||||
--node-color-port: color-mix(in oklab, var(--color-node-primary) 85%, white 15%);
|
||||
--port-padding: 6px;
|
||||
--icon-height: 16px;
|
||||
--icon-text-gap: 6px;
|
||||
@ -105,17 +105,6 @@ const rootStyle = computed(() => {
|
||||
isolation: isolate;
|
||||
}
|
||||
|
||||
.iconPort::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: calc(var(--port-padding) - var(--component-editor-padding));
|
||||
width: var(--port-edge-width);
|
||||
height: calc(var(--component-editor-padding) - var(--port-padding) + var(--icon-height) / 2);
|
||||
transform: translate(-50%, 0);
|
||||
background-color: var(--node-color-port);
|
||||
z-index: -1;
|
||||
}
|
||||
|
||||
.nodeIcon {
|
||||
color: white;
|
||||
width: var(--icon-height);
|
||||
|
@ -1,13 +1,17 @@
|
||||
<script setup lang="ts">
|
||||
import { documentationEditorBindings } from '@/bindings'
|
||||
import FullscreenButton from '@/components/FullscreenButton.vue'
|
||||
import MarkdownEditor from '@/components/MarkdownEditor.vue'
|
||||
import { fetcherUrlTransformer } from '@/components/MarkdownEditor/imageUrlTransformer'
|
||||
import WithFullscreenMode from '@/components/WithFullscreenMode.vue'
|
||||
import { useGraphStore } from '@/stores/graph'
|
||||
import { useProjectStore } from '@/stores/project'
|
||||
import { useProjectFiles } from '@/stores/projectFiles'
|
||||
import { Vec2 } from '@/util/data/vec2'
|
||||
import type { ToValue } from '@/util/reactivity'
|
||||
import { ref, toRef, toValue, watch } from 'vue'
|
||||
import type { Path } from 'ydoc-shared/languageServerTypes'
|
||||
import { useToast } from '@/util/toast'
|
||||
import { ComponentInstance, computed, reactive, ref, toRef, toValue, watch } from 'vue'
|
||||
import type { Path, Uuid } from 'ydoc-shared/languageServerTypes'
|
||||
import { Err, Ok, mapOk, withContext, type Result } from 'ydoc-shared/util/data/result'
|
||||
import * as Y from 'yjs'
|
||||
|
||||
@ -19,26 +23,42 @@ const emit = defineEmits<{
|
||||
}>()
|
||||
|
||||
const toolbarElement = ref<HTMLElement>()
|
||||
const markdownEditor = ref<ComponentInstance<typeof MarkdownEditor>>()
|
||||
|
||||
const graphStore = useGraphStore()
|
||||
const projectStore = useProjectStore()
|
||||
const { transformImageUrl } = useDocumentationImages(
|
||||
const { transformImageUrl, uploadImage } = useDocumentationImages(
|
||||
toRef(graphStore, 'modulePath'),
|
||||
projectStore.readFileBinary,
|
||||
useProjectFiles(projectStore),
|
||||
)
|
||||
const uploadErrorToast = useToast.error()
|
||||
|
||||
type UploadedImagePosition = { type: 'selection' } | { type: 'coords'; coords: Vec2 }
|
||||
|
||||
/**
|
||||
 * A Project File management API for the {@link useDocumentationImages} composable.
|
||||
*/
|
||||
interface ProjectFilesAPI {
|
||||
projectRootId: Promise<Uuid | undefined>
|
||||
readFileBinary(path: Path): Promise<Result<Blob>>
|
||||
writeFileBinary(path: Path, content: Blob): Promise<Result>
|
||||
pickUniqueName(path: Path, suggestedName: string): Promise<Result<string>>
|
||||
ensureDirExists(path: Path): Promise<Result<void>>
|
||||
}
|
||||
|
||||
function useDocumentationImages(
|
||||
modulePath: ToValue<Path | undefined>,
|
||||
readFileBinary: (path: Path) => Promise<Result<Blob>>,
|
||||
projectFiles: ProjectFilesAPI,
|
||||
) {
|
||||
async function urlToPath(url: string): Promise<Result<Path> | undefined> {
|
||||
function urlToPath(url: string): Result<Path> | undefined {
|
||||
const modulePathValue = toValue(modulePath)
|
||||
if (!modulePathValue) {
|
||||
return Err('Current module path is unknown.')
|
||||
}
|
||||
const appliedUrl = new URL(url, `file:///${modulePathValue.segments.join('/')}`)
|
||||
if (appliedUrl.protocol === 'file:') {
|
||||
const segments = appliedUrl.pathname.split('/')
|
||||
// The pathname starts with '/', so we drop the leading empty segment.
|
||||
const segments = decodeURI(appliedUrl.pathname).split('/').slice(1)
|
||||
return Ok({ rootId: modulePathValue.rootId, segments })
|
||||
} else {
|
||||
// Not a relative URL, custom fetching not needed.
|
||||
@ -54,24 +74,81 @@ function useDocumentationImages(
|
||||
return pathUniqueId(path)
|
||||
}
|
||||
|
||||
const currentlyUploading = reactive(new Map<string, Promise<Blob>>())
|
||||
|
||||
const transformImageUrl = fetcherUrlTransformer(
|
||||
async (url: string) => {
|
||||
const path = await urlToPath(url)
|
||||
if (!path) return
|
||||
return withContext(
|
||||
() => `Locating documentation image (${url})`,
|
||||
() => mapOk(path, (path) => ({ location: path, uniqueId: pathUniqueId(path) })),
|
||||
() =>
|
||||
mapOk(path, (path) => {
|
||||
const id = pathUniqueId(path)
|
||||
return {
|
||||
location: path,
|
||||
uniqueId: id,
|
||||
uploading: computed(() => currentlyUploading.has(id)),
|
||||
}
|
||||
}),
|
||||
)
|
||||
},
|
||||
async (path) => {
|
||||
return withContext(
|
||||
() => `Loading documentation image (${pathDebugRepr(path)})`,
|
||||
async () => await readFileBinary(path),
|
||||
async () => {
|
||||
const uploaded = await currentlyUploading.get(pathUniqueId(path))
|
||||
return uploaded ? Ok(uploaded) : projectFiles.readFileBinary(path)
|
||||
},
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
return { transformImageUrl }
|
||||
async function uploadImage(
|
||||
name: string,
|
||||
blobPromise: Promise<Blob>,
|
||||
position: UploadedImagePosition = { type: 'selection' },
|
||||
) {
|
||||
const rootId = await projectFiles.projectRootId
|
||||
if (!rootId) {
|
||||
uploadErrorToast.show('Cannot upload image: unknown project file tree root.')
|
||||
return
|
||||
}
|
||||
if (!markdownEditor.value || !markdownEditor.value.loaded) {
|
||||
console.error('Tried to upload image while the markdown editor is not yet loaded')
|
||||
return
|
||||
}
|
||||
const dirPath = { rootId, segments: ['images'] }
|
||||
await projectFiles.ensureDirExists(dirPath)
|
||||
const filename = await projectFiles.pickUniqueName(dirPath, name)
|
||||
if (!filename.ok) {
|
||||
uploadErrorToast.reportError(filename.error)
|
||||
return
|
||||
}
|
||||
const path: Path = { rootId, segments: ['images', filename.value] }
|
||||
const id = pathUniqueId(path)
|
||||
currentlyUploading.set(id, blobPromise)
|
||||
|
||||
const insertedLink = `\n![Image](/images/${encodeURI(filename.value)})\n`
|
||||
switch (position.type) {
|
||||
case 'selection':
|
||||
markdownEditor.value.putText(insertedLink)
|
||||
break
|
||||
case 'coords':
|
||||
markdownEditor.value.putTextAtCoord(insertedLink, position.coords)
|
||||
break
|
||||
}
|
||||
try {
|
||||
const blob = await blobPromise
|
||||
const uploadResult = await projectFiles.writeFileBinary(path, blob)
|
||||
if (!uploadResult.ok)
|
||||
uploadErrorToast.reportError(uploadResult.error, 'Failed to upload image')
|
||||
} finally {
|
||||
currentlyUploading.delete(id)
|
||||
}
|
||||
}
|
||||
|
||||
return { transformImageUrl, uploadImage }
|
||||
}
|
||||
|
||||
const fullscreen = ref(false)
|
||||
@ -81,6 +158,55 @@ watch(
|
||||
() => fullscreen.value || fullscreenAnimating.value,
|
||||
(fullscreenOrAnimating) => emit('update:fullscreen', fullscreenOrAnimating),
|
||||
)
|
||||
|
||||
const supportedImageTypes: Record<string, { extension: string }> = {
|
||||
// List taken from https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Image_types
|
||||
'image/apng': { extension: 'apng' },
|
||||
'image/avif': { extension: 'avif' },
|
||||
'image/gif': { extension: 'gif' },
|
||||
'image/jpeg': { extension: 'jpg' },
|
||||
'image/png': { extension: 'png' },
|
||||
'image/svg+xml': { extension: 'svg' },
|
||||
'image/webp': { extension: 'webp' },
|
||||
// Question: do we want to have BMP and ICO here?
|
||||
}
|
||||
|
||||
async function handleFileDrop(event: DragEvent) {
|
||||
if (!event.dataTransfer?.items) return
|
||||
for (const item of event.dataTransfer.items) {
|
||||
if (item.kind !== 'file' || !Object.hasOwn(supportedImageTypes, item.type)) continue
|
||||
const file = item.getAsFile()
|
||||
if (!file) continue
|
||||
const clientPos = new Vec2(event.clientX, event.clientY)
|
||||
event.stopPropagation()
|
||||
event.preventDefault()
|
||||
await uploadImage(file.name, Promise.resolve(file), { type: 'coords', coords: clientPos })
|
||||
}
|
||||
}
|
||||
|
||||
const handler = documentationEditorBindings.handler({
|
||||
paste: () => {
|
||||
window.navigator.clipboard.read().then(async (items) => {
|
||||
if (markdownEditor.value == null) return
|
||||
for (const item of items) {
|
||||
const textType = item.types.find((type) => type === 'text/plain')
|
||||
if (textType) {
|
||||
const blob = await item.getType(textType)
|
||||
markdownEditor.value.putText(await blob.text())
|
||||
break
|
||||
}
|
||||
const imageType = item.types.find((type) => type in supportedImageTypes)
|
||||
if (imageType) {
|
||||
const ext = supportedImageTypes[imageType]?.extension ?? ''
|
||||
uploadImage(`image.${ext}`, item.getType(imageType)).catch((err) =>
|
||||
uploadErrorToast.show(`Failed to upload image: ${err}`),
|
||||
)
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
},
|
||||
})
|
||||
</script>
|
||||
|
||||
<template>
|
||||
@ -89,9 +215,15 @@ watch(
|
||||
<div ref="toolbarElement" class="toolbar">
|
||||
<FullscreenButton v-model="fullscreen" />
|
||||
</div>
|
||||
<div class="scrollArea">
|
||||
<div
|
||||
class="scrollArea"
|
||||
@keydown="handler"
|
||||
@dragover.prevent
|
||||
@drop.prevent="handleFileDrop($event)"
|
||||
>
|
||||
<MarkdownEditor
|
||||
:yText="yText"
|
||||
ref="markdownEditor"
|
||||
:content="yText"
|
||||
:transformImageUrl="transformImageUrl"
|
||||
:toolbarContainer="toolbarElement"
|
||||
/>
|
||||
|
@ -664,11 +664,6 @@ async function handleFileDrop(event: DragEvent) {
|
||||
const MULTIPLE_FILES_GAP = 50
|
||||
|
||||
if (!event.dataTransfer?.items) return
|
||||
const projectRootId = await projectStore.projectRootId
|
||||
if (projectRootId == null) {
|
||||
toasts.userActionFailed.show(`Unable to upload file(s): Could not identify project root.`)
|
||||
return
|
||||
}
|
||||
;[...event.dataTransfer.items].forEach(async (item, index) => {
|
||||
if (item.kind === 'file') {
|
||||
const file = item.getAsFile()
|
||||
@ -677,10 +672,7 @@ async function handleFileDrop(event: DragEvent) {
|
||||
const offset = new Vec2(0, index * -MULTIPLE_FILES_GAP)
|
||||
const pos = graphNavigator.clientToScenePos(clientPos).add(offset)
|
||||
const uploader = Uploader.Create(
|
||||
projectStore.lsRpcConnection,
|
||||
projectStore.dataConnection,
|
||||
projectRootId,
|
||||
projectStore.awareness,
|
||||
projectStore,
|
||||
file,
|
||||
pos,
|
||||
projectStore.isOnLocalBackend,
|
||||
|
@ -73,8 +73,7 @@ const targetPos = computed<Vec2 | undefined>(() => {
|
||||
if (expr != null && targetNode.value != null && targetNodeRect.value != null) {
|
||||
const targetRectRelative = graph.getPortRelativeRect(expr)
|
||||
if (targetRectRelative == null) return
|
||||
const yAdjustment =
|
||||
targetIsSelfArgument.value ? -(selfArgumentArrowHeight + selfArgumentArrowYOffset) : 0
|
||||
const yAdjustment = -(arrowHeight + arrowYOffset)
|
||||
return targetNodeRect.value.pos.add(new Vec2(targetRectRelative.center().x, yAdjustment))
|
||||
} else if (mouseAnchorPos.value != null) {
|
||||
return mouseAnchorPos.value
|
||||
@ -509,29 +508,18 @@ const backwardEdgeArrowTransform = computed<string | undefined>(() => {
|
||||
return svgTranslate(origin.add(points[1]))
|
||||
})
|
||||
|
||||
const targetIsSelfArgument = computed(() => {
|
||||
if ('targetIsSelfArgument' in props.edge && props.edge?.targetIsSelfArgument) return true
|
||||
if (!targetExpr.value) return
|
||||
const nodeId = graph.getPortNodeId(targetExpr.value)
|
||||
if (!nodeId) return
|
||||
const primarySubject = graph.db.nodeIdToNode.get(nodeId)?.primarySubject
|
||||
if (!primarySubject) return
|
||||
return targetExpr.value === primarySubject
|
||||
})
|
||||
|
||||
const selfArgumentArrowHeight = 9
|
||||
const selfArgumentArrowYOffset = 0
|
||||
const selfArgumentArrowTransform = computed<string | undefined>(() => {
|
||||
const selfArgumentArrowTopOffset = 4
|
||||
const selfArgumentArrowWidth = 12
|
||||
if (!targetIsSelfArgument.value) return
|
||||
const arrowHeight = 9
|
||||
const arrowYOffset = 0
|
||||
const arrowTransform = computed<string | undefined>(() => {
|
||||
const arrowTopOffset = 4
|
||||
const arrowWidth = 12
|
||||
const target = targetPos.value
|
||||
if (target == null) return
|
||||
const pos = target.sub(new Vec2(selfArgumentArrowWidth / 2, selfArgumentArrowTopOffset))
|
||||
const pos = target.sub(new Vec2(arrowWidth / 2, arrowTopOffset))
|
||||
return svgTranslate(pos)
|
||||
})
|
||||
|
||||
const selfArgumentArrowPath = [
|
||||
const arrowPath = [
|
||||
'M10.9635 1.5547',
|
||||
'L6.83205 7.75193',
|
||||
'C6.43623 8.34566 5.56377 8.34566 5.16795 7.75192',
|
||||
@ -620,9 +608,9 @@ const sourceHoverAnimationStyle = computed(() => {
|
||||
:data-target-node-id="targetNode"
|
||||
/>
|
||||
<path
|
||||
v-if="selfArgumentArrowTransform"
|
||||
:transform="selfArgumentArrowTransform"
|
||||
:d="selfArgumentArrowPath"
|
||||
v-if="arrowTransform"
|
||||
:transform="arrowTransform"
|
||||
:d="arrowPath"
|
||||
:class="{ arrow: true, visible: true, dimmed: targetEndIsDimmed }"
|
||||
:style="baseStyle"
|
||||
/>
|
||||
|
@ -462,7 +462,6 @@ function recomputeOnce() {
|
||||
:nodePosition="nodePosition"
|
||||
:nodeSize="graphSelectionSize"
|
||||
:class="{ draggable: true, dragged: isDragged }"
|
||||
:selected
|
||||
:color
|
||||
:externalHovered="nodeHovered"
|
||||
@visible="selectionVisible = $event"
|
||||
@ -605,10 +604,21 @@ function recomputeOnce() {
|
||||
height: var(--node-size-y);
|
||||
rx: var(--node-border-radius);
|
||||
|
||||
fill: var(--node-color-primary);
|
||||
fill: var(--color-node-background);
|
||||
transition: fill 0.2s ease;
|
||||
}
|
||||
|
||||
.GraphNode {
|
||||
--color-node-text: white;
|
||||
--color-node-primary: var(--node-group-color);
|
||||
--color-node-background: var(--node-group-color);
|
||||
}
|
||||
|
||||
.GraphNode.selected {
|
||||
--color-node-background: color-mix(in oklab, var(--color-node-primary) 30%, white 70%);
|
||||
--color-node-text: color-mix(in oklab, var(--color-node-primary) 70%, black 30%);
|
||||
}
|
||||
|
||||
.GraphNode {
|
||||
position: absolute;
|
||||
border-radius: var(--node-border-radius);
|
||||
@ -617,17 +627,13 @@ function recomputeOnce() {
|
||||
/** Space between node and component above and below, such as comments and errors. */
|
||||
--node-vertical-gap: 4px;
|
||||
|
||||
--node-color-primary: color-mix(
|
||||
in oklab,
|
||||
var(--node-group-color) 100%,
|
||||
var(--node-group-color) 0%
|
||||
);
|
||||
--node-color-port: color-mix(in oklab, var(--node-color-primary) 85%, white 15%);
|
||||
--color-node-primary: var(--node-group-color);
|
||||
--node-color-port: color-mix(in oklab, var(--color-node-primary) 85%, white 15%);
|
||||
--node-color-error: color-mix(in oklab, var(--node-group-color) 30%, rgb(255, 0, 0) 70%);
|
||||
|
||||
&.executionState-Unknown,
|
||||
&.executionState-Pending {
|
||||
--node-color-primary: color-mix(in oklab, var(--node-group-color) 60%, #aaa 40%);
|
||||
--color-node-primary: color-mix(in oklab, var(--node-group-color) 60%, #aaa 40%);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -202,7 +202,7 @@ graph.suggestEdgeFromOutput(outputHovered)
|
||||
z-index: 10;
|
||||
text-anchor: middle;
|
||||
opacity: calc(var(--hover-animation) * var(--hover-animation));
|
||||
fill: var(--node-color-primary);
|
||||
fill: var(--color-node-primary);
|
||||
transform: translate(50%, calc(var(--node-size-y) + var(--output-port-max-width) + 16px));
|
||||
}
|
||||
</style>
|
||||
|
@ -5,7 +5,6 @@ import { computed, ref, watchEffect } from 'vue'
|
||||
const props = defineProps<{
|
||||
nodePosition: Vec2
|
||||
nodeSize: Vec2
|
||||
selected: boolean
|
||||
externalHovered: boolean
|
||||
color: string
|
||||
}>()
|
||||
@ -15,7 +14,7 @@ const emit = defineEmits<{
|
||||
}>()
|
||||
|
||||
const hovered = ref(false)
|
||||
const visible = computed(() => props.selected || props.externalHovered || hovered.value)
|
||||
const visible = computed(() => props.externalHovered || hovered.value)
|
||||
|
||||
watchEffect(() => emit('visible', visible.value))
|
||||
|
||||
@ -33,7 +32,7 @@ const rootStyle = computed(() => {
|
||||
<template>
|
||||
<div
|
||||
class="GraphNodeSelection"
|
||||
:class="{ visible, selected: props.selected }"
|
||||
:class="{ visible }"
|
||||
:style="rootStyle"
|
||||
@pointerenter="hovered = true"
|
||||
@pointerleave="hovered = false"
|
||||
@ -52,7 +51,7 @@ const rootStyle = computed(() => {
|
||||
&:before {
|
||||
position: absolute;
|
||||
content: '';
|
||||
opacity: 0.2;
|
||||
opacity: 0.3;
|
||||
display: block;
|
||||
inset: var(--selected-node-border-width);
|
||||
box-shadow: 0 0 0 calc(0px - var(--node-border-radius)) var(--selection-color);
|
||||
@ -67,8 +66,4 @@ const rootStyle = computed(() => {
|
||||
.GraphNodeSelection.visible::before {
|
||||
box-shadow: 0 0 0 var(--selected-node-border-width) var(--selection-color);
|
||||
}
|
||||
|
||||
.GraphNodeSelection:not(.selected):hover::before {
|
||||
opacity: 0.3;
|
||||
}
|
||||
</style>
|
||||
|
@ -125,7 +125,7 @@ export const ICON_WIDTH = 16
|
||||
|
||||
<style scoped>
|
||||
.NodeWidgetTree {
|
||||
color: white;
|
||||
color: var(--color-node-text);
|
||||
|
||||
outline: none;
|
||||
min-height: var(--node-port-height);
|
||||
|
@ -1,15 +1,14 @@
|
||||
import { Awareness } from '@/stores/awareness'
|
||||
import { ProjectFiles, useProjectFiles } from '@/stores/projectFiles'
|
||||
import { Vec2 } from '@/util/data/vec2'
|
||||
import type { DataServer } from '@/util/net/dataServer'
|
||||
import { Keccak, sha3_224 as SHA3 } from '@noble/hashes/sha3'
|
||||
import type { Hash } from '@noble/hashes/utils'
|
||||
import { bytesToHex } from '@noble/hashes/utils'
|
||||
import { markRaw, toRaw } from 'vue'
|
||||
import { escapeTextLiteral } from 'ydoc-shared/ast/text'
|
||||
import type { LanguageServer } from 'ydoc-shared/languageServer'
|
||||
import { ErrorCode, RemoteRpcError } from 'ydoc-shared/languageServer'
|
||||
import type { Path, StackItem, Uuid } from 'ydoc-shared/languageServerTypes'
|
||||
import { Err, Ok, withContext, type Result } from 'ydoc-shared/util/data/result'
|
||||
import { Err, Ok, type Result } from 'ydoc-shared/util/data/result'
|
||||
|
||||
// === Constants ===
|
||||
|
||||
@ -47,13 +46,17 @@ export class Uploader {
|
||||
private checksum: Hash<Keccak>
|
||||
private uploadedBytes: bigint
|
||||
private stackItem: StackItem
|
||||
private awareness: Awareness
|
||||
private projectFiles: ProjectFiles
|
||||
|
||||
private constructor(
|
||||
private rpc: LanguageServer,
|
||||
private binary: DataServer,
|
||||
private awareness: Awareness,
|
||||
projectStore: {
|
||||
projectRootId: Promise<Uuid | undefined>
|
||||
lsRpcConnection: LanguageServer
|
||||
dataConnection: DataServer
|
||||
awareness: Awareness
|
||||
},
|
||||
private file: File,
|
||||
private projectRootId: Uuid,
|
||||
private position: Vec2,
|
||||
private isOnLocalBackend: boolean,
|
||||
private disableDirectRead: boolean,
|
||||
@ -62,14 +65,18 @@ export class Uploader {
|
||||
this.checksum = SHA3.create()
|
||||
this.uploadedBytes = BigInt(0)
|
||||
this.stackItem = markRaw(toRaw(stackItem))
|
||||
this.awareness = projectStore.awareness
|
||||
this.projectFiles = useProjectFiles(projectStore)
|
||||
}
|
||||
|
||||
/** Constructor */
|
||||
static Create(
|
||||
rpc: LanguageServer,
|
||||
binary: DataServer,
|
||||
projectRootId: Uuid,
|
||||
awareness: Awareness,
|
||||
projectStore: {
|
||||
projectRootId: Promise<Uuid | undefined>
|
||||
lsRpcConnection: LanguageServer
|
||||
dataConnection: DataServer
|
||||
awareness: Awareness
|
||||
},
|
||||
file: File,
|
||||
position: Vec2,
|
||||
isOnLocalBackend: boolean,
|
||||
@ -77,11 +84,8 @@ export class Uploader {
|
||||
stackItem: StackItem,
|
||||
): Uploader {
|
||||
return new Uploader(
|
||||
rpc,
|
||||
binary,
|
||||
awareness,
|
||||
projectStore,
|
||||
file,
|
||||
projectRootId,
|
||||
position,
|
||||
isOnLocalBackend,
|
||||
disableDirectRead,
|
||||
@ -100,20 +104,29 @@ export class Uploader {
|
||||
) {
|
||||
return Ok({ source: 'FileSystemRoot', name: this.file.path })
|
||||
}
|
||||
const dataDirExists = await this.ensureDataDirExists()
|
||||
const rootId = await this.projectFiles.projectRootId
|
||||
if (rootId == null) return Err('Could not identify project root.')
|
||||
const dataDirPath = { rootId, segments: [DATA_DIR_NAME] }
|
||||
const dataDirExists = await this.projectFiles.ensureDirExists(dataDirPath)
|
||||
if (!dataDirExists.ok) return dataDirExists
|
||||
const name = await this.pickUniqueName(this.file.name)
|
||||
const name = await this.projectFiles.pickUniqueName(dataDirPath, this.file.name)
|
||||
if (!name.ok) return name
|
||||
this.awareness.addOrUpdateUpload(name.value, {
|
||||
sizePercentage: 0,
|
||||
position: this.position,
|
||||
stackItem: this.stackItem,
|
||||
})
|
||||
const remotePath: Path = { rootId: this.projectRootId, segments: [DATA_DIR_NAME, name.value] }
|
||||
const remotePath: Path = { rootId, segments: [DATA_DIR_NAME, name.value] }
|
||||
const cleanup = this.cleanup.bind(this, name.value)
|
||||
const writableStream = new WritableStream<Uint8Array>({
|
||||
write: async (chunk: Uint8Array) => {
|
||||
await this.binary.writeBytes(remotePath, this.uploadedBytes, false, chunk)
|
||||
const result = await this.projectFiles.writeBytes(
|
||||
remotePath,
|
||||
this.uploadedBytes,
|
||||
false,
|
||||
chunk,
|
||||
)
|
||||
if (!result.ok) throw result.error
|
||||
this.checksum.update(chunk)
|
||||
this.uploadedBytes += BigInt(chunk.length)
|
||||
const bytes = Number(this.uploadedBytes)
|
||||
@ -127,13 +140,13 @@ export class Uploader {
|
||||
close: cleanup,
|
||||
abort: async (reason: string) => {
|
||||
cleanup()
|
||||
await this.rpc.deleteFile(remotePath)
|
||||
await this.projectFiles.deleteFile(remotePath)
|
||||
throw new Error(`Uploading process aborted. ${reason}`)
|
||||
},
|
||||
})
|
||||
// Disabled until https://github.com/enso-org/enso/issues/6691 is fixed.
|
||||
// Plus, handle the error here, as it should be displayed to the user.
|
||||
// uploader.assertChecksum(remotePath)
|
||||
// this.projectFiles.assertChecksum(remotePath)
|
||||
await this.file.stream().pipeTo(writableStream)
|
||||
return Ok({ source: 'Project', name: name.value })
|
||||
}
|
||||
@ -141,76 +154,4 @@ export class Uploader {
|
||||
private cleanup(name: string) {
|
||||
this.awareness.removeUpload(name)
|
||||
}
|
||||
|
||||
private async assertChecksum(path: Path): Promise<Result<void>> {
|
||||
const engineChecksum = await this.rpc.fileChecksum(path)
|
||||
if (!engineChecksum.ok) return engineChecksum
|
||||
const hexChecksum = bytesToHex(this.checksum.digest())
|
||||
if (hexChecksum != engineChecksum.value.checksum) {
|
||||
return Err(
|
||||
`Uploading file failed, checksum does not match. ${hexChecksum} != ${engineChecksum.value.checksum}`,
|
||||
)
|
||||
} else {
|
||||
return Ok()
|
||||
}
|
||||
}
|
||||
|
||||
private dataDirPath(): Path {
|
||||
return { rootId: this.projectRootId, segments: [DATA_DIR_NAME] }
|
||||
}
|
||||
|
||||
private async ensureDataDirExists() {
|
||||
const exists = await this.dataDirExists()
|
||||
if (!exists.ok) return exists
|
||||
if (exists.value) return Ok()
|
||||
return await withContext(
|
||||
() => 'When creating directory for uploaded file',
|
||||
async () => {
|
||||
return await this.rpc.createFile({
|
||||
type: 'Directory',
|
||||
name: DATA_DIR_NAME,
|
||||
path: { rootId: this.projectRootId, segments: [] },
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
private async dataDirExists(): Promise<Result<boolean>> {
|
||||
const info = await this.rpc.fileInfo(this.dataDirPath())
|
||||
if (info.ok) return Ok(info.value.attributes.kind.type == 'Directory')
|
||||
else if (
|
||||
info.error.payload.cause instanceof RemoteRpcError &&
|
||||
(info.error.payload.cause.code === ErrorCode.FILE_NOT_FOUND ||
|
||||
info.error.payload.cause.code === ErrorCode.CONTENT_ROOT_NOT_FOUND)
|
||||
) {
|
||||
return Ok(false)
|
||||
} else {
|
||||
return info
|
||||
}
|
||||
}
|
||||
|
||||
private async pickUniqueName(suggestedName: string): Promise<Result<string>> {
|
||||
const files = await this.rpc.listFiles(this.dataDirPath())
|
||||
if (!files.ok) return files
|
||||
const existingNames = new Set(files.value.paths.map((path) => path.name))
|
||||
const { stem, extension = '' } = splitFilename(suggestedName)
|
||||
let candidate = suggestedName
|
||||
let num = 1
|
||||
while (existingNames.has(candidate)) {
|
||||
candidate = `${stem}_${num}.${extension}`
|
||||
num += 1
|
||||
}
|
||||
return Ok(candidate)
|
||||
}
|
||||
}
|
||||
|
||||
/** Split filename into stem and (optional) extension. */
|
||||
function splitFilename(fileName: string): { stem: string; extension?: string } {
|
||||
const dotIndex = fileName.lastIndexOf('.')
|
||||
if (dotIndex !== -1 && dotIndex !== 0) {
|
||||
const stem = fileName.substring(0, dotIndex)
|
||||
const extension = fileName.substring(dotIndex + 1)
|
||||
return { stem, extension }
|
||||
}
|
||||
return { stem: fileName }
|
||||
}
|
||||
|
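// Illustrative examples (not part of this commit) of how splitFilename behaves:
// splitFilename('image.png')  => { stem: 'image', extension: 'png' }
// splitFilename('.gitignore') => { stem: '.gitignore' } (a leading dot is not treated as an extension)
// splitFilename('README')     => { stem: 'README' }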
@ -70,6 +70,6 @@ export const ArgumentNameShownKey: unique symbol = Symbol.for('WidgetInput:Argum
|
||||
|
||||
.placeholder,
|
||||
.name {
|
||||
color: rgb(255 255 255 / 0.5);
|
||||
opacity: 0.6;
|
||||
}
|
||||
</style>
|
||||
|
@ -113,7 +113,7 @@ export const widgetDefinition = defineWidget(
|
||||
}
|
||||
|
||||
.name {
|
||||
color: rgb(255 255 255 / 0.5);
|
||||
opacity: 0.5;
|
||||
margin-right: var(--widget-token-pad-unit);
|
||||
}
|
||||
</style>
|
||||
|
@ -47,7 +47,7 @@ export const widgetDefinition = defineWidget(
|
||||
}
|
||||
|
||||
.token {
|
||||
color: rgb(255 255 255 / 0.33);
|
||||
opacity: 0.33;
|
||||
user-select: none;
|
||||
}
|
||||
</style>
|
||||
|
@ -189,7 +189,6 @@ export const widgetDefinition = defineWidget(
|
||||
enabled,
|
||||
connected,
|
||||
isTarget,
|
||||
isSelfArgument,
|
||||
widgetRounded: connected,
|
||||
newToConnect: !hasConnection && isCurrentEdgeHoverTarget,
|
||||
primary: props.nesting < 2,
|
||||
@ -215,6 +214,7 @@ export const widgetDefinition = defineWidget(
|
||||
|
||||
.WidgetPort.connected {
|
||||
background-color: var(--node-color-port);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.GraphEditor.draggingEdge .WidgetPort {
|
||||
@ -248,16 +248,4 @@ export const widgetDefinition = defineWidget(
|
||||
right: 0px;
|
||||
}
|
||||
}
|
||||
|
||||
.WidgetPort.isTarget:not(.isSelfArgument):after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: -4px;
|
||||
left: 50%;
|
||||
width: 4px;
|
||||
height: 5px;
|
||||
transform: translate(-50%, 0);
|
||||
background-color: var(--node-color-port);
|
||||
z-index: -1;
|
||||
}
|
||||
</style>
|
||||
|
@ -471,7 +471,8 @@ declare module '@/providers/widgetRegistry' {
|
||||
<SizeTransition height :duration="100">
|
||||
<DropdownWidget
|
||||
v-if="dropDownInteraction.isActive() && activity == null"
|
||||
color="var(--node-color-primary)"
|
||||
color="var(--color-node-text)"
|
||||
backgroundColor="var(--color-node-background)"
|
||||
:entries="entries"
|
||||
@clickEntry="onClick"
|
||||
/>
|
||||
@ -517,7 +518,7 @@ svg.arrow {
|
||||
}
|
||||
|
||||
.activityElement {
|
||||
--background-color: var(--node-color-primary);
|
||||
--background-color: var(--color-node-primary);
|
||||
/* Above the circular menu. */
|
||||
z-index: 26;
|
||||
}
|
||||
|
@ -137,4 +137,12 @@ export const widgetDefinition = defineWidget(
|
||||
background: var(--color-widget-selection);
|
||||
}
|
||||
}
|
||||
|
||||
.selected .WidgetText {
|
||||
background: var(--color-widget-unfocus);
|
||||
&:has(> :focus) {
|
||||
outline: none;
|
||||
background: var(--color-widget-focus);
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
@ -28,12 +28,12 @@ export const widgetDefinition = defineWidget(
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
white-space: pre;
|
||||
color: rgb(255 255 255 / 0.33);
|
||||
opacity: 0.33;
|
||||
|
||||
&.Ident,
|
||||
&.TextSection,
|
||||
&.Digits {
|
||||
color: white;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
&.TextSection,
|
||||
|
@ -1,21 +1,39 @@
|
||||
<script setup lang="ts">
|
||||
import type { UrlTransformer } from '@/components/MarkdownEditor/imageUrlTransformer'
|
||||
import { defineAsyncComponent } from 'vue'
|
||||
import {
|
||||
provideDocumentationImageUrlTransformer,
|
||||
type UrlTransformer,
|
||||
} from '@/components/MarkdownEditor/imageUrlTransformer'
|
||||
import { Vec2 } from '@/util/data/vec2'
|
||||
import { ComponentInstance, computed, defineAsyncComponent, ref, toRef } from 'vue'
|
||||
import * as Y from 'yjs'
|
||||
|
||||
const props = defineProps<{
|
||||
yText: Y.Text
|
||||
content: Y.Text | string
|
||||
transformImageUrl?: UrlTransformer
|
||||
toolbarContainer: HTMLElement | undefined
|
||||
}>()
|
||||
|
||||
const inner = ref<ComponentInstance<typeof LazyMarkdownEditor>>()
|
||||
|
||||
const LazyMarkdownEditor = defineAsyncComponent(
|
||||
() => import('@/components/MarkdownEditor/MarkdownEditorImpl.vue'),
|
||||
)
|
||||
|
||||
provideDocumentationImageUrlTransformer(toRef(props, 'transformImageUrl'))
|
||||
|
||||
defineExpose({
|
||||
loaded: computed(() => inner.value != null),
|
||||
putText: (text: string) => {
|
||||
inner.value?.putText(text)
|
||||
},
|
||||
putTextAtCoord: (text: string, coords: Vec2) => {
|
||||
inner.value?.putTextAtCoords(text, coords)
|
||||
},
|
||||
})
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<Suspense>
|
||||
<LazyMarkdownEditor v-bind="props" />
|
||||
<LazyMarkdownEditor ref="inner" v-bind="props" class="MarkdownEditor" />
|
||||
</Suspense>
|
||||
</template>
|
||||
|
@ -40,5 +40,10 @@ onUnmounted(() => {
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<img :src="data?.ok ? data.value.url : ''" :alt="alt" :title="title" />
|
||||
<img
|
||||
:src="data?.ok ? data.value.url : ''"
|
||||
:alt="alt"
|
||||
:title="title"
|
||||
:class="{ uploading: data?.ok && data.value.uploading?.value }"
|
||||
/>
|
||||
</template>
|
||||
|
@ -1,62 +1,88 @@
|
||||
<script setup lang="ts">
|
||||
import EditorRoot from '@/components/EditorRoot.vue'
|
||||
import EditorRoot from '@/components/codemirror/EditorRoot.vue'
|
||||
import { yCollab } from '@/components/codemirror/yCollab'
|
||||
import { highlightStyle } from '@/components/MarkdownEditor/highlight'
|
||||
import {
|
||||
provideDocumentationImageUrlTransformer,
|
||||
type UrlTransformer,
|
||||
} from '@/components/MarkdownEditor/imageUrlTransformer'
|
||||
import { ensoMarkdown } from '@/components/MarkdownEditor/markdown'
|
||||
import VueComponentHost from '@/components/VueComponentHost.vue'
|
||||
import { EditorState } from '@codemirror/state'
|
||||
import { assert } from '@/util/assert'
|
||||
import { Vec2 } from '@/util/data/vec2'
|
||||
import { EditorState, Text } from '@codemirror/state'
|
||||
import { EditorView } from '@codemirror/view'
|
||||
import { minimalSetup } from 'codemirror'
|
||||
import { type ComponentInstance, onMounted, ref, toRef, useCssModule, watch } from 'vue'
|
||||
import { yCollab } from 'y-codemirror.next'
|
||||
import * as awarenessProtocol from 'y-protocols/awareness.js'
|
||||
import { type ComponentInstance, computed, onMounted, ref, toRef, useCssModule, watch } from 'vue'
|
||||
import { Awareness } from 'y-protocols/awareness.js'
|
||||
import * as Y from 'yjs'
|
||||
|
||||
const editorRoot = ref<ComponentInstance<typeof EditorRoot>>()
|
||||
|
||||
const props = defineProps<{
|
||||
yText: Y.Text
|
||||
transformImageUrl?: UrlTransformer | undefined
|
||||
toolbarContainer: HTMLElement | undefined
|
||||
content: Y.Text | string
|
||||
toolbarContainer?: HTMLElement | undefined
|
||||
}>()
|
||||
|
||||
const vueHost = ref<ComponentInstance<typeof VueComponentHost>>()
|
||||
const focused = ref(false)
|
||||
const readonly = computed(() => typeof props.content === 'string')
|
||||
const editing = computed(() => !readonly.value && focused.value)
|
||||
|
||||
provideDocumentationImageUrlTransformer(toRef(props, 'transformImageUrl'))
|
||||
|
||||
const awareness = new awarenessProtocol.Awareness(new Y.Doc())
|
||||
const awareness = new Awareness(new Y.Doc())
|
||||
const editorView = new EditorView()
|
||||
// Disable EditContext API because of https://github.com/codemirror/dev/issues/1458.
|
||||
;(EditorView as any).EDIT_CONTEXT = false
|
||||
const constantExtensions = [minimalSetup, highlightStyle(useCssModule()), EditorView.lineWrapping]
|
||||
|
||||
watch([vueHost, toRef(props, 'yText')], ([vueHost, yText]) => {
|
||||
watch([vueHost, toRef(props, 'content')], ([vueHost, content]) => {
|
||||
if (!vueHost) return
|
||||
editorView.setState(
|
||||
EditorState.create({
|
||||
doc: yText.toString(),
|
||||
extensions: [...constantExtensions, ensoMarkdown({ vueHost }), yCollab(yText, awareness)],
|
||||
}),
|
||||
)
|
||||
let doc = ''
|
||||
const extensions = [...constantExtensions, ensoMarkdown({ vueHost })]
|
||||
if (typeof content === 'string') {
|
||||
doc = content
|
||||
} else {
|
||||
assert(content.doc !== null)
|
||||
const yTextWithDoc: Y.Text & { doc: Y.Doc } = content as any
|
||||
doc = content.toString()
|
||||
extensions.push(yCollab(yTextWithDoc, awareness))
|
||||
}
|
||||
editorView.setState(EditorState.create({ doc, extensions }))
|
||||
})
|
||||
|
||||
onMounted(() => {
|
||||
const content = editorView.dom.getElementsByClassName('cm-content')[0]!
|
||||
content.addEventListener('focusin', () => (editing.value = true))
|
||||
// Enable rendering the line containing the current cursor in `editing` mode if focus enters the element *inside* the
|
||||
// scroll area--if we attached the handler to the editor root, clicking the scrollbar would cause editing mode to be
|
||||
// activated.
|
||||
editorView.dom
|
||||
.getElementsByClassName('cm-content')[0]!
|
||||
.addEventListener('focusin', () => (focused.value = true))
|
||||
editorRoot.value?.rootElement?.prepend(editorView.dom)
|
||||
})
|
||||
|
||||
const editing = ref(false)
|
||||
/**
|
||||
* Replace text in given document range with `text`, putting text cursor after inserted text.
|
||||
*
|
||||
* If text contains multiple lines, it should use '\n', not '\r\n' for line endings.
|
||||
*/
|
||||
function putTextAt(text: string, from: number, to: number) {
|
||||
const insert = Text.of(text.split('\n'))
|
||||
editorView.dispatch({
|
||||
changes: { from, to, insert },
|
||||
selection: { anchor: from + insert.length },
|
||||
})
|
||||
}
|
||||
|
||||
defineExpose({
|
||||
putText: (text: string) => {
|
||||
const range = editorView.state.selection.main
|
||||
putTextAt(text, range.from, range.to)
|
||||
},
|
||||
putTextAt,
|
||||
putTextAtCoords: (text: string, coords: Vec2) => {
|
||||
const pos = editorView.posAtCoords(coords, false)
|
||||
putTextAt(text, pos, pos)
|
||||
},
|
||||
})
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<EditorRoot
|
||||
ref="editorRoot"
|
||||
class="MarkdownEditor"
|
||||
:class="{ editing }"
|
||||
@focusout="editing = false"
|
||||
/>
|
||||
<EditorRoot ref="editorRoot" v-bind="$attrs" :class="{ editing }" @focusout="focused = false" />
|
||||
<VueComponentHost ref="vueHost" />
|
||||
</template>
|
||||
|
||||
@ -65,19 +91,14 @@ const editing = ref(false)
|
||||
font-family: var(--font-sans);
|
||||
}
|
||||
|
||||
:deep(.cm-scroller) {
|
||||
/* Prevent touchpad back gesture, which can be triggered while panning. */
|
||||
overscroll-behavior: none;
|
||||
}
|
||||
|
||||
.EditorRoot :deep(.cm-editor) {
|
||||
position: relative;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
:deep(.cm-editor) {
|
||||
opacity: 1;
|
||||
color: black;
|
||||
font-size: 12px;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
:deep(img.uploading) {
|
||||
opacity: 0.5;
|
||||
}
|
||||
</style>
|
||||
|
||||
|
@ -0,0 +1,74 @@
<script setup lang="ts">
import MarkdownEditorImpl from '@/components/MarkdownEditor/MarkdownEditorImpl.vue'
import type { Text } from '@codemirror/state'
import { SyntaxNode, TreeCursor } from '@lezer/common'
import { computed } from 'vue'

const { source, parsed } = defineProps<{
  source: Text
  parsed: SyntaxNode
}>()

function parseRow(cursor: TreeCursor, output: string[]) {
  if (!cursor.firstChild()) return
  do {
    if (cursor.name === 'TableCell') {
      output.push(source.sliceString(cursor.from, cursor.to))
    } else if (cursor.name !== 'TableDelimiter') {
      console.warn('Unexpected in table row:', cursor.name)
    }
  } while (cursor.nextSibling())
  cursor.parent()
}

const content = computed(() => {
  const headers: string[] = []
  const rows: string[][] = []
  const cursor = parsed.cursor()
  if (cursor.firstChild()) {
    do {
      if (cursor.name === 'TableRow') {
        const newRow: string[] = []
        parseRow(cursor, newRow)
        rows.push(newRow)
      } else if (cursor.name === 'TableHeader') {
        parseRow(cursor, headers)
      } else if (cursor.name !== 'TableDelimiter') {
        console.warn('Unexpected at top level of table:', cursor.name)
      }
    } while (cursor.nextSibling())
  }
  return { headers, rows }
})
</script>

<template>
  <table>
    <thead>
      <tr>
        <th v-for="(cell, c) in content.headers" :key="c" class="cell">
          <MarkdownEditorImpl :content="cell" />
        </th>
      </tr>
    </thead>
    <tbody class="tableBody">
      <tr v-for="(row, r) in content.rows" :key="r" class="row">
        <td v-for="(cell, c) in row" :key="c" class="cell">
          <MarkdownEditorImpl :content="cell" />
        </td>
      </tr>
    </tbody>
  </table>
</template>

<style scoped>
.cell {
  border: 1px solid #dddddd;
}
.tableBody .row:nth-of-type(even) {
  background-color: #f3f3f3;
}
:deep(.cm-line) {
  padding-right: 6px;
}
</style>
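// Illustrative example (not part of this commit): a Markdown table that the component above would
// render. The header row becomes <th> cells and each body row becomes a <tr> of <td> cells.
// | Name  | Type    |
// | ----- | ------- |
// | x     | Integer |
// | label | Text    |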
@ -104,6 +104,21 @@ test.each([
|
||||
alt: '',
|
||||
},
|
||||
},
|
||||
{
|
||||
markdown: '![](<https://www.example.com/The image.avif>)',
|
||||
image: {
|
||||
src: 'https://www.example.com/The image.avif',
|
||||
alt: '',
|
||||
},
|
||||
},
|
||||
{
|
||||
markdown: '![](<https://www.example.com/The image.avif)',
|
||||
image: null,
|
||||
},
|
||||
{
|
||||
markdown: '![](https://www.example.com/The image.avif)',
|
||||
image: null,
|
||||
},
|
||||
{
|
||||
markdown: '![Image](https://www.example.com/image.avif',
|
||||
image: null,
|
||||
|
@ -1,9 +1,22 @@
|
||||
import { createContextStore } from '@/providers'
|
||||
import type { ToValue } from '@/util/reactivity'
|
||||
import { toValue } from 'vue'
|
||||
import { Ref, toValue } from 'vue'
|
||||
import { mapOk, Ok, type Result } from 'ydoc-shared/util/data/result'
|
||||
|
||||
export type TransformUrlResult = Result<{ url: string; dispose?: () => void }>
|
||||
/**
|
||||
* A transformed URL.
|
||||
*
|
||||
 * Once the returned URL is no longer used, the `dispose` callback is called, allowing the release
 * of any resource under that URL.
 *
 * `uploading` is set to true while the image is being uploaded to its target destination
 * (for example, as part of pasting an image).
|
||||
*/
|
||||
export type TransformUrlResult = Result<{
|
||||
url: string
|
||||
dispose?: () => void
|
||||
uploading?: Ref<boolean>
|
||||
}>
|
||||
export type UrlTransformer = (url: string) => Promise<TransformUrlResult>
|
||||
|
||||
export {
|
||||
@ -22,11 +35,17 @@ type Url = string
|
||||
export interface ResourceInfo<T> {
|
||||
location: T
|
||||
uniqueId: ResourceId
|
||||
uploading?: Ref<boolean>
|
||||
}
|
||||
export type ResourceLocator<T> = (url: Url) => Promise<Result<ResourceInfo<T>> | undefined>
|
||||
export type ResourceFetcher<T> = (locator: T) => Promise<Result<Blob>>
|
||||
|
||||
/** TODO: Add docs */
/**
 * Create a {@link UrlTransformer} which fetches and caches the image. Returns a URL created
 * with `URL.createObjectURL`.
 *
 * May be used in cases when the image is not available to the browser via HTTP.
 */
export function fetcherUrlTransformer<ResourceLocation>(
  locateResource: ResourceLocator<ResourceLocation>,
  fetchResource: ResourceFetcher<ResourceLocation>,
|
||||
@ -48,7 +67,7 @@ export function fetcherUrlTransformer<ResourceLocation>(
|
||||
} else if (!resource.ok) {
|
||||
return resource
|
||||
} else {
|
||||
const { uniqueId, location } = resource.value
|
||||
const { uniqueId, location, uploading } = resource.value
|
||||
const result = await (allocatedUrls.get(uniqueId) ?? startFetch(uniqueId, location))
|
||||
if (!result.ok) {
|
||||
// Changes to external state may allow a future attempt to succeed.
|
||||
@ -64,6 +83,7 @@ export function fetcherUrlTransformer<ResourceLocation>(
|
||||
allocatedUrls.delete(uniqueId)
|
||||
}
|
||||
},
|
||||
uploading,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
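A rough usage sketch of `fetcherUrlTransformer` (the locator, fetcher, and the `enso://` scheme below are hypothetical, and the `uniqueId` typing is simplified; only the transformer itself comes from this file):

const transformImageUrl = fetcherUrlTransformer(
  // Hypothetical locator: handle only a custom scheme; returning `undefined` leaves the URL as-is.
  async (url) =>
    url.startsWith('enso://') ? Ok({ location: url, uniqueId: url as ResourceId }) : undefined,
  // Hypothetical fetcher: load the located resource as a Blob.
  async (location) => {
    const response = await fetch(location.replace('enso://', 'https://example.com/'))
    return response.ok ? Ok(await response.blob()) : Err(`Failed to fetch ${location}`)
  },
)
const transformed = await transformImageUrl('enso://images/diagram.png')
if (transformed.ok) console.log(transformed.value.url) // a blob: URL from URL.createObjectURL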
@ -1,9 +1,98 @@
|
||||
import { markdownDecorators } from '@/components/MarkdownEditor/markdown/decoration'
|
||||
import { markdown } from '@/components/MarkdownEditor/markdown/parse'
|
||||
import type { VueHost } from '@/components/VueComponentHost.vue'
|
||||
import { markdown as markdownExtension } from '@codemirror/lang-markdown'
|
||||
import {
|
||||
defineLanguageFacet,
|
||||
foldNodeProp,
|
||||
foldService,
|
||||
indentNodeProp,
|
||||
Language,
|
||||
languageDataProp,
|
||||
syntaxTree,
|
||||
} from '@codemirror/language'
|
||||
import type { Extension } from '@codemirror/state'
|
||||
import { NodeProp, type NodeType, type Parser, type SyntaxNode } from '@lezer/common'
|
||||
import { markdownParser } from 'ydoc-shared/ast/ensoMarkdown'
|
||||
|
||||
/** Markdown extension, with customizations for Enso. */
|
||||
/** CodeMirror Extension for the Enso Markdown dialect. */
|
||||
export function ensoMarkdown({ vueHost }: { vueHost: VueHost }): Extension {
|
||||
return [markdown(), markdownDecorators({ vueHost })]
|
||||
return [
|
||||
markdownExtension({
|
||||
base: mkLang(
|
||||
markdownParser.configure([
|
||||
commonmarkCodemirrorLanguageExtension,
|
||||
tableCodemirrorLanguageExtension,
|
||||
]),
|
||||
),
|
||||
}),
|
||||
markdownDecorators({ vueHost }),
|
||||
]
|
||||
}
|
||||
|
||||
function mkLang(parser: Parser) {
|
||||
return new Language(data, parser, [headerIndent], 'markdown')
|
||||
}
|
||||
|
||||
const data = defineLanguageFacet({ commentTokens: { block: { open: '<!--', close: '-->' } } })
|
||||
|
||||
const headingProp = new NodeProp<number>()
|
||||
|
||||
const commonmarkCodemirrorLanguageExtension = {
|
||||
props: [
|
||||
foldNodeProp.add((type) => {
|
||||
return !type.is('Block') || type.is('Document') || isHeading(type) != null || isList(type) ?
|
||||
undefined
|
||||
: (tree, state) => ({ from: state.doc.lineAt(tree.from).to, to: tree.to })
|
||||
}),
|
||||
headingProp.add(isHeading),
|
||||
indentNodeProp.add({
|
||||
Document: () => null,
|
||||
}),
|
||||
languageDataProp.add({
|
||||
Document: data,
|
||||
}),
|
||||
],
|
||||
}
|
||||
|
||||
function isHeading(type: NodeType) {
|
||||
const match = /^(?:ATX|Setext)Heading(\d)$/.exec(type.name)
|
||||
return match ? +match[1]! : undefined
|
||||
}
|
||||
|
||||
function isList(type: NodeType) {
|
||||
return type.name == 'OrderedList' || type.name == 'BulletList'
|
||||
}
|
||||
|
||||
function findSectionEnd(headerNode: SyntaxNode, level: number) {
|
||||
let last = headerNode
|
||||
for (;;) {
|
||||
const next = last.nextSibling
|
||||
let heading
|
||||
if (!next || ((heading = isHeading(next.type)) != null && heading <= level)) break
|
||||
last = next
|
||||
}
|
||||
return last.to
|
||||
}
|
||||
|
||||
const headerIndent = foldService.of((state, start, end) => {
|
||||
for (
|
||||
let node: SyntaxNode | null = syntaxTree(state).resolveInner(end, -1);
|
||||
node;
|
||||
node = node.parent
|
||||
) {
|
||||
if (node.from < start) break
|
||||
const heading = node.type.prop(headingProp)
|
||||
if (heading == null) continue
|
||||
const upto = findSectionEnd(node, heading)
|
||||
if (upto > end) return { from: end, to: upto }
|
||||
}
|
||||
return null
|
||||
})
|
||||
|
||||
const tableCodemirrorLanguageExtension = {
|
||||
props: [
|
||||
foldNodeProp.add({
|
||||
Table: (tree, state) => ({ from: state.doc.lineAt(tree.from).to, to: tree.to }),
|
||||
}),
|
||||
],
|
||||
}
|
||||
|
@ -1,4 +1,5 @@
|
||||
import DocumentationImage from '@/components/MarkdownEditor/DocumentationImage.vue'
|
||||
import TableEditor from '@/components/MarkdownEditor/TableEditor.vue'
|
||||
import type { VueHost } from '@/components/VueComponentHost.vue'
|
||||
import { syntaxTree } from '@codemirror/language'
|
||||
import { type EditorSelection, type Extension, RangeSetBuilder, type Text } from '@codemirror/state'
|
||||
@ -19,6 +20,7 @@ export function markdownDecorators({ vueHost }: { vueHost: VueHost }): Extension
|
||||
const stateDecorator = new TreeStateDecorator(vueHost, [
|
||||
decorateImageWithClass,
|
||||
decorateImageWithRendered,
|
||||
decorateTable,
|
||||
])
|
||||
const stateDecoratorExt = EditorView.decorations.compute(['doc'], (state) =>
|
||||
stateDecorator.decorate(syntaxTree(state), state.doc),
|
||||
@ -144,12 +146,8 @@ function parseLinkLike(node: SyntaxNode, doc: Text) {
|
||||
if (!textOpen) return
|
||||
const textClose = textOpen.nextSibling // ]
|
||||
if (!textClose) return
|
||||
const urlOpen = textClose.nextSibling // (
|
||||
// The parser accepts partial links such as `[Missing url]`.
|
||||
if (!urlOpen) return
|
||||
const urlNode = urlOpen.nextSibling
|
||||
// If the URL is empty, this will be the closing 'LinkMark'.
|
||||
if (urlNode?.name !== 'URL') return
|
||||
const urlNode = findNextSiblingNamed(textClose, 'URL')
|
||||
if (!urlNode) return
|
||||
return {
|
||||
textFrom: textOpen.to,
|
||||
textTo: textClose.from,
|
||||
@ -268,3 +266,76 @@ class ImageWidget extends WidgetType {
|
||||
this.container = undefined
|
||||
}
|
||||
}
|
||||
|
||||
function findNextSiblingNamed(node: SyntaxNode, name: string) {
|
||||
for (let sibling = node.nextSibling; sibling != null; sibling = sibling.nextSibling) {
|
||||
if (sibling.name === name) {
|
||||
return sibling
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// === Tables ===
|
||||
|
||||
function decorateTable(
|
||||
nodeRef: SyntaxNodeRef,
|
||||
doc: Text,
|
||||
emitDecoration: (from: number, to: number, deco: Decoration) => void,
|
||||
vueHost: VueHost,
|
||||
) {
|
||||
if (nodeRef.name === 'Table') {
|
||||
const source = doc //.slice(nodeRef.from, nodeRef.to)
|
||||
const parsed = nodeRef.node
|
||||
const widget = new TableWidget({ source, parsed }, vueHost)
|
||||
emitDecoration(
|
||||
nodeRef.from,
|
||||
nodeRef.to,
|
||||
Decoration.replace({
|
||||
widget,
|
||||
// Ensure the cursor is drawn relative to the content before the widget.
|
||||
// If it is drawn relative to the widget, it will be hidden when the widget is hidden (i.e. during editing).
|
||||
side: 1,
|
||||
block: true,
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
class TableWidget extends WidgetType {
|
||||
private container: HTMLElement | undefined
|
||||
private vueHostRegistration: { unregister: () => void } | undefined
|
||||
|
||||
constructor(
|
||||
private readonly props: { source: Text; parsed: SyntaxNode },
|
||||
private readonly vueHost: VueHost,
|
||||
) {
|
||||
super()
|
||||
}
|
||||
|
||||
override get estimatedHeight() {
|
||||
return -1
|
||||
}
|
||||
|
||||
override toDOM(): HTMLElement {
|
||||
if (!this.container) {
|
||||
const container = markRaw(document.createElement('div'))
|
||||
container.className = 'cm-table-editor'
|
||||
this.vueHostRegistration = this.vueHost.register(
|
||||
() =>
|
||||
h(TableEditor, {
|
||||
source: this.props.source,
|
||||
parsed: this.props.parsed,
|
||||
onEdit: () => console.log('onEdit'),
|
||||
}),
|
||||
container,
|
||||
)
|
||||
this.container = container
|
||||
}
|
||||
return this.container
|
||||
}
|
||||
|
||||
override destroy() {
|
||||
this.vueHostRegistration?.unregister()
|
||||
this.container = undefined
|
||||
}
|
||||
}
|
||||
|
@ -26,4 +26,16 @@ defineExpose({ rootElement })
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
:deep(.cm-scroller) {
|
||||
/* Prevent touchpad back gesture, which can be triggered while panning. */
|
||||
overscroll-behavior: none;
|
||||
}
|
||||
|
||||
:deep(.cm-editor) {
|
||||
position: relative;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
outline: none;
|
||||
}
|
||||
</style>
|
@ -0,0 +1,28 @@
|
||||
import { EditorSelection } from '@codemirror/state'
|
||||
import { type EditorView } from '@codemirror/view'
|
||||
|
||||
/** Returns an API for the editor content, used by the integration tests. */
|
||||
export function testSupport(editorView: EditorView) {
|
||||
return {
|
||||
textContent: () => editorView.state.doc.toString(),
|
||||
textLength: () => editorView.state.doc.length,
|
||||
indexOf: (substring: string, position?: number) =>
|
||||
editorView.state.doc.toString().indexOf(substring, position),
|
||||
placeCursor: (at: number) => {
|
||||
editorView.dispatch({ selection: EditorSelection.create([EditorSelection.cursor(at)]) })
|
||||
},
|
||||
select: (from: number, to: number) => {
|
||||
editorView.dispatch({ selection: EditorSelection.create([EditorSelection.range(from, to)]) })
|
||||
},
|
||||
selectAndReplace: (from: number, to: number, replaceWith: string) => {
|
||||
editorView.dispatch({ selection: EditorSelection.create([EditorSelection.range(from, to)]) })
|
||||
editorView.dispatch(editorView.state.update(editorView.state.replaceSelection(replaceWith)))
|
||||
},
|
||||
writeText: (text: string, from: number) => {
|
||||
editorView.dispatch({
|
||||
changes: [{ from: from, insert: text }],
|
||||
selection: { anchor: from + text.length },
|
||||
})
|
||||
},
|
||||
}
|
||||
}
|
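A hedged example of how an integration test might drive this API (the `editorView` handle comes from whatever harness hosts the editor; it is not defined in this file):

const api = testSupport(editorView)
const offset = api.indexOf('TODO')
if (offset >= 0) {
  // Replace the first occurrence of 'TODO', then append a note at the end of the document.
  api.selectAndReplace(offset, offset + 'TODO'.length, 'DONE')
  api.writeText(' (updated by test)', api.textLength())
}
console.log(api.textContent())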
@ -0,0 +1,65 @@
|
||||
/**
|
||||
* @file CodeMirror extension for synchronizing with a Yjs Text object.
|
||||
* Based on <https://github.com/yjs/y-codemirror.next>. Initial changes from upstream:
|
||||
* - Translated from JSDoc-typed JS to Typescript.
|
||||
* - Refactored for stricter typing.
|
||||
* - Changes to match project code style.
|
||||
*/
|
||||
|
||||
import * as cmView from '@codemirror/view'
|
||||
import { type Awareness } from 'y-protocols/awareness.js'
|
||||
import * as Y from 'yjs'
|
||||
import { YRange } from './y-range'
|
||||
import { yRemoteSelections, yRemoteSelectionsTheme } from './y-remote-selections'
|
||||
import { YSyncConfig, ySync, ySyncAnnotation, ySyncFacet } from './y-sync'
|
||||
import {
|
||||
YUndoManagerConfig,
|
||||
redo,
|
||||
undo,
|
||||
yUndoManager,
|
||||
yUndoManagerFacet,
|
||||
yUndoManagerKeymap,
|
||||
} from './y-undomanager'
|
||||
export {
|
||||
YRange,
|
||||
YSyncConfig,
|
||||
yRemoteSelections,
|
||||
yRemoteSelectionsTheme,
|
||||
ySync,
|
||||
ySyncAnnotation,
|
||||
ySyncFacet,
|
||||
yUndoManagerKeymap,
|
||||
}
|
||||
|
||||
/* CodeMirror Extension for synchronizing the editor state with a {@link Y.Text}. */
|
||||
export const yCollab = (
|
||||
ytext: Y.Text & { doc: Y.Doc },
|
||||
awareness: Awareness | null,
|
||||
{
|
||||
undoManager = new Y.UndoManager(ytext),
|
||||
}: {
|
||||
/** Set to false to disable the undo-redo plugin */
|
||||
undoManager?: Y.UndoManager | false
|
||||
} = {},
|
||||
) => {
|
||||
const ySyncConfig = new YSyncConfig(ytext, awareness)
|
||||
const plugins = [ySyncFacet.of(ySyncConfig), ySync]
|
||||
if (awareness) {
|
||||
plugins.push(yRemoteSelectionsTheme, yRemoteSelections)
|
||||
}
|
||||
if (undoManager !== false) {
|
||||
// By default, only track changes that are produced by the sync plugin (local edits)
|
||||
plugins.push(
|
||||
yUndoManagerFacet.of(new YUndoManagerConfig(undoManager)),
|
||||
yUndoManager,
|
||||
cmView.EditorView.domEventHandlers({
|
||||
beforeinput(e, view) {
|
||||
if (e.inputType === 'historyUndo') return undo(view)
|
||||
if (e.inputType === 'historyRedo') return redo(view)
|
||||
return false
|
||||
},
|
||||
}),
|
||||
)
|
||||
}
|
||||
return plugins
|
||||
}
|
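A minimal sketch of wiring this into an editor (the document setup below is an assumption; only `yCollab` and `yUndoManagerKeymap` come from this file):

import { EditorState } from '@codemirror/state'
import { EditorView, keymap } from '@codemirror/view'
import * as Y from 'yjs'

const ydoc = new Y.Doc()
const ytext = ydoc.getText('content') as Y.Text & { doc: Y.Doc }
const view = new EditorView({
  state: EditorState.create({
    doc: ytext.toString(),
    // Passing `null` for awareness simply disables remote selection rendering.
    extensions: [keymap.of(yUndoManagerKeymap), yCollab(ytext, null)],
  }),
  parent: document.body,
})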
@ -0,0 +1,32 @@
|
||||
import * as Y from 'yjs'
|
||||
|
||||
/**
|
||||
* Defines a range on text using relative positions that can be transformed back to
|
||||
* absolute positions. (https://docs.yjs.dev/api/relative-positions)
|
||||
*/
|
||||
export class YRange {
|
||||
/** TODO: Add docs */
|
||||
constructor(
|
||||
readonly yanchor: Y.RelativePosition,
|
||||
readonly yhead: Y.RelativePosition,
|
||||
) {
|
||||
this.yanchor = yanchor
|
||||
this.yhead = yhead
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
toJSON() {
|
||||
return {
|
||||
yanchor: Y.relativePositionToJSON(this.yanchor),
|
||||
yhead: Y.relativePositionToJSON(this.yhead),
|
||||
}
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
static fromJSON(json: { yanchor: unknown; yhead: unknown }) {
|
||||
return new YRange(
|
||||
Y.createRelativePositionFromJSON(json.yanchor),
|
||||
Y.createRelativePositionFromJSON(json.yhead),
|
||||
)
|
||||
}
|
||||
}
|
@ -0,0 +1,264 @@
|
||||
import * as cmState from '@codemirror/state'
|
||||
import * as cmView from '@codemirror/view'
|
||||
import * as dom from 'lib0/dom'
|
||||
import * as math from 'lib0/math'
|
||||
import * as pair from 'lib0/pair'
|
||||
import { Awareness } from 'y-protocols/awareness.js'
|
||||
import { assert } from 'ydoc-shared/util/assert'
|
||||
import * as Y from 'yjs'
|
||||
import { type YSyncConfig, ySyncFacet } from './y-sync'
|
||||
|
||||
export const yRemoteSelectionsTheme = cmView.EditorView.baseTheme({
|
||||
'.cm-ySelection': {},
|
||||
'.cm-yLineSelection': {
|
||||
padding: 0,
|
||||
margin: '0px 2px 0px 4px',
|
||||
},
|
||||
'.cm-ySelectionCaret': {
|
||||
position: 'relative',
|
||||
borderLeft: '1px solid black',
|
||||
borderRight: '1px solid black',
|
||||
marginLeft: '-1px',
|
||||
marginRight: '-1px',
|
||||
boxSizing: 'border-box',
|
||||
display: 'inline',
|
||||
},
|
||||
'.cm-ySelectionCaretDot': {
|
||||
borderRadius: '50%',
|
||||
position: 'absolute',
|
||||
width: '.4em',
|
||||
height: '.4em',
|
||||
top: '-.2em',
|
||||
left: '-.2em',
|
||||
backgroundColor: 'inherit',
|
||||
transition: 'transform .3s ease-in-out',
|
||||
boxSizing: 'border-box',
|
||||
},
|
||||
'.cm-ySelectionCaret:hover > .cm-ySelectionCaretDot': {
|
||||
transformOrigin: 'bottom center',
|
||||
transform: 'scale(0)',
|
||||
},
|
||||
'.cm-ySelectionInfo': {
|
||||
position: 'absolute',
|
||||
top: '-1.05em',
|
||||
left: '-1px',
|
||||
fontSize: '.75em',
|
||||
fontFamily: 'serif',
|
||||
fontStyle: 'normal',
|
||||
fontWeight: 'normal',
|
||||
lineHeight: 'normal',
|
||||
userSelect: 'none',
|
||||
color: 'white',
|
||||
paddingLeft: '2px',
|
||||
paddingRight: '2px',
|
||||
zIndex: 101,
|
||||
transition: 'opacity .3s ease-in-out',
|
||||
backgroundColor: 'inherit',
|
||||
// these should be separate
|
||||
opacity: 0,
|
||||
transitionDelay: '0s',
|
||||
whiteSpace: 'nowrap',
|
||||
},
|
||||
'.cm-ySelectionCaret:hover > .cm-ySelectionInfo': {
|
||||
opacity: 1,
|
||||
transitionDelay: '0s',
|
||||
},
|
||||
})
|
||||
|
||||
/**
|
||||
* @todo specify the users that actually changed. Currently, we recalculate positions for every user.
|
||||
*/
|
||||
const yRemoteSelectionsAnnotation = cmState.Annotation.define<number[]>()
|
||||
|
||||
class YRemoteCaretWidget extends cmView.WidgetType {
|
||||
constructor(
|
||||
readonly color: string,
|
||||
readonly name: string,
|
||||
) {
|
||||
super()
|
||||
}
|
||||
|
||||
toDOM() {
|
||||
return dom.element(
|
||||
'span',
|
||||
[
|
||||
pair.create('class', 'cm-ySelectionCaret'),
|
||||
pair.create('style', `background-color: ${this.color}; border-color: ${this.color}`),
|
||||
],
|
||||
[
|
||||
dom.text('\u2060'),
|
||||
dom.element('div', [pair.create('class', 'cm-ySelectionCaretDot')]),
|
||||
dom.text('\u2060'),
|
||||
dom.element('div', [pair.create('class', 'cm-ySelectionInfo')], [dom.text(this.name)]),
|
||||
dom.text('\u2060'),
|
||||
],
|
||||
) as HTMLElement
|
||||
}
|
||||
|
||||
override eq(widget: unknown) {
|
||||
assert(widget instanceof YRemoteCaretWidget)
|
||||
return widget.color === this.color
|
||||
}
|
||||
|
||||
compare(widget: unknown) {
|
||||
assert(widget instanceof YRemoteCaretWidget)
|
||||
return widget.color === this.color
|
||||
}
|
||||
|
||||
override updateDOM() {
|
||||
return false
|
||||
}
|
||||
|
||||
override get estimatedHeight() {
|
||||
return -1
|
||||
}
|
||||
|
||||
override ignoreEvent() {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export class YRemoteSelectionsPluginValue {
|
||||
private readonly conf: YSyncConfig
|
||||
private readonly _awareness: Awareness
|
||||
decorations: cmView.DecorationSet
|
||||
private readonly _listener: ({ added, updated, removed }: any) => void
|
||||
|
||||
/** TODO: Add docs */
|
||||
constructor(view: cmView.EditorView) {
|
||||
this.conf = view.state.facet(ySyncFacet)
|
||||
assert(this.conf.awareness != null)
|
||||
this._listener = ({ added, updated, removed }: any) => {
|
||||
const clients = added.concat(updated).concat(removed)
|
||||
if (clients.findIndex((id: any) => id !== this._awareness.doc.clientID) >= 0) {
|
||||
view.dispatch({ annotations: [yRemoteSelectionsAnnotation.of([])] })
|
||||
}
|
||||
}
|
||||
this._awareness = this.conf.awareness
|
||||
this._awareness.on('change', this._listener)
|
||||
this.decorations = cmState.RangeSet.of([])
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
destroy() {
|
||||
this._awareness.off('change', this._listener)
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
update(update: cmView.ViewUpdate) {
|
||||
const ytext = this.conf.ytext
|
||||
const ydoc = ytext.doc
|
||||
const awareness = this._awareness
|
||||
const decorations: cmState.Range<cmView.Decoration>[] = []
|
||||
const localAwarenessState = this._awareness.getLocalState()
|
||||
|
||||
// set local awareness state (update cursors)
|
||||
if (localAwarenessState != null) {
|
||||
const hasFocus = update.view.hasFocus && update.view.dom.ownerDocument.hasFocus()
|
||||
const sel = hasFocus ? update.state.selection.main : null
|
||||
const currentAnchor =
|
||||
localAwarenessState.cursor == null ?
|
||||
null
|
||||
: Y.createRelativePositionFromJSON(localAwarenessState.cursor.anchor)
|
||||
const currentHead =
|
||||
localAwarenessState.cursor == null ?
|
||||
null
|
||||
: Y.createRelativePositionFromJSON(localAwarenessState.cursor.head)
|
||||
|
||||
if (sel != null) {
|
||||
const anchor = Y.createRelativePositionFromTypeIndex(ytext, sel.anchor)
|
||||
const head = Y.createRelativePositionFromTypeIndex(ytext, sel.head)
|
||||
if (
|
||||
localAwarenessState.cursor == null ||
|
||||
!Y.compareRelativePositions(currentAnchor, anchor) ||
|
||||
!Y.compareRelativePositions(currentHead, head)
|
||||
) {
|
||||
awareness.setLocalStateField('cursor', {
|
||||
anchor,
|
||||
head,
|
||||
})
|
||||
}
|
||||
} else if (localAwarenessState.cursor != null && hasFocus) {
|
||||
awareness.setLocalStateField('cursor', null)
|
||||
}
|
||||
}
|
||||
|
||||
// update decorations (remote selections)
|
||||
awareness.getStates().forEach((state, clientid) => {
|
||||
if (clientid === awareness.doc.clientID) {
|
||||
return
|
||||
}
|
||||
const cursor = state.cursor
|
||||
if (cursor == null || cursor.anchor == null || cursor.head == null) {
|
||||
return
|
||||
}
|
||||
const anchor = Y.createAbsolutePositionFromRelativePosition(cursor.anchor, ydoc)
|
||||
const head = Y.createAbsolutePositionFromRelativePosition(cursor.head, ydoc)
|
||||
if (anchor == null || head == null || anchor.type !== ytext || head.type !== ytext) {
|
||||
return
|
||||
}
|
||||
const { color = '#30bced', name = 'Anonymous' } = state.user || {}
|
||||
const colorLight = (state.user && state.user.colorLight) || color + '33'
|
||||
const start = math.min(anchor.index, head.index)
|
||||
const end = math.max(anchor.index, head.index)
|
||||
const startLine = update.view.state.doc.lineAt(start)
|
||||
const endLine = update.view.state.doc.lineAt(end)
|
||||
if (startLine.number === endLine.number) {
|
||||
// selected content in a single line.
|
||||
decorations.push({
|
||||
from: start,
|
||||
to: end,
|
||||
value: cmView.Decoration.mark({
|
||||
attributes: { style: `background-color: ${colorLight}` },
|
||||
class: 'cm-ySelection',
|
||||
}),
|
||||
})
|
||||
} else {
|
||||
// selected content in multiple lines
|
||||
// first, render text-selection in the first line
|
||||
decorations.push({
|
||||
from: start,
|
||||
to: startLine.from + startLine.length,
|
||||
value: cmView.Decoration.mark({
|
||||
attributes: { style: `background-color: ${colorLight}` },
|
||||
class: 'cm-ySelection',
|
||||
}),
|
||||
})
|
||||
// render text-selection in the last line
|
||||
decorations.push({
|
||||
from: endLine.from,
|
||||
to: end,
|
||||
value: cmView.Decoration.mark({
|
||||
attributes: { style: `background-color: ${colorLight}` },
|
||||
class: 'cm-ySelection',
|
||||
}),
|
||||
})
|
||||
for (let i = startLine.number + 1; i < endLine.number; i++) {
|
||||
const linePos = update.view.state.doc.line(i).from
|
||||
decorations.push({
|
||||
from: linePos,
|
||||
to: linePos,
|
||||
value: cmView.Decoration.line({
|
||||
attributes: { style: `background-color: ${colorLight}`, class: 'cm-yLineSelection' },
|
||||
}),
|
||||
})
|
||||
}
|
||||
}
|
||||
decorations.push({
|
||||
from: head.index,
|
||||
to: head.index,
|
||||
value: cmView.Decoration.widget({
|
||||
side: head.index - anchor.index > 0 ? -1 : 1, // the local cursor should be rendered outside the remote selection
|
||||
block: false,
|
||||
widget: new YRemoteCaretWidget(color, name),
|
||||
}),
|
||||
})
|
||||
})
|
||||
this.decorations = cmView.Decoration.set(decorations, true)
|
||||
}
|
||||
}
|
||||
|
||||
export const yRemoteSelections = cmView.ViewPlugin.fromClass(YRemoteSelectionsPluginValue, {
|
||||
decorations: (v) => v.decorations,
|
||||
})
|
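The decorations above read each peer's `state.user` from awareness; a sketch of how a client would publish that field (the field names follow what this plugin reads, they are not an API defined here):

awareness.setLocalStateField('user', {
  name: 'Alice',
  color: '#30bced',
  // Selection background; the plugin falls back to `color + '33'` when this is absent.
  colorLight: '#30bced33',
})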
156
app/gui/src/project-view/components/codemirror/yCollab/y-sync.ts
Normal file
@ -0,0 +1,156 @@
|
||||
import * as cmState from '@codemirror/state'
|
||||
import * as cmView from '@codemirror/view'
|
||||
import { type Awareness } from 'y-protocols/awareness.js'
|
||||
import { assertDefined } from 'ydoc-shared/util/assert'
|
||||
import * as Y from 'yjs'
|
||||
import { YRange } from './y-range'
|
||||
|
||||
/** TODO: Add docs */
|
||||
export class YSyncConfig {
|
||||
readonly undoManager: Y.UndoManager
|
||||
readonly ytext: Y.Text & { doc: Y.Doc }
|
||||
|
||||
/** TODO: Add docs */
|
||||
constructor(
|
||||
ytext: Y.Text & { doc: Y.Doc },
|
||||
readonly awareness: Awareness | null,
|
||||
) {
|
||||
this.ytext = ytext as Y.Text & { doc: Y.Doc }
|
||||
this.undoManager = new Y.UndoManager(ytext)
|
||||
}
|
||||
|
||||
/**
 * Helper function to transform an absolute index position to a Yjs-based relative position
 * (https://docs.yjs.dev/api/relative-positions).
 *
 * A relative position can be transformed back to an absolute position even after the document has
 * changed; the position is adapted automatically, without any explicit position transformations.
 * Relative positions are computed based on the internal Yjs document model. Peers that share
 * content through Yjs are guaranteed that their positions will always stay in sync when using
 * relative positions.
 *
 * ```js
 * import { ySyncFacet } from 'y-codemirror'
 *
 * // ...
 * const ysync = view.state.facet(ySyncFacet)
 * // transform an absolute index position to a ypos
 * const ypos = ysync.toYPos(3)
 * // transform the ypos back to an absolute position
 * ysync.fromYPos(ypos) // => 3
 * ```
 *
 * It cannot be guaranteed that absolute index positions stay in sync between peers.
 * This might lead to undesired behavior when implementing features that require all peers to see
 * the same marked range (e.g. a comment plugin).
 */
toYPos(pos: number, assoc = 0) {
  return Y.createRelativePositionFromTypeIndex(this.ytext, pos, assoc)
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
fromYPos(rpos: Y.RelativePosition | object) {
|
||||
const pos = Y.createAbsolutePositionFromRelativePosition(
|
||||
Y.createRelativePositionFromJSON(rpos),
|
||||
this.ytext.doc,
|
||||
)
|
||||
if (pos == null || pos.type !== this.ytext) {
|
||||
throw new Error(
|
||||
'[y-codemirror] The position you want to retrieve was created by a different document',
|
||||
)
|
||||
}
|
||||
return {
|
||||
pos: pos.index,
|
||||
assoc: pos.assoc,
|
||||
}
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
toYRange(range: cmState.SelectionRange) {
|
||||
const assoc = range.assoc
|
||||
const yanchor = this.toYPos(range.anchor, assoc)
|
||||
const yhead = this.toYPos(range.head, assoc)
|
||||
return new YRange(yanchor, yhead)
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
fromYRange(yrange: YRange) {
|
||||
const anchor = this.fromYPos(yrange.yanchor)
|
||||
const head = this.fromYPos(yrange.yhead)
|
||||
if (anchor.pos === head.pos) {
|
||||
return cmState.EditorSelection.cursor(head.pos, head.assoc)
|
||||
}
|
||||
return cmState.EditorSelection.range(anchor.pos, head.pos)
|
||||
}
|
||||
}
|
||||
|
||||
export const ySyncFacet = cmState.Facet.define<YSyncConfig, YSyncConfig>({
|
||||
combine(inputs) {
|
||||
return inputs[inputs.length - 1]!
|
||||
},
|
||||
})
|
||||
|
||||
export const ySyncAnnotation = cmState.Annotation.define<YSyncConfig>()
|
||||
|
||||
class YSyncPluginValue implements cmView.PluginValue {
|
||||
private readonly _ytext: Y.Text & { doc: Y.Doc }
|
||||
private readonly conf: YSyncConfig
|
||||
private readonly _observer: (event: Y.YTextEvent, tr: Y.Transaction) => void
|
||||
|
||||
constructor(private readonly view: cmView.EditorView) {
|
||||
this.conf = view.state.facet(ySyncFacet)
|
||||
this._observer = (event: Y.YTextEvent, tr: Y.Transaction) => {
|
||||
if (tr.origin !== this.conf) {
|
||||
const delta = event.delta
|
||||
const changes: { from: number; to: number; insert: string }[] = []
|
||||
let pos = 0
|
||||
for (const d of delta) {
|
||||
if (d.insert != null) {
|
||||
changes.push({ from: pos, to: pos, insert: d.insert as any })
|
||||
} else if (d.delete != null) {
|
||||
changes.push({ from: pos, to: pos + d.delete, insert: '' })
|
||||
pos += d.delete
|
||||
} else {
|
||||
assertDefined(d.retain)
|
||||
pos += d.retain
|
||||
}
|
||||
}
|
||||
view.dispatch({ changes, annotations: [ySyncAnnotation.of(this.conf)] })
|
||||
}
|
||||
}
|
||||
this._ytext = this.conf.ytext
|
||||
this._ytext.observe(this._observer)
|
||||
}
|
||||
|
||||
update(update: cmView.ViewUpdate) {
|
||||
if (
|
||||
!update.docChanged ||
|
||||
(update.transactions.length > 0 &&
|
||||
update.transactions[0]!.annotation(ySyncAnnotation) === this.conf)
|
||||
) {
|
||||
return
|
||||
}
|
||||
const ytext = this.conf.ytext
|
||||
ytext.doc.transact(() => {
|
||||
/**
|
||||
* This variable adjusts the fromA position to the current position in the Y.Text type.
|
||||
*/
|
||||
let adj = 0
|
||||
update.changes.iterChanges((fromA, toA, fromB, toB, insert) => {
|
||||
const insertText = insert.sliceString(0, insert.length, '\n')
|
||||
if (fromA !== toA) {
|
||||
ytext.delete(fromA + adj, toA - fromA)
|
||||
}
|
||||
if (insertText.length > 0) {
|
||||
ytext.insert(fromA + adj, insertText)
|
||||
}
|
||||
adj += insertText.length - (toA - fromA)
|
||||
})
|
||||
}, this.conf)
|
||||
}
|
||||
|
||||
destroy() {
|
||||
this._ytext.unobserve(this._observer)
|
||||
}
|
||||
}
|
||||
|
||||
export const ySync = cmView.ViewPlugin.fromClass(YSyncPluginValue)
|
@ -0,0 +1,138 @@
|
||||
import { type StackItemEvent } from '@/components/codemirror/yCollab/yjsTypes'
|
||||
import * as cmState from '@codemirror/state'
|
||||
import * as cmView from '@codemirror/view'
|
||||
import { createMutex } from 'lib0/mutex'
|
||||
import * as Y from 'yjs'
|
||||
import { type YRange } from './y-range'
|
||||
import { ySyncAnnotation, type YSyncConfig, ySyncFacet } from './y-sync'
|
||||
|
||||
/** TODO: Add docs */
|
||||
export class YUndoManagerConfig {
|
||||
/** TODO: Add docs */
|
||||
constructor(readonly undoManager: Y.UndoManager) {}
|
||||
|
||||
/** TODO: Add docs */
|
||||
addTrackedOrigin(origin: unknown) {
|
||||
this.undoManager.addTrackedOrigin(origin)
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
removeTrackedOrigin(origin: unknown) {
|
||||
this.undoManager.removeTrackedOrigin(origin)
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns Whether a change was undone.
|
||||
*/
|
||||
undo(): boolean {
|
||||
return this.undoManager.undo() != null
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns Whether a change was redone.
|
||||
*/
|
||||
redo(): boolean {
|
||||
return this.undoManager.redo() != null
|
||||
}
|
||||
}
|
||||
|
||||
export const yUndoManagerFacet = cmState.Facet.define<YUndoManagerConfig, YUndoManagerConfig>({
|
||||
combine(inputs) {
|
||||
return inputs[inputs.length - 1]!
|
||||
},
|
||||
})
|
||||
|
||||
export const yUndoManagerAnnotation = cmState.Annotation.define<YUndoManagerConfig>()
|
||||
|
||||
class YUndoManagerPluginValue implements cmView.PluginValue {
|
||||
private readonly conf: YUndoManagerConfig
|
||||
private readonly syncConf: YSyncConfig
|
||||
private _beforeChangeSelection: null | YRange
|
||||
private readonly _undoManager: Y.UndoManager
|
||||
private readonly _mux: (cb: () => void, elseCb?: (() => void) | undefined) => any
|
||||
private readonly _storeSelection: () => void
|
||||
private readonly _onStackItemAdded: (event: StackItemEvent) => void
|
||||
private readonly _onStackItemPopped: (event: StackItemEvent) => void
|
||||
|
||||
constructor(readonly view: cmView.EditorView) {
|
||||
this.conf = view.state.facet(yUndoManagerFacet)
|
||||
this._undoManager = this.conf.undoManager
|
||||
this.syncConf = view.state.facet(ySyncFacet)
|
||||
this._beforeChangeSelection = null
|
||||
this._mux = createMutex()
|
||||
|
||||
this._onStackItemAdded = ({ stackItem, changedParentTypes }: StackItemEvent) => {
|
||||
// only store metadata if this type was affected
|
||||
if (
|
||||
changedParentTypes.has(this.syncConf.ytext as any) &&
|
||||
this._beforeChangeSelection &&
|
||||
!stackItem.meta.has(this)
|
||||
) {
|
||||
// do not overwrite previous stored selection
|
||||
stackItem.meta.set(this, this._beforeChangeSelection)
|
||||
}
|
||||
}
|
||||
this._onStackItemPopped = ({ stackItem }: StackItemEvent) => {
|
||||
const sel = stackItem.meta.get(this)
|
||||
if (sel) {
|
||||
const selection = this.syncConf.fromYRange(sel)
|
||||
view.dispatch(
|
||||
view.state.update({
|
||||
selection,
|
||||
effects: [cmView.EditorView.scrollIntoView(selection)],
|
||||
}),
|
||||
)
|
||||
this._storeSelection()
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Do this without mutex, simply use the sync annotation
|
||||
*/
|
||||
this._storeSelection = () => {
|
||||
// store the selection before the change is applied so we can restore it with the undo manager.
|
||||
this._beforeChangeSelection = this.syncConf.toYRange(this.view.state.selection.main)
|
||||
}
|
||||
this._undoManager.on('stack-item-added', this._onStackItemAdded)
|
||||
this._undoManager.on('stack-item-popped', this._onStackItemPopped)
|
||||
this._undoManager.addTrackedOrigin(this.syncConf)
|
||||
}
|
||||
|
||||
update(update: cmView.ViewUpdate) {
|
||||
if (
|
||||
update.selectionSet &&
|
||||
(update.transactions.length === 0 ||
|
||||
update.transactions[0]!.annotation(ySyncAnnotation) !== this.syncConf)
|
||||
) {
|
||||
// This only works when YUndoManagerPlugin is included before the sync plugin
|
||||
this._storeSelection()
|
||||
}
|
||||
}
|
||||
|
||||
destroy() {
|
||||
this._undoManager.off('stack-item-added', this._onStackItemAdded)
|
||||
this._undoManager.off('stack-item-popped', this._onStackItemPopped)
|
||||
this._undoManager.removeTrackedOrigin(this.syncConf)
|
||||
}
|
||||
}
|
||||
export const yUndoManager = cmView.ViewPlugin.fromClass(YUndoManagerPluginValue)
|
||||
|
||||
export const undo: cmState.StateCommand = ({ state }) =>
|
||||
state.facet(yUndoManagerFacet).undo() || true
|
||||
|
||||
export const redo: cmState.StateCommand = ({ state }) =>
|
||||
state.facet(yUndoManagerFacet).redo() || true
|
||||
|
||||
export const undoDepth = (state: cmState.EditorState): number =>
|
||||
state.facet(yUndoManagerFacet).undoManager.undoStack.length
|
||||
|
||||
export const redoDepth = (state: cmState.EditorState): number =>
|
||||
state.facet(yUndoManagerFacet).undoManager.redoStack.length
|
||||
|
||||
/**
|
||||
* Default key bindings for the undo manager.
|
||||
*/
|
||||
export const yUndoManagerKeymap: cmView.KeyBinding[] = [
|
||||
{ key: 'Mod-z', run: undo, preventDefault: true },
|
||||
{ key: 'Mod-y', mac: 'Mod-Shift-z', run: redo, preventDefault: true },
|
||||
{ key: 'Mod-Shift-z', run: redo, preventDefault: true },
|
||||
]
|
28
app/gui/src/project-view/components/codemirror/yCollab/yjsTypes.d.ts
vendored
Normal file
28
app/gui/src/project-view/components/codemirror/yCollab/yjsTypes.d.ts
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
/** @file Types exposed by Yjs APIs, but not exported by name. */
|
||||
|
||||
import * as Y from 'yjs'
|
||||
|
||||
export interface StackItemEvent {
|
||||
stackItem: StackItem
|
||||
origin: unknown
|
||||
type: 'undo' | 'redo'
|
||||
changedParentTypes: Map<Y.AbstractType<Y.YEvent<any>>, Y.YEvent<any>[]>
|
||||
}
|
||||
|
||||
export interface StackItem {
|
||||
insertions: DeleteSet
|
||||
deletions: DeleteSet
|
||||
/**
|
||||
* Use this to save and restore metadata like selection range
|
||||
*/
|
||||
meta: Map<any, any>
|
||||
}
|
||||
|
||||
export interface DeleteSet {
|
||||
clients: Map<number, DeleteItem[]>
|
||||
}
|
||||
|
||||
export interface DeleteItem {
|
||||
clock: number
|
||||
len: number
|
||||
}
|
@ -19,6 +19,10 @@ const emit = defineEmits<{ 'update:modelValue': [modelValue: boolean] }>()
|
||||
background: var(--color-widget);
|
||||
}
|
||||
|
||||
.selected .Checkbox {
|
||||
background: color-mix(in oklab, var(--color-widget-selected) 30%, var(--color-node-primary) 70%);
|
||||
}
|
||||
|
||||
.Checkbox > div {
|
||||
width: var(--widget-checkbox-inner-size);
|
||||
height: var(--widget-checkbox-inner-size);
|
||||
|
@ -10,7 +10,7 @@ enum SortDirection {
|
||||
descending = 'descending',
|
||||
}
|
||||
|
||||
const props = defineProps<{ color: string; entries: Entry[] }>()
|
||||
const props = defineProps<{ color: string; backgroundColor: string; entries: Entry[] }>()
|
||||
const emit = defineEmits<{ clickEntry: [entry: Entry, keepOpen: boolean] }>()
|
||||
|
||||
const sortDirection = ref<SortDirection>(SortDirection.none)
|
||||
@ -56,7 +56,8 @@ const enableSortButton = ref(false)
|
||||
|
||||
const styleVars = computed(() => {
  return {
    '--dropdown-bg': props.color,
    '--dropdown-fg': props.color,
    '--dropdown-bg': props.backgroundColor,
    // Slightly shift the top border of the drawn dropdown away from the node's top border by a
    // fraction of a pixel, to prevent it from poking through and disturbing the node's silhouette.
    '--extend-margin': `${0.2 / (graphNavigator?.scale ?? 1)}px`,
|
||||
@ -112,6 +113,7 @@ export interface DropdownEntry {
|
||||
padding-top: var(--dropdown-extend);
|
||||
background-color: var(--dropdown-bg);
|
||||
border-radius: calc(var(--item-height) / 2 + var(--dropdown-padding));
|
||||
color: var(--dropdown-fg);
|
||||
|
||||
&:before {
|
||||
content: '';
|
||||
@ -130,7 +132,6 @@ export interface DropdownEntry {
|
||||
min-height: 16px;
|
||||
max-height: calc(var(--visible-items) * var(--item-height) + 2 * var(--dropdown-padding));
|
||||
list-style-type: none;
|
||||
color: var(--color-text-light);
|
||||
scrollbar-width: thin;
|
||||
padding: var(--dropdown-padding);
|
||||
position: relative;
|
||||
@ -146,7 +147,7 @@ export interface DropdownEntry {
|
||||
overflow: hidden;
|
||||
|
||||
&:hover {
|
||||
background-color: color-mix(in oklab, var(--color-port-connected) 50%, transparent 50%);
|
||||
background-color: color-mix(in oklab, var(--dropdown-bg) 50%, white 50%);
|
||||
span {
|
||||
--text-scroll-max: calc(var(--dropdown-max-width) - 28px);
|
||||
will-change: transform;
|
||||
@ -154,10 +155,6 @@ export interface DropdownEntry {
|
||||
}
|
||||
}
|
||||
|
||||
&:not(.selected):hover {
|
||||
color: white;
|
||||
}
|
||||
|
||||
&.selected {
|
||||
background-color: var(--color-port-connected);
|
||||
|
||||
|
@ -132,7 +132,7 @@ const displayedChildren = computed(() => {
|
||||
|
||||
const rootNode = ref<HTMLElement>()
|
||||
|
||||
const cssPropsToCopy = ['--node-color-primary', '--node-color-port', '--node-border-radius']
|
||||
const cssPropsToCopy = ['--color-node-primary', '--node-color-port', '--node-border-radius']
|
||||
|
||||
function onDragStart(event: DragEvent, index: number) {
|
||||
if (
|
||||
@ -490,13 +490,13 @@ div {
|
||||
}
|
||||
|
||||
.item .preview {
|
||||
background: var(--node-color-primary);
|
||||
background: var(--color-node-primary);
|
||||
padding: 4px;
|
||||
border-radius: var(--node-border-radius);
|
||||
}
|
||||
|
||||
.token {
|
||||
color: rgb(255 255 255 / 0.33);
|
||||
opacity: 0.33;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
@ -525,21 +525,6 @@ div {
|
||||
transition: box-shadow 0.2s ease;
|
||||
pointer-events: none;
|
||||
cursor: grab;
|
||||
|
||||
&:before {
|
||||
content: '';
|
||||
opacity: 0;
|
||||
transition: opacity 0.2s ease;
|
||||
position: absolute;
|
||||
display: block;
|
||||
left: -8px;
|
||||
right: -16px;
|
||||
top: -3px;
|
||||
bottom: -3px;
|
||||
border-radius: var(--node-border-radius) 0 0 var(--node-border-radius);
|
||||
background-color: var(--node-color-primary);
|
||||
z-index: -1;
|
||||
}
|
||||
}
|
||||
|
||||
.item:hover {
|
||||
@ -548,16 +533,16 @@ div {
|
||||
|
||||
.item:hover .handle {
|
||||
box-shadow:
|
||||
2px 0 0 rgb(255 255 255 / 0.5),
|
||||
-2px 0 0 rgb(255 255 255 / 0.5);
|
||||
2px 0 0 var(--color-widget-unfocus),
|
||||
-2px 0 0 var(--color-widget-unfocus);
|
||||
|
||||
&:hover {
|
||||
box-shadow:
|
||||
2px 0 0 rgb(255 255 255 / 0.8),
|
||||
-2px 0 0 rgb(255 255 255 / 0.8);
|
||||
2px 0 0 var(--color-widget-focus),
|
||||
-2px 0 0 var(--color-widget-focus);
|
||||
}
|
||||
|
||||
background: var(--node-color-primary);
|
||||
background: var(--color-node-background);
|
||||
pointer-events: all;
|
||||
|
||||
&:before {
|
||||
@ -605,7 +590,7 @@ div {
|
||||
left: -5000px;
|
||||
}
|
||||
:global(.ListWidget-drag-ghost > div) {
|
||||
background-color: var(--node-color-primary);
|
||||
background-color: var(--color-node-primary);
|
||||
border-radius: var(--node-border-radius);
|
||||
padding: 4px;
|
||||
color: white;
|
||||
|
@ -164,15 +164,18 @@ defineExpose({
|
||||
}
|
||||
}
|
||||
|
||||
.NumericInputWidget.slider {
|
||||
.selected .NumericInputWidget {
|
||||
background: var(--color-widget-unfocus);
|
||||
&:focus {
|
||||
/* Color will be blended with background defined below. */
|
||||
background-color: var(--color-widget);
|
||||
background: var(--color-widget-focus);
|
||||
}
|
||||
}
|
||||
|
||||
.NumericInputWidget.slider {
|
||||
background: linear-gradient(
|
||||
to right,
|
||||
var(--color-widget-focus) 0 calc(var(--slider-width) - 1px),
|
||||
var(--color-widget-slight) calc(var(--slider-width) - 1px) var(--slider-width),
|
||||
color-mix(in oklab, var(--color-widget-focus) 30%, var(--color-widget) 70%) 0
|
||||
var(--slider-width),
|
||||
var(--color-widget) var(--slider-width) 100%
|
||||
);
|
||||
}
|
||||
|
@ -60,6 +60,12 @@ func2 a =
|
||||
r
|
||||
|
||||
## The main method
|
||||
|
||||
Here we test images:
|
||||
|
||||
![Image](/images/image.png)
|
||||
![Image](../images/image.png)
|
||||
![Image](</images/image.png>)
|
||||
main =
|
||||
five = 5
|
||||
ten = 10
|
||||
@ -84,6 +90,16 @@ const fileTree = {
|
||||
return mainFile
|
||||
},
|
||||
},
|
||||
images: {
|
||||
get 'image.png'() {
|
||||
return new Uint16Array([
|
||||
20617, 18254, 2573, 2586, 0, 3328, 18505, 21060, 0, 768, 0, 768, 772, 0, 41984, 43014, 140,
|
||||
0, 20501, 21580, 65093, 13106, 11262, 64043, 27756, 24571, 64863, 14906, 12030, 65070,
|
||||
10023, 29424, 11222, 0, 4352, 17481, 21569, 55048, 28771, 24661, 4960, 24672, 52, 768, 161,
|
||||
21933, 29603, 124, 0, 18688, 20037, 44612, 24642, 130,
|
||||
]).buffer
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const visualizations = new Map<Uuid, VisualizationConfiguration>()
|
||||
|
@ -9,7 +9,6 @@ import {
|
||||
type Identifier,
|
||||
} from '@/util/qualifiedName'
|
||||
import * as array from 'lib0/array'
|
||||
import * as object from 'lib0/object'
|
||||
import { ObservableV2 } from 'lib0/observable'
|
||||
import * as random from 'lib0/random'
|
||||
import { reactive } from 'vue'
|
||||
|
@ -31,13 +31,9 @@ import {
|
||||
type WatchSource,
|
||||
type WritableComputedRef,
|
||||
} from 'vue'
|
||||
import {
|
||||
Error as DataError,
|
||||
OutboundPayload,
|
||||
VisualizationUpdate,
|
||||
} from 'ydoc-shared/binaryProtocol'
|
||||
import { OutboundPayload, VisualizationUpdate } from 'ydoc-shared/binaryProtocol'
|
||||
import { LanguageServer } from 'ydoc-shared/languageServer'
|
||||
import type { Diagnostic, ExpressionId, MethodPointer, Path } from 'ydoc-shared/languageServerTypes'
|
||||
import type { Diagnostic, ExpressionId, MethodPointer } from 'ydoc-shared/languageServerTypes'
|
||||
import { type AbortScope } from 'ydoc-shared/util/net'
|
||||
import {
|
||||
DistributedProject,
|
||||
@ -130,7 +126,9 @@ export const { provideFn: provideProjectStore, injectFn: useProjectStore } = cre
|
||||
const clientId = random.uuidv4() as Uuid
|
||||
const lsUrls = resolveLsUrl(config.value)
|
||||
const lsRpcConnection = createLsRpcConnection(clientId, lsUrls.rpcUrl, abort)
|
||||
const contentRoots = lsRpcConnection.contentRoots
|
||||
const projectRootId = lsRpcConnection.contentRoots.then(
|
||||
(roots) => roots.find((root) => root.type === 'Project')?.id,
|
||||
)
|
||||
|
||||
const dataConnection = initializeDataConnection(clientId, lsUrls.dataUrl, abort)
|
||||
const rpcUrl = new URL(lsUrls.rpcUrl)
|
||||
@ -384,22 +382,6 @@ export const { provideFn: provideProjectStore, injectFn: useProjectStore } = cre
|
||||
}
|
||||
})
|
||||
|
||||
const projectRootId = contentRoots.then(
|
||||
(roots) => roots.find((root) => root.type === 'Project')?.id,
|
||||
)
|
||||
|
||||
async function readFileBinary(path: Path): Promise<Result<Blob>> {
|
||||
const result = await dataConnection.readFile(path)
|
||||
if (result instanceof DataError) {
|
||||
return Err(result.message() ?? 'Failed to read file.')
|
||||
}
|
||||
const contents = result.contentsArray()
|
||||
if (contents == null) {
|
||||
return Err('No file contents received.')
|
||||
}
|
||||
return Ok(new Blob([contents]))
|
||||
}
|
||||
|
||||
return proxyRefs({
|
||||
setObservedFileName(name: string) {
|
||||
observedFileName.value = name
|
||||
@ -423,7 +405,6 @@ export const { provideFn: provideProjectStore, injectFn: useProjectStore } = cre
|
||||
computedValueRegistry: markRaw(computedValueRegistry),
|
||||
lsRpcConnection: markRaw(lsRpcConnection),
|
||||
dataConnection: markRaw(dataConnection),
|
||||
readFileBinary,
|
||||
useVisualizationData,
|
||||
isRecordingEnabled,
|
||||
stopCapturingUndo,
|
||||
|
150
app/gui/src/project-view/stores/projectFiles.ts
Normal file
@ -0,0 +1,150 @@
|
||||
import { DataServer } from '@/util/net/dataServer'
|
||||
import { bytesToHex, Hash } from '@noble/hashes/utils'
|
||||
import { Error as DataError } from 'ydoc-shared/binaryProtocol'
|
||||
import { ErrorCode, LanguageServer, RemoteRpcError } from 'ydoc-shared/languageServer'
|
||||
import { Path, Uuid } from 'ydoc-shared/languageServerTypes'
|
||||
import { Err, Ok, Result, withContext } from 'ydoc-shared/util/data/result'
|
||||
|
||||
export type ProjectFiles = ReturnType<typeof useProjectFiles>
|
||||
|
||||
/**
|
||||
* A composable with project file operations.
|
||||
*/
|
||||
export function useProjectFiles(projectStore: {
|
||||
projectRootId: Promise<Uuid | undefined>
|
||||
lsRpcConnection: LanguageServer
|
||||
dataConnection: DataServer
|
||||
}) {
|
||||
const { projectRootId, lsRpcConnection: lsRpc, dataConnection } = projectStore
|
||||
|
||||
async function readFileBinary(path: Path): Promise<Result<Blob>> {
|
||||
const result = await dataConnection.readFile(path)
|
||||
if (result instanceof DataError) {
|
||||
return Err(result.message() ?? 'Failed to read file.')
|
||||
}
|
||||
const contents = result.contentsArray()
|
||||
if (contents == null) {
|
||||
return Err('No file contents received.')
|
||||
}
|
||||
return Ok(new Blob([contents]))
|
||||
}
|
||||
|
||||
async function writeFileBinary(path: Path, content: Blob): Promise<Result> {
|
||||
const result = await dataConnection.writeFile(path, await content.arrayBuffer())
|
||||
if (result instanceof DataError) {
|
||||
return Err(result.message() ?? 'Failed to write file.')
|
||||
}
|
||||
return Ok()
|
||||
}
|
||||
|
||||
async function writeBytes(
|
||||
path: Path,
|
||||
offset: bigint,
|
||||
overwriteExisting: boolean,
|
||||
contents: string | ArrayBuffer | Uint8Array,
|
||||
): Promise<Result> {
|
||||
const result = await dataConnection.writeBytes(path, offset, overwriteExisting, contents)
|
||||
if (result instanceof DataError) {
|
||||
return Err(result.message() ?? 'Failed to write bytes.')
|
||||
}
|
||||
return Ok()
|
||||
}
|
||||
|
||||
async function deleteFile(path: Path) {
|
||||
return lsRpc.deleteFile(path)
|
||||
}
|
||||
|
||||
/** Check if the directory exists, and try to create it if missing. */
|
||||
async function ensureDirExists(path: Path): Promise<Result<void>> {
|
||||
const exists = await dirExists(path)
|
||||
if (!exists.ok) return exists
|
||||
if (exists.value) return Ok()
|
||||
|
||||
const name = path.segments.at(-1)
|
||||
if (name == null) return Err('Cannot create context root')
|
||||
|
||||
return await withContext(
|
||||
() => 'When creating directory for uploaded file',
|
||||
async () => {
|
||||
return await lsRpc.createFile({
|
||||
type: 'Directory',
|
||||
name,
|
||||
path: { rootId: path.rootId, segments: path.segments.slice(0, -1) },
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the directory exists. If it does not, or it is a file, `Ok(false)` is returned.
|
||||
* In case of error, the directory's existence is neither confirmed nor disproved.
|
||||
*/
|
||||
async function dirExists(path: Path): Promise<Result<boolean>> {
|
||||
const info = await lsRpc.fileInfo(path)
|
||||
if (info.ok) return Ok(info.value.attributes.kind.type == 'Directory')
|
||||
else if (
|
||||
info.error.payload.cause instanceof RemoteRpcError &&
|
||||
(info.error.payload.cause.code === ErrorCode.FILE_NOT_FOUND ||
|
||||
info.error.payload.cause.code === ErrorCode.CONTENT_ROOT_NOT_FOUND)
|
||||
) {
|
||||
return Ok(false)
|
||||
} else {
|
||||
return info
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a name for a file which does not collide with existing files in `path`.
*
* The first choice is `suggestedName`; if that is taken, a numeric suffix is appended to the stem.
|
||||
*/
|
||||
async function pickUniqueName(path: Path, suggestedName: string): Promise<Result<string>> {
|
||||
const files = await lsRpc.listFiles(path)
|
||||
if (!files.ok) return files
|
||||
const existingNames = new Set(files.value.paths.map((path) => path.name))
|
||||
const { stem, extension = '' } = splitFilename(suggestedName)
|
||||
let candidate = suggestedName
|
||||
let num = 1
|
||||
while (existingNames.has(candidate)) {
|
||||
candidate = `${stem}_${num}.${extension}`
|
||||
num += 1
|
||||
}
|
||||
return Ok(candidate)
|
||||
}
|
||||
|
||||
async function assertChecksum<T extends Hash<T>>(
|
||||
path: Path,
|
||||
checksum: Hash<T>,
|
||||
): Promise<Result<void>> {
|
||||
const engineChecksum = await lsRpc.fileChecksum(path)
|
||||
if (!engineChecksum.ok) return engineChecksum
|
||||
const hexChecksum = bytesToHex(checksum.digest())
|
||||
if (hexChecksum != engineChecksum.value.checksum) {
|
||||
return Err(`Checksum does not match. ${hexChecksum} != ${engineChecksum.value.checksum}`)
|
||||
} else {
|
||||
return Ok()
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
projectRootId,
|
||||
readFileBinary,
|
||||
writeFileBinary,
|
||||
writeBytes,
|
||||
deleteFile,
|
||||
ensureDirExists,
|
||||
pickUniqueName,
|
||||
assertChecksum,
|
||||
}
|
||||
}
|
||||
|
||||
/** Split filename into stem and (optional) extension. */
|
||||
function splitFilename(fileName: string): { stem: string; extension?: string } {
|
||||
const dotIndex = fileName.lastIndexOf('.')
|
||||
if (dotIndex !== -1 && dotIndex !== 0) {
|
||||
const stem = fileName.substring(0, dotIndex)
|
||||
const extension = fileName.substring(dotIndex + 1)
|
||||
return { stem, extension }
|
||||
}
|
||||
return { stem: fileName }
|
||||
}
|
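A rough usage sketch of the composable (the store wiring and the uploaded bytes are assumptions; the `Path` shape follows the language-server types used above):

const files = useProjectFiles(projectStore)
const rootId = await files.projectRootId
if (rootId != null) {
  const imagesDir = { rootId, segments: ['images'] }
  const dirOk = await files.ensureDirExists(imagesDir)
  if (dirOk.ok) {
    const name = await files.pickUniqueName(imagesDir, 'image.png')
    if (name.ok) {
      await files.writeFileBinary(
        { rootId, segments: ['images', name.value] },
        new Blob([new Uint8Array([137, 80, 78, 71])]),
      )
    }
  }
}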
@ -160,10 +160,15 @@ export class DataServer extends ObservableV2<DataServerEvents> {
|
||||
return initResult.error.payload
|
||||
}
|
||||
}
|
||||
this.websocket.send(builder.finish(rootTable).toArrayBuffer())
|
||||
const promise = new Promise<T | Error>((resolve) => {
|
||||
this.resolveCallbacks.set(messageUuid, resolve)
|
||||
})
|
||||
try {
|
||||
this.websocket.send(builder.finish(rootTable).toArrayBuffer())
|
||||
} catch (e: unknown) {
|
||||
this.resolveCallbacks.delete(messageUuid)
|
||||
throw e
|
||||
}
|
||||
return promise
|
||||
}
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
// We are using `react-toastify`, since we share toast environment with dashboard.
|
||||
import type { ResultError } from '@/util/data/result'
|
||||
import { uuidv4 } from 'lib0/random'
|
||||
// We are using `react-toastify`, since we share toast environment with dashboard.
|
||||
import { toast, type ToastContent, type ToastOptions, type TypeOptions } from 'react-toastify'
|
||||
import { onScopeDispose } from 'vue'
|
||||
|
||||
@ -15,7 +15,25 @@ export interface UseToastOptions extends ToastOptions {
|
||||
outliveScope?: boolean
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
/**
 * Composable for a new toast - a pop-up message displayed to the user.
 *
 * ```ts
 * // useToast.error is an equivalent of useToast({ type: 'error' }).
 * // There's also useToast.info, useToast.warning and useToast.success.
 * const toastLspError = useToast.error()
 * // Every `useToast` allows displaying only one message at a time, so
 * // here we create a separate toast for every "topic".
 * const toastExecutionFailed = useToast.error()
 * const toastUserActionFailed = useToast.error()
 * // Toasts are automatically closed after some time. Here we suppress this.
 * const toastStartup = useToast.info({ autoClose: false })
 * const toastConnectionLost = useToast.error({ autoClose: false })
 * ```
 *
 * For details, read the documentation of the `react-toastify` library.
 */
export function useToast(options: UseToastOptions = {}) {
|
||||
const id = makeToastId()
|
||||
if (options?.outliveScope !== true) {
|
||||
@ -23,15 +41,18 @@ export function useToast(options: UseToastOptions = {}) {
|
||||
}
|
||||
|
||||
return {
|
||||
/** Show or update toast. */
|
||||
show(content: ToastContent) {
|
||||
if (toast.isActive(id)) toast.update(id, { ...options, render: content })
|
||||
else toast(content, { ...options, toastId: id })
|
||||
},
|
||||
/** A helper for reporting {@link ResultError} to both toast and console. */
|
||||
reportError<E>(result: ResultError<E>, preamble?: string) {
|
||||
const msg = result.message(preamble)
|
||||
console.error(msg)
|
||||
this.show(msg)
|
||||
},
|
||||
/** Dismiss the displayed toast. */
|
||||
dismiss() {
|
||||
toast.dismiss(id)
|
||||
},
|
||||
|
@ -158,10 +158,6 @@ export default /** @satisfies {import('tailwindcss').Config} */ ({
|
||||
|
||||
'members-name-column': 'var(--members-name-column-width)',
|
||||
'members-email-column': 'var(--members-email-column-width)',
|
||||
'keyboard-shortcuts-icon-column': 'var(--keyboard-shortcuts-icon-column-width)',
|
||||
'keyboard-shortcuts-name-column': 'var(--keyboard-shortcuts-name-column-width)',
|
||||
'keyboard-shortcuts-description-column':
|
||||
'var(--keyboard-shortcuts-description-column-width)',
|
||||
'drive-name-column': 'var(--drive-name-column-width)',
|
||||
'drive-modified-column': 'var(--drive-modified-column-width)',
|
||||
'drive-shared-with-column': 'var(--drive-shared-with-column-width)',
|
||||
@ -226,8 +222,6 @@ export default /** @satisfies {import('tailwindcss').Config} */ ({
|
||||
'settings-entry': 'var(--settings-entry-gap)',
|
||||
'settings-sidebar': 'var(--settings-sidebar-gap)',
|
||||
'new-empty-project': 'var(--new-empty-project-gap)',
|
||||
modifiers: 'var(--modifiers-gap)',
|
||||
'modifiers-macos': 'var(--modifiers-macos-gap)',
|
||||
'side-panel': 'var(--side-panel-gap)',
|
||||
'side-panel-section': 'var(--side-panel-section-gap)',
|
||||
'asset-search-bar': 'var(--asset-search-bar-gap)',
|
||||
@ -244,7 +238,6 @@ export default /** @satisfies {import('tailwindcss').Config} */ ({
|
||||
'context-menus': 'var(--context-menus-gap)',
|
||||
'asset-panel': 'var(--asset-panel-gap)',
|
||||
'search-suggestions': 'var(--search-suggestions-gap)',
|
||||
'keyboard-shortcuts-button': 'var(--keyboard-shortcuts-button-gap)',
|
||||
'chat-buttons': 'var(--chat-buttons-gap)',
|
||||
},
|
||||
padding: {
|
||||
|
22
app/licenses/MIT-yCollab-LICENSE
Normal file
@ -0,0 +1,22 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2024
|
||||
- Kevin Jahns <kevin.jahns@protonmail.com>.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
@ -35,6 +35,8 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"enso-common": "workspace:*",
|
||||
"@lezer/common": "^1.1.0",
|
||||
"@lezer/markdown": "^1.3.1",
|
||||
"@noble/hashes": "^1.4.0",
|
||||
"@open-rpc/client-js": "^1.8.1",
|
||||
"@types/debug": "^4.1.12",
|
||||
|
@ -1,3 +1,4 @@
import * as iter from 'enso-common/src/utilities/data/iter'
import { describe, expect, test } from 'vitest'
import { assert } from '../../util/assert'
import { MutableModule } from '../mutableModule'
@ -91,7 +92,7 @@ test('Creating comments: indented', () => {
expect(statement.module.root()?.code()).toBe(`main =\n ## ${docText}\n x = 1`)
})

describe('Markdown documentation', () => {
describe('Function documentation (Markdown)', () => {
const cases = [
{
source: '## My function',
@ -101,6 +102,10 @@ describe('Markdown documentation', () => {
source: '## My function\n\n Second paragraph',
markdown: 'My function\nSecond paragraph',
},
{
source: '## Trailing whitespace \n\n Second paragraph',
markdown: 'Trailing whitespace \nSecond paragraph',
},
{
source: '## My function\n\n\n Second paragraph after extra gap',
markdown: 'My function\n\nSecond paragraph after extra gap',
@ -141,14 +146,23 @@ describe('Markdown documentation', () => {
'the Enso syntax specification which requires line length not to exceed 100 characters.',
].join(' '), // TODO: This should be '\n ' when hard-wrapping is implemented.
},
{
source: '## Table below:\n | a | b |\n |---|---|',
markdown: 'Table below:\n| a | b |\n|---|---|',
},
{
source: '## Table below:\n\n | a | b |\n |---|---|',
markdown: 'Table below:\n\n| a | b |\n|---|---|',
},
]

test.each(cases)('Enso source comments to markdown', ({ source, markdown }) => {
test.each(cases)('Enso source comments to normalized markdown', ({ source, markdown }) => {
const moduleSource = `${source}\nmain =\n x = 1`
const topLevel = parseModule(moduleSource)
topLevel.module.setRoot(topLevel)
const main = [...topLevel.statements()][0]
const main = iter.first(topLevel.statements())
assert(main instanceof MutableFunctionDef)
expect(main.name.code()).toBe('main')
expect(main.mutableDocumentationMarkdown().toJSON()).toBe(markdown)
})

@ -156,7 +170,7 @@ describe('Markdown documentation', () => {
const functionCode = 'main =\n x = 1'
const topLevel = parseModule(functionCode)
topLevel.module.setRoot(topLevel)
const main = [...topLevel.statements()][0]
const main = iter.first(topLevel.statements())
assert(main instanceof MutableFunctionDef)
const markdownYText = main.mutableDocumentationMarkdown()
expect(markdownYText.toJSON()).toBe('')
@ -202,7 +216,7 @@ describe('Markdown documentation', () => {
const topLevel = parseModule(originalSourceWithDocComment)
expect(topLevel.code()).toBe(originalSourceWithDocComment)

const main = [...topLevel.statements()][0]
const main = iter.first(topLevel.statements())
assert(main instanceof MutableFunctionDef)
const markdownYText = main.mutableDocumentationMarkdown()
markdownYText.delete(0, markdownYText.length)
@ -1,4 +1,5 @@
import { LINE_BOUNDARIES } from 'enso-common/src/utilities/data/string'
import { markdownParser } from './ensoMarkdown'
import { xxHash128 } from './ffi'
import type { ConcreteChild, RawConcreteChild } from './print'
import { ensureUnspaced, firstChild, preferUnspaced, unspaced } from './print'
@ -32,6 +33,8 @@ export function* docLineToConcrete(
for (const newline of docLine.newlines) yield preferUnspaced(newline)
}

// === Markdown ===

/**
* Render function documentation to concrete tokens. If the `markdown` content has the same value as when `docLine` was
* parsed (as indicated by `hash`), the `docLine` will be used (preserving concrete formatting). If it is different, the
@ -42,95 +45,161 @@ export function functionDocsToConcrete(
hash: string | undefined,
docLine: DeepReadonly<DocLine> | undefined,
indent: string | null,
): IterableIterator<RawConcreteChild> | undefined {
): Iterable<RawConcreteChild> | undefined {
return (
hash && docLine && xxHash128(markdown) === hash ? docLineToConcrete(docLine, indent)
: markdown ? yTextToTokens(markdown, (indent || '') + ' ')
: markdown ? markdownYTextToTokens(markdown, (indent || '') + ' ')
: undefined
)
}

function markdownYTextToTokens(yText: string, indent: string): Iterable<ConcreteChild<Token>> {
const tokensBuilder = new DocTokensBuilder(indent)
standardizeMarkdown(yText, tokensBuilder)
return tokensBuilder.build()
}
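
Editor's note: the hunk above applies the reuse rule from the doc comment: when the edited Markdown still hashes to the value recorded at parse time, the original concrete tokens (and their exact formatting) are reused; otherwise the comment is re-rendered. A minimal standalone sketch of that decision, using a generic hash from @noble/hashes (one of the dependencies added above) in place of the project's xxHash128 binding; tokensFor, storedHash, and regenerate are illustrative names, not the repository API:

import { sha256 } from '@noble/hashes/sha256'
import { bytesToHex } from '@noble/hashes/utils'

const contentHash = (markdown: string): string => bytesToHex(sha256(markdown))

function tokensFor<T>(
  markdown: string,
  storedHash: string | undefined,
  originalTokens: T | undefined,
  regenerate: (markdown: string) => T,
): T | undefined {
  // Unchanged content: emit the original tokens, preserving their exact whitespace.
  if (storedHash && originalTokens && contentHash(markdown) === storedHash) return originalTokens
  // Edited content: re-render from the Markdown; an empty comment produces nothing.
  return markdown ? regenerate(markdown) : undefined
}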
/**
* Given Enso documentation comment tokens, returns a model of their Markdown content. This model abstracts away details
* such as the locations of line breaks that are not paragraph breaks (e.g. lone newlines denoting hard-wrapping of the
* source code).
*/
export function abstractMarkdown(elements: undefined | TextToken<ConcreteRefs>[]) {
let markdown = ''
let newlines = 0
let readingTags = true
let elidedNewline = false
;(elements ?? []).forEach(({ token: { node } }, i) => {
if (node.tokenType_ === TokenType.Newline) {
if (readingTags || newlines > 0) {
markdown += '\n'
elidedNewline = false
} else {
elidedNewline = true
}
newlines += 1
} else {
let nodeCode = node.code()
if (i === 0) nodeCode = nodeCode.trimStart()
if (elidedNewline) markdown += ' '
markdown += nodeCode
newlines = 0
if (readingTags) {
if (!nodeCode.startsWith('ICON ')) {
readingTags = false
}
}
}
})
const { tags, rawMarkdown } = toRawMarkdown(elements)
const markdown = [...tags, normalizeMarkdown(rawMarkdown)].join('\n')
const hash = xxHash128(markdown)
return { markdown, hash }
}
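
Editor's note: a worked example of the composition above, with hypothetical values following the 'ICON ' tag convention checked in toRawMarkdown; the tag line stays on its own line ahead of the normalized Markdown body.

// Assume toRawMarkdown produced:
const tags = ['ICON data_input'] // illustrative tag value, not taken from the diff
const rawMarkdown = 'My function\nwrapped in the source.'
// Then [...tags, normalizeMarkdown(rawMarkdown)].join('\n') evaluates to
// 'ICON data_input\nMy function wrapped in the source.'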
// TODO: Paragraphs should be hard-wrapped to fit within the column limit, but this requires:
// - Recognizing block elements other than paragraphs; we must not split non-paragraph elements.
// - Recognizing inline elements; some cannot be split (e.g. links), while some can be broken into two (e.g. bold).
// If we break inline elements, we must also combine them when encountered during parsing.
const ENABLE_INCOMPLETE_WORD_WRAP_SUPPORT = false

function* yTextToTokens(yText: string, indent: string): IterableIterator<ConcreteChild<Token>> {
yield unspaced(Token.new('##', TokenType.TextStart))
const lines = yText.split(LINE_BOUNDARIES)
let printingTags = true
for (const [i, value] of lines.entries()) {
if (i) {
yield unspaced(Token.new('\n', TokenType.Newline))
if (value && !printingTags) yield unspaced(Token.new('\n', TokenType.Newline))
}
printingTags = printingTags && value.startsWith('ICON ')
let offset = 0
while (offset < value.length) {
if (offset !== 0) yield unspaced(Token.new('\n', TokenType.Newline))
let wrappedLineEnd = value.length
let printableOffset = offset
if (i !== 0) {
while (printableOffset < value.length && value[printableOffset] === ' ')
printableOffset += 1
function toRawMarkdown(elements: undefined | TextToken<ConcreteRefs>[]) {
const tags: string[] = []
let readingTags = true
let rawMarkdown = ''
;(elements ?? []).forEach(({ token: { node } }, i) => {
if (node.tokenType_ === TokenType.Newline) {
if (!readingTags) {
rawMarkdown += '\n'
}
if (ENABLE_INCOMPLETE_WORD_WRAP_SUPPORT && !printingTags) {
const ENSO_SOURCE_MAX_COLUMNS = 100
const MIN_DOC_COLUMNS = 40
const availableWidth = Math.max(
ENSO_SOURCE_MAX_COLUMNS - indent.length - (i === 0 && offset === 0 ? '## '.length : 0),
MIN_DOC_COLUMNS,
)
if (availableWidth < wrappedLineEnd - printableOffset) {
const wrapIndex = value.lastIndexOf(' ', printableOffset + availableWidth)
if (printableOffset < wrapIndex) {
wrappedLineEnd = wrapIndex
}
} else {
let nodeCode = node.code()
if (i === 0) nodeCode = nodeCode.trimStart()
if (readingTags) {
if (nodeCode.startsWith('ICON ')) {
tags.push(nodeCode)
} else {
readingTags = false
}
}
while (printableOffset < value.length && value[printableOffset] === ' ') printableOffset += 1
const whitespace = i === 0 && offset === 0 ? ' ' : indent
const wrappedLine = value.substring(printableOffset, wrappedLineEnd)
yield { whitespace, node: Token.new(wrappedLine, TokenType.TextSection) }
offset = wrappedLineEnd
if (!readingTags) {
rawMarkdown += nodeCode
}
}
}
yield unspaced(Token.new('\n', TokenType.Newline))
})
return { tags, rawMarkdown }
}

/**
* Convert the Markdown input to a format with rendered-style linebreaks: Hard-wrapped lines within a paragraph will be
* joined, and only a single linebreak character is used to separate paragraphs.
*/
function normalizeMarkdown(rawMarkdown: string): string {
let normalized = ''
let prevTo = 0
let prevName: string | undefined = undefined
const cursor = markdownParser.parse(rawMarkdown).cursor()
cursor.firstChild()
do {
if (prevTo < cursor.from) {
const textBetween = rawMarkdown.slice(prevTo, cursor.from)
normalized +=
cursor.name === 'Paragraph' && prevName !== 'Table' ? textBetween.slice(0, -1) : textBetween
}
const text = rawMarkdown.slice(cursor.from, cursor.to)
normalized += cursor.name === 'Paragraph' ? text.replaceAll(/ *\n */g, ' ') : text
prevTo = cursor.to
prevName = cursor.name
} while (cursor.nextSibling())
return normalized
}
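
Editor's note: a short illustration of the normalization above; the expected value follows the doc comment and the test cases in the earlier hunk rather than captured output. Hard-wrapped lines inside a paragraph are joined, and paragraphs end up separated by a single newline.

const raw = 'First paragraph,\nhard-wrapped in the source.\n\nSecond paragraph.'
// normalizeMarkdown(raw) === 'First paragraph, hard-wrapped in the source.\nSecond paragraph.'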
/**
* Convert from "normalized" Markdown to the on-disk representation, with paragraphs hard-wrapped and separated by blank
* lines.
*/
function standardizeMarkdown(normalizedMarkdown: string, textConsumer: TextConsumer) {
let prevTo = 0
let prevName: string | undefined = undefined
let printingTags = true
const cursor = markdownParser.parse(normalizedMarkdown).cursor()
cursor.firstChild()
do {
if (prevTo < cursor.from) {
const betweenText = normalizedMarkdown.slice(prevTo, cursor.from)
for (const _match of betweenText.matchAll(LINE_BOUNDARIES)) {
textConsumer.newline()
}
if (cursor.name === 'Paragraph' && prevName !== 'Table') {
textConsumer.newline()
}
}
const lines = normalizedMarkdown.slice(cursor.from, cursor.to).split(LINE_BOUNDARIES)
if (cursor.name === 'Paragraph') {
let printingNonTags = false
lines.forEach((line, i) => {
if (printingTags) {
if (cursor.name === 'Paragraph' && line.startsWith('ICON ')) {
textConsumer.text(line)
} else {
printingTags = false
}
}
if (!printingTags) {
if (i > 0) {
textConsumer.newline()
if (printingNonTags) textConsumer.newline()
}
textConsumer.wrapText(line)
printingNonTags = true
}
})
} else {
lines.forEach((line, i) => {
if (i > 0) textConsumer.newline()
textConsumer.text(line)
})
printingTags = false
}
prevTo = cursor.to
prevName = cursor.name
} while (cursor.nextSibling())
}

interface TextConsumer {
text: (text: string) => void
wrapText: (text: string) => void
newline: () => void
}

class DocTokensBuilder implements TextConsumer {
private readonly tokens: ConcreteChild<Token>[] = [unspaced(Token.new('##', TokenType.TextStart))]

constructor(private readonly indent: string) {}

text(text: string): void {
const whitespace = this.tokens.length === 1 ? ' ' : this.indent
this.tokens.push({ whitespace, node: Token.new(text, TokenType.TextSection) })
}

wrapText(text: string): void {
this.text(text)
}

newline(): void {
this.tokens.push(unspaced(Token.new('\n', TokenType.Newline)))
}

build(): ConcreteChild<Token>[] {
this.newline()
return this.tokens
}
}
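
Editor's note: DocTokensBuilder above is one TextConsumer; a toy consumer (not part of the codebase) shows how standardizeMarkdown drives the interface, collecting plain text instead of Enso doc-comment tokens.

class PlainTextBuilder implements TextConsumer {
  private out = ''
  // Receives verbatim lines of non-paragraph blocks and leading tag lines.
  text(text: string): void {
    this.out += text
  }
  // Receives paragraph lines; hard-wrapping is not implemented yet (see the TODO
  // above), so the line is emitted unchanged.
  wrapText(text: string): void {
    this.text(text)
  }
  newline(): void {
    this.out += '\n'
  }
  build(): string {
    return this.out
  }
}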
@ -1,31 +1,20 @@
import { markdown as baseMarkdown, markdownLanguage } from '@codemirror/lang-markdown'
import type { Extension } from '@codemirror/state'
import type { Tree } from '@lezer/common'
import type { BlockContext, BlockParser, Line, MarkdownParser, NodeSpec } from '@lezer/markdown'
import { Element } from '@lezer/markdown'
import { TreeCursor } from '@lezer/common'
import type {
BlockContext,
BlockParser,
DelimiterType,
InlineContext,
InlineDelimiter,
InlineParser,
Line,
MarkdownParser,
NodeSpec,
} from '@lezer/markdown'
import { parser as baseParser, Element, Emoji, GFM, Subscript, Superscript } from '@lezer/markdown'
import { assertDefined } from 'ydoc-shared/util/assert'

/**
* Enso Markdown extension. Differences from CodeMirror's base Markdown extension:
* - It defines the flavor of Markdown supported in Enso documentation. Currently, this is mostly CommonMark except we
* don't support setext headings. Planned features include support for some GFM extensions.
* - Many of the parsers differ from the `@lezer/markdown` parsers in their treatment of whitespace, in order to support
* a rendering mode where markup (and some associated spacing) is hidden.
*/
export function markdown(): Extension {
return baseMarkdown({
base: markdownLanguage,
extensions: [
{
parseBlock: [headerParser, bulletList, orderedList, blockquoteParser, disableSetextHeading],
defineNodes: [blockquoteNode],
},
],
})
}

function getType({ parser }: { parser: MarkdownParser }, name: string) {
const ty = parser.nodeSet.types.find((ty) => ty.name === name)
const ty = parser.nodeSet.types.find(ty => ty.name === name)
assertDefined(ty)
return ty.id
}
@ -138,8 +127,8 @@ const blockquoteNode: NodeSpec = {
},
}

function elt(type: number, from: number, to: number): Element {
return new (Element as any)(type, from, to)
function elt(type: number, from: number, to: number, children?: readonly Element[]): Element {
return new (Element as any)(type, from, to, children)
}

function isBlockquote(line: Line) {
@ -196,6 +185,212 @@ function getListIndent(line: Line, pos: number) {
return indented >= indentAfter + 5 ? indentAfter + 1 : indented
}

// === Link ===

const enum Mark {
None = 0,
Open = 1,
Close = 2,
}

const LinkStart: DelimiterType = {}
const ImageStart: DelimiterType = {}

const linkParser: InlineParser = {
name: 'Link',
parse: (cx, next, start) => {
return next == 91 /* '[' */ ? cx.addDelimiter(LinkStart, start, start + 1, true, false) : -1
},
}

const imageParser: InlineParser = {
name: 'Image',
parse: (cx, next, start) => {
return next == 33 /* '!' */ && cx.char(start + 1) == 91 /* '[' */ ?
cx.addDelimiter(ImageStart, start, start + 2, true, false)
: -1
},
}

const linkEndParser: InlineParser = {
name: 'LinkEnd',
parse: (cx, next, start) => {
if (next != 93 /* ']' */) return -1
// Scanning back to the next link/image start marker
const openDelim = cx.findOpeningDelimiter(LinkStart) ?? cx.findOpeningDelimiter(ImageStart)
if (openDelim == null) return -1
const part = cx.parts[openDelim] as InlineDelimiter
// If this one has been set invalid (because it would produce
// a nested link) or there's no valid link here ignore both.
if (
!part.side ||
(cx.skipSpace(part.to) == start && !/[([]/.test(cx.slice(start + 1, start + 2)))
) {
cx.parts[openDelim] = null
return -1
}
// Finish the content and replace the entire range in
// this.parts with the link/image node.
const content = cx.takeContent(openDelim)
const link = (cx.parts[openDelim] = finishLink(
cx,
content,
part.type == LinkStart ? getType(cx, 'Link') : getType(cx, 'Image'),
part.from,
start + 1,
))
// Set any open-link markers before this link to invalid.
if (part.type == LinkStart)
for (let j = 0; j < openDelim; j++) {
const p = cx.parts[j]
if (p != null && !(p instanceof Element) && p.type == LinkStart) p.side = Mark.None
}
return link.to
},
}

function finishLink(
cx: InlineContext,
content: Element[],
type: number,
start: number,
startPos: number,
) {
const { text } = cx,
next = cx.char(startPos)
let endPos = startPos
const LinkMarkType = getType(cx, 'LinkMark')
const ImageType = getType(cx, 'Image')
content.unshift(elt(LinkMarkType, start, start + (type == ImageType ? 2 : 1)))
content.push(elt(LinkMarkType, startPos - 1, startPos))
if (next == 40 /* '(' */) {
let pos = cx.skipSpace(startPos + 1)
const dest = parseURL(text, pos - cx.offset, cx.offset, getType(cx, 'URL'), LinkMarkType)
let title
if (dest) {
const last = dest.at(-1)!
pos = cx.skipSpace(last.to)
// The destination and title must be separated by whitespace
if (pos != last.to) {
title = parseLinkTitle(text, pos - cx.offset, cx.offset, getType(cx, 'LinkTitle'))
if (title) pos = cx.skipSpace(title.to)
}
}
if (cx.char(pos) == 41 /* ')' */) {
content.push(elt(LinkMarkType, startPos, startPos + 1))
endPos = pos + 1
if (dest) content.push(...dest)
if (title) content.push(title)
content.push(elt(LinkMarkType, pos, endPos))
}
} else if (next == 91 /* '[' */) {
const label = parseLinkLabel(
text,
startPos - cx.offset,
cx.offset,
false,
getType(cx, 'LinkLabelType'),
)
if (label) {
content.push(label)
endPos = label.to
}
}
return elt(type, start, endPos, content)
}

// These return `null` when falling off the end of the input, `false`
// when parsing fails otherwise (for use in the incremental link
// reference parser).
function parseURL(
text: string,
start: number,
offset: number,
urlType: number,
linkMarkType: number,
): null | false | Element[] {
const next = text.charCodeAt(start)
if (next == 60 /* '<' */) {
for (let pos = start + 1; pos < text.length; pos++) {
const ch = text.charCodeAt(pos)
if (ch == 62 /* '>' */)
return [
elt(linkMarkType, start + offset, start + offset + 1),
elt(urlType, start + offset + 1, pos + offset),
elt(linkMarkType, pos + offset, pos + offset + 1),
]
if (ch == 60 || ch == 10 /* '<\n' */) return false
}
return null
} else {
let depth = 0,
pos = start
for (let escaped = false; pos < text.length; pos++) {
const ch = text.charCodeAt(pos)
if (isSpace(ch)) {
break
} else if (escaped) {
escaped = false
} else if (ch == 40 /* '(' */) {
depth++
} else if (ch == 41 /* ')' */) {
if (!depth) break
depth--
} else if (ch == 92 /* '\\' */) {
escaped = true
}
}
return (
pos > start ? [elt(urlType, start + offset, pos + offset)]
: pos == text.length ? null
: false
)
}
}

function parseLinkTitle(
text: string,
start: number,
offset: number,
linkTitleType: number,
): null | false | Element {
const next = text.charCodeAt(start)
if (next != 39 && next != 34 && next != 40 /* '"\'(' */) return false
const end = next == 40 ? 41 : next
for (let pos = start + 1, escaped = false; pos < text.length; pos++) {
const ch = text.charCodeAt(pos)
if (escaped) escaped = false
else if (ch == end) return elt(linkTitleType, start + offset, pos + 1 + offset)
else if (ch == 92 /* '\\' */) escaped = true
}
return null
}

function parseLinkLabel(
text: string,
start: number,
offset: number,
requireNonWS: boolean,
linkLabelType: number,
): null | false | Element {
for (
let escaped = false, pos = start + 1, end = Math.min(text.length, pos + 999);
pos < end;
pos++
) {
const ch = text.charCodeAt(pos)
if (escaped) escaped = false
else if (ch == 93 /* ']' */)
return requireNonWS ? false : elt(linkLabelType, start + offset, pos + 1 + offset)
else {
if (requireNonWS && !isSpace(ch)) requireNonWS = false
if (ch == 91 /* '[' */) return false
else if (ch == 92 /* '\\' */) escaped = true
}
}
return null
}

// === Debugging ===

/** Represents the structure of a @{link Tree} in a JSON-compatible format. */
@ -207,12 +402,12 @@ export interface DebugTree {

// noinspection JSUnusedGlobalSymbols
/** @returns A debug representation of the provided {@link Tree} */
export function debugTree(tree: Tree): DebugTree {
export function debugTree(tree: { cursor: () => TreeCursor }): DebugTree {
const cursor = tree.cursor()
let current: DebugTree[] = []
const stack: DebugTree[][] = []
cursor.iterate(
(node) => {
node => {
const children: DebugTree[] = []
current.push({
name: node.name,
@ -246,3 +441,25 @@ function isAtxHeading(line: Line) {
function isSpace(ch: number) {
return ch == 32 || ch == 9 || ch == 10 || ch == 13
}

const ensoMarkdownLanguageExtension = {
parseBlock: [headerParser, bulletList, orderedList, blockquoteParser, disableSetextHeading],
parseInline: [linkParser, imageParser, linkEndParser],
defineNodes: [blockquoteNode],
}

/**
* Lezer (CodeMirror) parser for the Enso documentation Markdown dialect.
* Differences from CodeMirror's base Markdown language:
* - It defines the flavor of Markdown supported in Enso documentation. Currently, this is mostly CommonMark except we
* don't support setext headings. Planned features include support for some GFM extensions.
* - Many of the parsers differ from the `@lezer/markdown` parsers in their treatment of whitespace, in order to support
* a rendering mode where markup (and some associated spacing) is hidden.
*/
export const markdownParser: MarkdownParser = baseParser.configure([
GFM,
Subscript,
Superscript,
Emoji,
ensoMarkdownLanguageExtension,
])
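
Editor's note: an illustrative use of the parser configured above (not part of the diff). With the GFM extension enabled, pipe tables parse into Table/TableHeader/TableDelimiter/TableRow/TableCell nodes, which is what lets the documentation editor recognize tables; the relative import path matches the one used in the documentation module above.

import { markdownParser } from './ensoMarkdown'

const tree = markdownParser.parse('| a | b |\n|---|---|\n| 1 | 2 |')
tree.cursor().iterate(node => {
  // Logs node names such as Table, TableHeader, TableDelimiter, TableRow, TableCell.
  console.log(node.name, node.from, node.to)
})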