Mirror of https://github.com/enso-org/enso.git (synced 2024-12-19 17:21:44 +03:00)

CodeEditor improvements (#11852)

This commit is contained in:
parent 63ed629210
commit aebfacd486
@@ -92,13 +92,13 @@
"@ag-grid-enterprise/range-selection": "^32.3.3",
"@babel/parser": "^7.24.7",
"babel-plugin-react-compiler": "19.0.0-beta-a7bf2bd-20241110",
"@codemirror/commands": "^6.6.0",
"@codemirror/language": "^6.10.2",
"@codemirror/commands": "^6.7.1",
"@codemirror/language": "^6.10.6",
"@codemirror/lang-markdown": "^v6.3.0",
"@codemirror/lint": "^6.8.1",
"@codemirror/search": "^6.5.6",
"@codemirror/state": "^6.4.1",
"@codemirror/view": "^6.35.0",
"@codemirror/lint": "^6.8.4",
"@codemirror/search": "^6.5.8",
"@codemirror/state": "^6.5.0",
"@codemirror/view": "^6.35.3",
"@fast-check/vitest": "^0.0.8",
"@floating-ui/vue": "^1.0.6",
"@lezer/common": "^1.1.0",
@@ -4,45 +4,58 @@ import { ensoSyntax } from '@/components/CodeEditor/ensoSyntax'
import { useEnsoSourceSync } from '@/components/CodeEditor/sync'
import { ensoHoverTooltip } from '@/components/CodeEditor/tooltips'
import CodeMirrorRoot from '@/components/CodeMirrorRoot.vue'
import VueComponentHost from '@/components/VueComponentHost.vue'
import { useGraphStore } from '@/stores/graph'
import { useProjectStore } from '@/stores/project'
import { useSuggestionDbStore } from '@/stores/suggestionDatabase'
import { useAutoBlur } from '@/util/autoBlur'
import { useCodeMirror } from '@/util/codemirror'
import { highlightStyle } from '@/util/codemirror/highlight'
import { testSupport } from '@/util/codemirror/testSupport'
import { indentWithTab } from '@codemirror/commands'
import {
bracketMatching,
defaultHighlightStyle,
foldGutter,
syntaxHighlighting,
} from '@codemirror/language'
import { indentWithTab, insertNewlineKeepIndent } from '@codemirror/commands'
import { bracketMatching, foldGutter } from '@codemirror/language'
import { lintGutter } from '@codemirror/lint'
import { highlightSelectionMatches } from '@codemirror/search'
import { keymap } from '@codemirror/view'
import { type Highlighter } from '@lezer/highlight'
import { minimalSetup } from 'codemirror'
import { computed, onMounted, useTemplateRef, type ComponentInstance } from 'vue'
import {
computed,
onMounted,
toRef,
useCssModule,
useTemplateRef,
type ComponentInstance,
} from 'vue'

const projectStore = useProjectStore()
const graphStore = useGraphStore()
const suggestionDbStore = useSuggestionDbStore()

const vueComponentHost =
useTemplateRef<ComponentInstance<typeof VueComponentHost>>('vueComponentHost')
const editorRoot = useTemplateRef<ComponentInstance<typeof CodeMirrorRoot>>('editorRoot')
const rootElement = computed(() => editorRoot.value?.rootElement)
useAutoBlur(rootElement)

const autoindentOnEnter = {
key: 'Enter',
run: insertNewlineKeepIndent,
}

const vueHost = computed(() => vueComponentHost.value || undefined)
const { editorView, setExtraExtensions } = useCodeMirror(editorRoot, {
extensions: [
keymap.of([indentWithTab, autoindentOnEnter]),
minimalSetup,
syntaxHighlighting(defaultHighlightStyle as Highlighter),
bracketMatching(),
foldGutter(),
lintGutter(),
highlightSelectionMatches(),
ensoSyntax(),
ensoHoverTooltip(graphStore, suggestionDbStore),
keymap.of([indentWithTab]),
ensoSyntax(toRef(graphStore, 'moduleRoot')),
highlightStyle(useCssModule()),
ensoHoverTooltip(graphStore, suggestionDbStore, vueHost),
],
vueHost,
})
;(window as any).__codeEditorApi = testSupport(editorView)
const { updateListener, connectModuleListener } = useEnsoSourceSync(
@@ -61,6 +74,7 @@ onMounted(() => {

<template>
<CodeMirrorRoot ref="editorRoot" class="CodeEditor" @keydown.tab.stop.prevent />
<VueComponentHost ref="vueComponentHost" />
</template>

<style scoped>
@@ -73,7 +87,6 @@ onMounted(() => {
}

:deep(.cm-scroller) {
font-family: var(--font-mono);
/* Prevent touchpad back gesture, which can be triggered while panning. */
overscroll-behavior: none;
}
@@ -113,3 +126,32 @@ onMounted(() => {
min-width: 32px;
}
</style>

<!--suppress CssUnusedSymbol -->
<style module>
.keyword,
.moduleKeyword,
.modifier {
color: #708;
}
.number {
color: #164;
}
.string {
color: #a11;
}
.escape {
color: #e40;
}
.variableName,
.definition-variableName {
color: #00f;
}
.lineComment,
.docComment {
color: #940;
}
.invalid {
color: #f00;
}
</style>
@@ -0,0 +1,47 @@
<script setup lang="ts">
import { type NodeId } from '@/stores/graph'
import { type GraphDb } from '@/stores/graph/graphDatabase'
import { type SuggestionDbStore } from '@/stores/suggestionDatabase'
import { computed } from 'vue'

const { nodeId, syntax, graphDb, suggestionDbStore } = defineProps<{
nodeId: NodeId | undefined
syntax: string
graphDb: GraphDb
suggestionDbStore: SuggestionDbStore
}>()

const expressionInfo = computed(() => nodeId && graphDb.getExpressionInfo(nodeId))
const typeName = computed(
() => expressionInfo.value && (expressionInfo.value.typename ?? 'Unknown'),
)
const executionTimeMs = computed(
() =>
expressionInfo.value?.profilingInfo[0] &&
(expressionInfo.value.profilingInfo[0].ExecutionTime.nanoTime / 1_000_000).toFixed(3),
)
const method = computed(() => expressionInfo.value?.methodCall?.methodPointer)
const group = computed(() => {
const id = method.value && suggestionDbStore.entries.findByMethodPointer(method.value)
if (id == null) return
const suggestionEntry = suggestionDbStore.entries.get(id)
if (!suggestionEntry) return
const groupIndex = suggestionEntry.groupIndex
if (groupIndex == null) return
const group = suggestionDbStore.groups[groupIndex]
if (!group) return
return {
name: `${group.project}.${group.name}`,
color: group.color,
}
})
</script>

<template>
<div v-if="nodeId">AST ID: {{ nodeId }}</div>
<div v-if="typeName">Type: {{ typeName }}</div>
<div v-if="executionTimeMs != null">Execution Time: {{ executionTimeMs }}ms</div>
<div>Syntax: {{ syntax }}</div>
<div v-if="method">Method: {{ method.module }}.{{ method.name }}</div>
<div v-if="group" :style="{ color: group.color }">Group: {{ group.name }}</div>
</template>
@@ -1,74 +1,20 @@
import { type GraphStore } from '@/stores/graph'
import { type ProjectStore } from '@/stores/project'
import { valueExt } from '@/util/codemirror/stateEffect'
import { type Diagnostic, forceLinting, linter } from '@codemirror/lint'
import { type Extension, StateEffect, StateField } from '@codemirror/state'
import { type Extension } from '@codemirror/state'
import { type EditorView } from '@codemirror/view'
import * as iter from 'enso-common/src/utilities/data/iter'
import { computed, watch } from 'vue'
import { type Diagnostic as LSDiagnostic, type Position } from 'ydoc-shared/languageServerTypes'
import { computed, watchEffect } from 'vue'
import { type SourceRange } from 'ydoc-shared/util/data/text'
import { type ExternalId } from 'ydoc-shared/yjsModel'

// Effect that can be applied to the document to invalidate the linter state.
const diagnosticsUpdated = StateEffect.define()
// State value that is perturbed by any `diagnosticsUpdated` effect.
const diagnosticsVersion = StateField.define({
create: (_state) => 0,
update: (value, transaction) => {
for (const effect of transaction.effects) {
if (effect.is(diagnosticsUpdated)) value += 1
}
return value
},
})

/** Given a text, indexes it and returns a function for converting between different ways of identifying positions. */
function stringPosConverter(text: string) {
let pos = 0
const lineStartIndex: number[] = []
for (const line of text.split('\n')) {
lineStartIndex.push(pos)
pos += line.length + 1
}
const length = text.length

function lineColToIndex({
line,
character,
}: {
line: number
character: number
}): number | undefined {
const startIx = lineStartIndex[line]
if (startIx == null) return
const ix = startIx + character
if (ix > length) return
return ix
}

return { lineColToIndex }
}

/** Convert the Language Server's diagnostics to CodeMirror diagnostics. */
function lsDiagnosticsToCMDiagnostics(
diagnostics: LSDiagnostic[],
lineColToIndex: (lineCol: Position) => number | undefined,
) {
const results: Diagnostic[] = []
for (const diagnostic of diagnostics) {
if (!diagnostic.location) continue
const from = lineColToIndex(diagnostic.location.start)
const to = lineColToIndex(diagnostic.location.end)
if (to == null || from == null) {
// Suppress temporary errors if the source is not the version of the document the LS is reporting diagnostics for.
continue
}
const severity =
diagnostic.kind === 'Error' ? 'error'
: diagnostic.kind === 'Warning' ? 'warning'
: 'info'
results.push({ from, to, message: diagnostic.message, severity })
}
return results
}
const {
set: setDiagnostics,
get: getDiagnostics,
changed: diagnosticsChanged,
extension: stateExt,
} = valueExt<Diagnostic[], Diagnostic[] | undefined>(undefined)

/**
 * CodeMirror extension providing diagnostics for an Enso module. Provides CodeMirror diagnostics based on dataflow
@@ -79,6 +25,10 @@ export function useEnsoDiagnostics(
graphStore: Pick<GraphStore, 'moduleSource' | 'db'>,
editorView: EditorView,
): Extension {
function spanOfExternalId(externalId: ExternalId): SourceRange | undefined {
const astId = graphStore.db.idFromExternal(externalId)
return astId && graphStore.moduleSource.getSpan(astId)
}
const expressionUpdatesDiagnostics = computed(() => {
const updates = projectStore.computedValueRegistry.db
const panics = updates.type.reverseLookup('Panic')
@@ -87,11 +37,9 @@ export function useEnsoDiagnostics(
for (const externalId of iter.chain(panics, errors)) {
const update = updates.get(externalId)
if (!update) continue
const astId = graphStore.db.idFromExternal(externalId)
if (!astId) continue
const span = graphStore.moduleSource.getSpan(astId)
const span = spanOfExternalId(externalId)
if (!span) continue
const [from, to] = span
const { from, to } = span
switch (update.payload.type) {
case 'Panic': {
diagnostics.push({ from, to, message: update.payload.message, severity: 'error' })
@@ -108,27 +56,30 @@ export function useEnsoDiagnostics(
}
return diagnostics
})
// The LS protocol doesn't identify what version of the file updates are in reference to. When diagnostics are
// received from the LS, we map them to the text assuming that they are applicable to the current version of the
// module. This will be correct if there is no one else editing, and we aren't editing faster than the LS can send
// updates. Typing too quickly can result in incorrect ranges, but at idle it should correct itself when we receive
// new diagnostics.
const executionContextDiagnostics = computed(() => {
const { lineColToIndex } = stringPosConverter(graphStore.moduleSource.text)
return lsDiagnosticsToCMDiagnostics(projectStore.diagnostics, lineColToIndex)
})
watch([executionContextDiagnostics, expressionUpdatesDiagnostics], () => {
editorView.dispatch({ effects: diagnosticsUpdated.of(null) })
const executionContextDiagnostics = computed<Diagnostic[]>(() =>
projectStore.diagnostics.flatMap((diagnostic) => {
const span = diagnostic.expressionId && spanOfExternalId(diagnostic.expressionId)
if (!span) return []
const { from, to } = span
const severity =
diagnostic.kind === 'Error' ? 'error'
: diagnostic.kind === 'Warning' ? 'warning'
: 'info'
return [{ from, to, message: diagnostic.message, severity }]
}),
)
watchEffect(() => {
const diagnostics = [
...expressionUpdatesDiagnostics.value,
...executionContextDiagnostics.value,
]
editorView.dispatch({ effects: setDiagnostics.of(diagnostics) })
forceLinting(editorView)
})
return [
diagnosticsVersion,
linter(() => [...executionContextDiagnostics.value, ...expressionUpdatesDiagnostics.value], {
needsRefresh(update) {
return (
update.state.field(diagnosticsVersion) !== update.startState.field(diagnosticsVersion)
)
},
stateExt,
linter((view) => view.state.facet(getDiagnostics) ?? [], {
needsRefresh: diagnosticsChanged,
}),
]
}
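The rewritten extension above depends on a `valueExt` helper from `@/util/codemirror/stateEffect` that is not included in this diff. As a rough sketch only (the shape is inferred from the call sites above and is an assumption, not the actual implementation), such a helper could be assembled from standard CodeMirror state primitives:

import { Facet, StateEffect, StateField, type Extension } from '@codemirror/state'
import { type ViewUpdate } from '@codemirror/view'

// Hypothetical sketch of a `valueExt`-like helper: an externally settable value
// stored in editor state, readable through a facet, with a change predicate
// suitable for `linter(..., { needsRefresh })`.
function externallySetValue<T>(initial: T) {
  const set = StateEffect.define<T>()
  const field = StateField.define<T>({
    create: () => initial,
    update: (value, tr) => {
      for (const effect of tr.effects) if (effect.is(set)) value = effect.value
      return value
    },
  })
  const get = Facet.define<T, T>({ combine: (values) => values[0] ?? initial })
  const changed = (update: ViewUpdate) =>
    update.state.field(field) !== update.startState.field(field)
  const extension: Extension = [field, get.compute([field], (state) => state.field(field))]
  return { set, get, changed, extension }
}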
@@ -1,12 +1,13 @@
import { RawAstExtended } from '@/util/ast/extended'
import { RawAst } from '@/util/ast/raw'
import { Ast } from '@/util/ast'
import {
defineLanguageFacet,
foldNodeProp,
indentUnit,
Language,
languageDataProp,
LanguageSupport,
} from '@codemirror/language'
import { type Extension } from '@codemirror/state'
import {
type Input,
NodeProp,
@@ -17,73 +18,117 @@ import {
Tree,
} from '@lezer/common'
import { styleTags, tags } from '@lezer/highlight'
import * as iter from 'enso-common/src/utilities/data/iter'
import { type Ref } from 'vue'
import { spanMapToSpanGetter, tokenSpanGetter } from 'ydoc-shared/ast/idMap'
import { assertDefined } from 'ydoc-shared/util/assert'
import { rangeLength, type SourceRange } from 'ydoc-shared/util/data/text'

const nodeTypes: NodeType[] = [
...RawAst.Tree.typeNames.map((name, id) => NodeType.define({ id, name })),
...RawAst.Token.typeNames.map((name, id) =>
NodeType.define({ id: id + RawAst.Tree.typeNames.length, name: 'Token' + name }),
),
]
const treeTypeNames = Ast.astTypes
const tokenTypeNames = Ast.tokenTypes.map((name) => `Token${name}`)
const nodeTypes: NodeType[] = [...treeTypeNames, ...tokenTypeNames].map((name, id) =>
NodeType.define({ id, name }),
)

const nodeSet = new NodeSet(nodeTypes).extend(
styleTags({
Ident: tags.variableName,
'Private!': tags.variableName,
Number: tags.number,
'Wildcard!': tags.variableName,
Wildcard: tags.variableName,
TokenIdent: tags.name,
'PropertyAccess/TokenIdent': tags.propertyName,
'NumericLiteral!': tags.number,
'FunctionDef/Ident': tags.definition(tags.function(tags.variableName)),
'Assignment/Ident': tags.definition(tags.variableName),
'Import/TokenIdent': tags.moduleKeyword,
// Strings and comments
'TextLiteral!': tags.string,
OprApp: tags.operator,
TokenTextEscape: tags.escape,
TokenTextStart: tags.docComment,
TokenTextSection: tags.docComment,
TokenNewline: tags.lineComment,
TokenInvalid: tags.invalid,
// Open/close tokens
'Group/TokenOpenSymbol': tags.paren,
'Group/TokenCloseSymbol': tags.paren,
'Vector/TokenOpenSymbol': tags.squareBracket,
'Vector/TokenCloseSymbol': tags.squareBracket,
// Operator tokens
TokenOperator: tags.operator,
'Assignment/TokenOperator': tags.definitionOperator,
UnaryOprApp: tags.operator,
'Function/Ident': tags.function(tags.variableName),
ForeignFunction: tags.function(tags.variableName),
'Import/TokenIdent': tags.function(tags.moduleKeyword),
Export: tags.function(tags.moduleKeyword),
Lambda: tags.function(tags.variableName),
Documented: tags.docComment,
ConstructorDefinition: tags.function(tags.variableName),
TokenDotOperator: tags.operator,
TokenCommaOperator: tags.separator,
TokenUnaryOperator: tags.operator,
TokenAnnotationOperator: tags.operator,
TokenAutoscopeOperator: tags.operator,
TokenLambdaOperator: tags.function(tags.controlOperator),
TokenSuspendedDefaultArguments: tags.controlOperator,
TokenSuspensionOperator: tags.controlOperator,
TokenAssignmentOperator: tags.definitionOperator,
TokenTypeAnnotationOperator: tags.typeOperator,
TokenArrowOperator: tags.typeOperator,
TokenNegationOperator: tags.number,
// Keyword tokens
TokenAllKeyword: tags.moduleKeyword,
TokenCaseKeyword: tags.controlKeyword,
TokenOfKeyword: tags.controlKeyword,
TokenPrivateKeyword: tags.modifier,
TokenTypeKeyword: tags.definitionKeyword,
TokenForeignKeyword: tags.modifier,
}),
foldNodeProp.add({
Function: (node) => node.lastChild,
ArgumentBlockApplication: (node) => node,
OperatorBlockApplication: (node) => node,
BodyBlock: (node) => (node.from === 0 ? null : node),
}),
)
const typeByName = new Map(nodeSet.types.map((type) => [type.name, type]))

export type AstNode = RawAstExtended<RawAst.Tree | RawAst.Token, false>
export type AstNode = Ast.Ast | Ast.Token
export const astProp = new NodeProp<AstNode>({ perNode: true })

function astToCodeMirrorTree(
nodeSet: NodeSet,
ast: AstNode,
ast: Ast.Ast,
getSpan: (id: Ast.AstId) => SourceRange | undefined,
getTokenSpan: (token: Ast.Token) => SourceRange | undefined,
props?: readonly [number | NodeProp<any>, any][] | undefined,
): Tree {
const [start, end] = ast.span()
const children = ast.children()
const nodeType = typeByName.get(ast.typeName)
assertDefined(nodeType)
const children = new Array<Tree>()
const childrenPositions = new Array<number>()
const { from, to } = getSpan(ast.id)!
for (const child of ast.children()) {
if (child instanceof Ast.Ast) {
children.push(astToCodeMirrorTree(child, getSpan, getTokenSpan))
childrenPositions.push(getSpan(child.id)!.from - from)
} else {
if (child.code().length === 0) continue
const childSpan = getTokenSpan(child)
assertDefined(childSpan)
const tokenTree = tokenToCodeMirrorTree(child, childSpan)
if (tokenTree) {
children.push(tokenTree)
childrenPositions.push(childSpan.from - from)
}
}
}
return new Tree(nodeType, children, childrenPositions, to - from, [
...(props ?? []),
[astProp, ast],
])
}

const childrenToConvert = iter.tryGetSoleValue(children)?.isToken() ? [] : children

return new Tree(
nodeSet.types[ast.inner.type + (ast.isToken() ? RawAst.Tree.typeNames.length : 0)]!,
childrenToConvert.map((child) => astToCodeMirrorTree(nodeSet, child)),
childrenToConvert.map((child) => child.span()[0] - start),
end - start,
[...(props ?? []), [astProp, ast]],
)
function tokenToCodeMirrorTree(token: Ast.Token, span: SourceRange): Tree | undefined {
if (token.typeName === 'Raw') return
const type = typeByName.get(`Token${token.typeName}`)
assertDefined(type)
return new Tree(type, [], [], rangeLength(span), [[astProp, token]])
}

const facet = defineLanguageFacet()

class EnsoParser extends Parser {
nodeSet
constructor() {
private cachedCode: string | undefined
private cachedTree: Tree | undefined
constructor(private readonly moduleRoot: Readonly<Ref<Ast.BodyBlock | undefined>>) {
super()
this.nodeSet = nodeSet
}
cachedCode: string | undefined
cachedTree: Tree | undefined
createParse(input: Input): PartialParse {
return {
parsedPos: input.length,
@@ -93,8 +138,19 @@ class EnsoParser extends Parser {
const code = input.read(0, input.length)
if (code !== this.cachedCode || this.cachedTree == null) {
this.cachedCode = code
const ast = RawAstExtended.parse(code)
this.cachedTree = astToCodeMirrorTree(this.nodeSet, ast, [[languageDataProp, facet]])
assertDefined(this.moduleRoot.value)
const root = Ast.copyIntoNewModule(this.moduleRoot.value)
const tempModule = root.module
root.module.setRoot(root)
root.syncToCode(code)
const parsedRoot = tempModule.root()
assertDefined(parsedRoot)
const { info: spans } = Ast.printWithSpans(parsedRoot)
const getSpan = spanMapToSpanGetter(spans.nodes)
const getTokenSpan = tokenSpanGetter(spans.tokens)
this.cachedTree = astToCodeMirrorTree(parsedRoot, getSpan, getTokenSpan, [
[languageDataProp, facet],
])
}
return this.cachedTree
},
@@ -102,15 +158,9 @@ class EnsoParser extends Parser {
}
}

class EnsoLanguage extends Language {
constructor() {
super(facet, new EnsoParser())
}
}

const ensoLanguage = new EnsoLanguage()

/** TODO: Add docs */
export function ensoSyntax() {
return new LanguageSupport(ensoLanguage)
export function ensoSyntax(moduleRoot: Readonly<Ref<Ast.BodyBlock | undefined>>): Extension {
return new LanguageSupport(new Language(facet, new EnsoParser(moduleRoot)), [
indentUnit.of(' '),
])
}
@@ -1,10 +1,10 @@
import { type GraphStore } from '@/stores/graph'
import { type ProjectStore } from '@/stores/project'
import { useToast } from '@/util/toast.ts'
import { changeSetToTextEdits } from '@/util/codemirror/text'
import { useToast } from '@/util/toast'
import {
Annotation,
ChangeSet,
type ChangeSpec,
type EditorSelection,
type Extension,
type Text,
@@ -16,18 +16,6 @@ import { MutableModule } from 'ydoc-shared/ast'
import { SourceRangeEdit, textChangeToEdits } from 'ydoc-shared/util/data/text'
import { type Origin } from 'ydoc-shared/yjsModel'

function changeSetToTextEdits(changes: ChangeSet) {
const textEdits = new Array<SourceRangeEdit>()
changes.iterChanges((from, to, _fromB, _toB, insert) =>
textEdits.push({ range: [from, to], insert: insert.toString() }),
)
return textEdits
}

function textEditToChangeSpec({ range: [from, to], insert }: SourceRangeEdit): ChangeSpec {
return { from, to, insert }
}

// Indicates a change updating the text to correspond to the given module state.
const synchronizedModule = Annotation.define<MutableModule>()

@@ -78,7 +66,7 @@ export function useEnsoSourceSync(
currentModule = undefined
const viewText = editorView.state.doc.toString()
const code = graphStore.moduleSource.text
const changes = textChangeToEdits(viewText, code).map(textEditToChangeSpec)
const changes = textChangeToEdits(viewText, code)
console.info('Resetting the editor to the module code.', changes)
editorView.dispatch({
changes,
@@ -139,7 +127,7 @@ export function useEnsoSourceSync(

// If none of the above exit-conditions were reached, the transaction is applicable to our current state.
editorView.dispatch({
changes: textEdits.map(textEditToChangeSpec),
changes: textEdits,
annotations: synchronizedModule.of(graphStore.startEdit()),
})
}
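The removal of the mapping step above works because the updated `SourceRangeEdit` shape, `{ from, to, insert }`, is already accepted by CodeMirror as a `ChangeSpec`. A small illustration with made-up values:

import { EditorState } from '@codemirror/state'

// An edit in the new `{ from, to, insert }` shape can be dispatched directly.
const edit = { from: 0, to: 3, insert: 'bar' }
const state = EditorState.create({ doc: 'foo = 1' })
console.log(state.update({ changes: edit }).state.doc.toString()) // 'bar = 1'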
@@ -1,7 +1,10 @@
import { type AstNode, astProp } from '@/components/CodeEditor/ensoSyntax'
import { type GraphStore, type NodeId } from '@/stores/graph'
import CodeEditorTooltip from '@/components/CodeEditor/CodeEditorTooltip.vue'
import { astProp } from '@/components/CodeEditor/ensoSyntax'
import { type VueHost } from '@/components/VueComponentHost.vue'
import { type GraphStore } from '@/stores/graph'
import { type SuggestionDbStore } from '@/stores/suggestionDatabase'
import { qnJoin, tryQualifiedName } from '@/util/qualifiedName'
import { Ast } from '@/util/ast'
import { type ToValue } from '@/util/reactivity'
import { syntaxTree } from '@codemirror/language'
import { type Extension } from '@codemirror/state'
import {
@@ -11,13 +14,13 @@ import {
type TooltipView,
} from '@codemirror/view'
import { type SyntaxNode } from '@lezer/common'
import { unwrap } from 'ydoc-shared/util/data/result'
import { rangeEncloses } from 'ydoc-shared/yjsModel'
import * as iter from 'enso-common/src/utilities/data/iter'
import { h, markRaw, toValue } from 'vue'
import { syntaxNodeAncestors } from 'ydoc-shared/util/lezer'

/** TODO: Add docs */
function hoverTooltip(
create: (
ast: AstNode,
syntax: SyntaxNode,
) => TooltipView | ((view: EditorView) => TooltipView) | null | undefined,
): Extension {
@@ -25,9 +28,7 @@ function hoverTooltip(
tooltips({ position: 'absolute' }),
originalHoverTooltip((view, pos, side) => {
const syntaxNode = syntaxTree(view.state).resolveInner(pos, side)
const astNode = syntaxNode.tree?.prop(astProp)
if (astNode == null) return null
const domOrCreate = create(astNode, syntaxNode)
const domOrCreate = create(syntaxNode)
if (domOrCreate == null) return null

return {
@@ -41,62 +42,34 @@ function hoverTooltip(
]
}

function codeEditorTooltip(vueHost: VueHost, props: typeof CodeEditorTooltip.props): TooltipView {
const dom = markRaw(document.createElement('div'))
dom.classList.add('CodeEditorTooltip')
const vueHostRegistration = vueHost.register(h(CodeEditorTooltip, props), dom)
return { dom, destroy: vueHostRegistration.unregister }
}

/** @returns A CodeMirror extension that creates tooltips containing type and syntax information for Enso code. */
export function ensoHoverTooltip(
graphStore: Pick<GraphStore, 'moduleSource' | 'db'>,
suggestionDbStore: Pick<SuggestionDbStore, 'entries'>,
suggestionDbStore: SuggestionDbStore,
vueHost: ToValue<VueHost | undefined>,
) {
return hoverTooltip((ast, syn) => {
const dom = document.createElement('div')
const astSpan = ast.span()
let foundNode: NodeId | undefined
for (const [id, node] of graphStore.db.nodeIdToNode.entries()) {
const rootSpan = graphStore.moduleSource.getSpan(node.rootExpr.id)
if (rootSpan && rangeEncloses(rootSpan, astSpan)) {
foundNode = id
break
}
return hoverTooltip((syn) => {
const vueHostValue = toValue(vueHost)
if (!vueHostValue) {
console.error('Cannot render tooltip without Vue host.')
return
}
const expressionInfo = foundNode && graphStore.db.getExpressionInfo(foundNode)
const nodeColor = foundNode && graphStore.db.getNodeColorStyle(foundNode)

if (foundNode != null) {
dom
.appendChild(document.createElement('div'))
.appendChild(document.createTextNode(`AST ID: ${foundNode}`))
}
if (expressionInfo != null) {
dom
.appendChild(document.createElement('div'))
.appendChild(document.createTextNode(`Type: ${expressionInfo.typename ?? 'Unknown'}`))
}
if (expressionInfo?.profilingInfo[0] != null) {
const profile = expressionInfo.profilingInfo[0]
const executionTime = (profile.ExecutionTime.nanoTime / 1_000_000).toFixed(3)
const text = `Execution Time: ${executionTime}ms`
dom.appendChild(document.createElement('div')).appendChild(document.createTextNode(text))
}

dom
.appendChild(document.createElement('div'))
.appendChild(document.createTextNode(`Syntax: ${syn.toString()}`))
const method = expressionInfo?.methodCall?.methodPointer
if (method != null) {
const moduleName = tryQualifiedName(method.module)
const methodName = tryQualifiedName(method.name)
const qualifiedName = qnJoin(unwrap(moduleName), unwrap(methodName))
const [id] = suggestionDbStore.entries.nameToId.lookup(qualifiedName)
const suggestionEntry = id != null ? suggestionDbStore.entries.get(id) : undefined
if (suggestionEntry != null) {
const groupNode = dom.appendChild(document.createElement('div'))
groupNode.appendChild(document.createTextNode('Group: '))
const groupNameNode = groupNode.appendChild(document.createElement('span'))
groupNameNode.appendChild(document.createTextNode(`${method.module}.${method.name}`))
if (nodeColor) {
groupNameNode.style.color = nodeColor
}
}
}
return { dom }
const enclosingAstNodes = iter.map(syntaxNodeAncestors(syn), (syn) => syn.tree?.prop(astProp))
const enclosingAsts = iter.filter(enclosingAstNodes, (node) => node instanceof Ast.Ast)
const enclosingExternalIds = iter.map(enclosingAsts, ({ externalId }) => externalId)
const nodeId = iter.find(enclosingExternalIds, graphStore.db.isNodeId.bind(graphStore.db))
return codeEditorTooltip(vueHostValue, {
nodeId,
syntax: syn.name,
graphDb: graphStore.db,
suggestionDbStore: suggestionDbStore,
})
})
}
@@ -9,6 +9,7 @@ import {
makeStaticMethod,
} from '@/stores/suggestionDatabase/entry'
import { assert } from '@/util/assert'
import { Ast } from '@/util/ast'
import { expect, test } from 'vitest'
import { ref, type Ref } from 'vue'
import type { Opt } from 'ydoc-shared/util/data/opt'
@@ -40,28 +41,29 @@ const staticMethod = {
}

test.each`
code | callSuggestion | subjectSpan | attachedSpan | subjectType | methodName
${'val1.method val2'} | ${method} | ${[0, 4]} | ${[0, 4]} | ${'local.Project.Type'} | ${'.method'}
${'local.Project.Type.method val1 val2'} | ${method} | ${[0, 18]} | ${[26, 30]} | ${'local.Project.Type.type'} | ${'.method'}
${'Type.method val1'} | ${method} | ${[0, 4]} | ${[12, 16]} | ${'local.Project.Type.type'} | ${'.method'}
${'local.Project.Type.method'} | ${method} | ${[0, 18]} | ${null} | ${'local.Project.Type.type'} | ${'.method'}
${'foo.method'} | ${method} | ${[0, 3]} | ${null} | ${'local.Project.Type.type'} | ${'.method'}
${'foo.method'} | ${method} | ${[0, 3]} | ${[0, 3]} | ${'local.Project.Type'} | ${'.method'}
${'local.Project.Type.static_method val1'} | ${staticMethod} | ${[0, 18]} | ${[0, 18]} | ${'local.Project.Type.type'} | ${'.static_method'}
${'Type.Con val1'} | ${con} | ${[0, 4]} | ${[0, 4]} | ${'local.Project.Type.type'} | ${'.Con'}
${'..Con val1'} | ${con} | ${null} | ${null} | ${null} | ${'.Con'}
${'local.Project.module_method val1'} | ${moduleMethod} | ${[0, 13]} | ${[0, 13]} | ${'local.Project'} | ${'.module_method'}
code | callSuggestion | subjectSpan | attachedSpan | subjectType | methodName
${'val1.method val2'} | ${method} | ${{ from: 0, to: 4 }} | ${{ from: 0, to: 4 }} | ${'local.Project.Type'} | ${'.method'}
${'local.Project.Type.method val1 val2'} | ${method} | ${{ from: 0, to: 18 }} | ${{ from: 26, to: 30 }} | ${'local.Project.Type.type'} | ${'.method'}
${'Type.method val1'} | ${method} | ${{ from: 0, to: 4 }} | ${{ from: 12, to: 16 }} | ${'local.Project.Type.type'} | ${'.method'}
${'local.Project.Type.method'} | ${method} | ${{ from: 0, to: 18 }} | ${null} | ${'local.Project.Type.type'} | ${'.method'}
${'foo.method'} | ${method} | ${{ from: 0, to: 3 }} | ${null} | ${'local.Project.Type.type'} | ${'.method'}
${'foo.method'} | ${method} | ${{ from: 0, to: 3 }} | ${{ from: 0, to: 3 }} | ${'local.Project.Type'} | ${'.method'}
${'local.Project.Type.static_method val1'} | ${staticMethod} | ${{ from: 0, to: 18 }} | ${{ from: 0, to: 18 }} | ${'local.Project.Type.type'} | ${'.static_method'}
${'Type.Con val1'} | ${con} | ${{ from: 0, to: 4 }} | ${{ from: 0, to: 4 }} | ${'local.Project.Type.type'} | ${'.Con'}
${'..Con val1'} | ${con} | ${null} | ${null} | ${null} | ${'.Con'}
${'local.Project.module_method val1'} | ${moduleMethod} | ${{ from: 0, to: 13 }} | ${{ from: 0, to: 13 }} | ${'local.Project'} | ${'.module_method'}
`(
'Visualization config for $code',
({ code, callSuggestion, subjectSpan, attachedSpan, subjectType, methodName }) => {
const spans = {
entireFunction: [0, code.length] as [number, number],
...(subjectSpan != null ? { subject: subjectSpan as [number, number] } : {}),
...(attachedSpan != null ? { attached: attachedSpan as [number, number] } : {}),
entireFunction: { from: 0, to: code.length },
...(subjectSpan != null ? { subject: subjectSpan } : {}),
...(attachedSpan != null ? { attached: attachedSpan } : {}),
}
const { ast, eid, id } = parseWithSpans(code, spans)
const node = (ast.lines[0]?.statement?.node as Ast.ExpressionStatement).expression
assert(node != null)
const statement = ast.lines[0]?.statement?.node
assert(statement instanceof Ast.ExpressionStatement)
const node = statement.expression
expect(node.externalId).toBe(eid('entireFunction'))

let visConfig: Ref<Opt<NodeVisualizationConfiguration>> | undefined
@@ -7,7 +7,7 @@ import { isToken } from 'ydoc-shared/ast'

const props = defineProps(widgetProps(widgetDefinition))

const spanClass = computed(() => props.input.value.typeName())
const spanClass = computed(() => props.input.value.typeName)

function* expressionChildren(expression: Ast.Expression) {
for (const child of expression.children()) {
@@ -4,7 +4,7 @@ import { computed } from 'vue'

const props = defineProps(widgetProps(widgetDefinition))

const spanClass = computed(() => props.input.value.typeName())
const spanClass = computed(() => props.input.value.typeName)
const repr = computed(() => props.input.value.code())
</script>

@@ -41,19 +41,19 @@ test('Reading graph from definition', () => {
node3 = node2 + 1
node3`
const spans = {
functionName: [0, 8] as [number, number],
parameter: [9, 10] as [number, number],
node1Id: [17, 22] as [number, number],
node1Content: [25, 30] as [number, number],
node1LParam: [25, 26] as [number, number],
node1RParam: [29, 30] as [number, number],
node2Id: [35, 40] as [number, number],
node2Content: [43, 52] as [number, number],
node2LParam: [43, 48] as [number, number],
node2RParam: [51, 52] as [number, number],
node3Id: [57, 62] as [number, number],
node3Content: [65, 74] as [number, number],
output: [79, 84] as [number, number],
functionName: { from: 0, to: 8 },
parameter: { from: 9, to: 10 },
node1Id: { from: 17, to: 22 },
node1Content: { from: 25, to: 30 },
node1LParam: { from: 25, to: 26 },
node1RParam: { from: 29, to: 30 },
node2Id: { from: 35, to: 40 },
node2Content: { from: 43, to: 52 },
node2LParam: { from: 43, to: 48 },
node2RParam: { from: 51, to: 52 },
node3Id: { from: 57, to: 62 },
node3Content: { from: 65, to: 74 },
output: { from: 79, to: 84 },
}

const { ast, id, eid, getSpan } = parseWithSpans(code, spans)
@@ -29,7 +29,7 @@ import { assertDefined } from '@/util/assert'
import { AliasAnalyzer } from '@/util/ast/aliasAnalysis'
import { MappedKeyMap, MappedSet } from '@/util/containers'
import { expect, test } from 'vitest'
import { sourceRangeKey, type SourceRange } from 'ydoc-shared/yjsModel'
import { sourceRangeKey, type SourceRange } from 'ydoc-shared/util/data/text'

/** The type of annotation. */
enum AnnotationType {
@@ -86,9 +86,9 @@ function parseAnnotations(annotatedCode: string): {
const name = bindingName ?? usageName ?? ''
const kind = bindingPrefix != null ? AnnotationType.Binding : AnnotationType.Usage

const start = offset - accumulatedOffset
const end = start + name.length
const range: SourceRange = [start, end]
const from = offset - accumulatedOffset
const to = from + name.length
const range: SourceRange = { from, to }

const annotation = new Annotation(kind, id)
accumulatedOffset += match.length - name.length
@@ -152,8 +152,8 @@ class TestCase {
return testCase
}

repr(range: SourceRange): string {
return this.code.substring(range[0], range[1])
repr({ from, to }: SourceRange): string {
return this.code.substring(from, to)
}

prettyPrint(range: SourceRange): string {
@@ -223,22 +223,23 @@ test('Annotations parsing', () => {
assertDefined(a, `No annotation found at [${range}].`)
expect(a.kind, 'Invalid annotation kind.').toBe(kind)
expect(a.id, 'Invalid annotation prefix.').toBe(prefix)
expect(unannotatedCode.substring(range[0], range[1]), 'Invalid annotation identifier.').toBe(
identifier,
)
expect(
unannotatedCode.substring(range.from, range.to),
'Invalid annotation identifier.',
).toBe(identifier)
} catch (e) {
const message = `Invalid annotation at [${range}]: ${e}`
throw new Error(message)
}
}

validateAnnotation([11, 12], AnnotationType.Binding, 1, 'x')
validateAnnotation([21, 22], AnnotationType.Binding, 2, 'y')
validateAnnotation([35, 36], AnnotationType.Binding, 3, 'x')
validateAnnotation([40, 41], AnnotationType.Usage, 3, 'x')
validateAnnotation([44, 45], AnnotationType.Usage, 2, 'y')
validateAnnotation([54, 55], AnnotationType.Usage, 1, 'x')
validateAnnotation([58, 59], AnnotationType.Usage, 2, 'y')
validateAnnotation({ from: 11, to: 12 }, AnnotationType.Binding, 1, 'x')
validateAnnotation({ from: 21, to: 22 }, AnnotationType.Binding, 2, 'y')
validateAnnotation({ from: 35, to: 36 }, AnnotationType.Binding, 3, 'x')
validateAnnotation({ from: 40, to: 41 }, AnnotationType.Usage, 3, 'x')
validateAnnotation({ from: 44, to: 45 }, AnnotationType.Usage, 2, 'y')
validateAnnotation({ from: 54, to: 55 }, AnnotationType.Usage, 1, 'x')
validateAnnotation({ from: 58, to: 59 }, AnnotationType.Usage, 2, 'y')
})

function runTestCase(code: string) {
@@ -348,7 +348,7 @@ export function parseUpdatingIdMap(
if (idMap) setExternalIds(root.module, spans, idMap)
return { root, spans }
})
const getSpan = spanMapToSpanGetter(spans)
const getSpan = spanMapToSpanGetter(spans.nodes)
const idMapOut = spanMapToIdMap(spans)
return { root, idMap: idMapOut, getSpan }
}
@@ -8,7 +8,7 @@ import {
} from '@/util/ast/raw'
import { MappedKeyMap, MappedSet, NonEmptyStack } from '@/util/containers'
import { LazyObject } from 'ydoc-shared/ast/parserSupport'
import { rangeIsBefore, sourceRangeKey, type SourceRange } from 'ydoc-shared/yjsModel'
import { rangeIsBefore, sourceRangeKey, type SourceRange } from 'ydoc-shared/util/data/text'

const ACCESSOR_OPERATOR = '.'

@@ -306,13 +306,14 @@ export class AliasAnalyzer {
const arrow = caseLine.case?.arrow
const expression = caseLine.case?.expression
if (pattern) {
const armStart = parsedTreeOrTokenRange(pattern)[0]
const armEnd =
expression ? parsedTreeOrTokenRange(expression)[1]
: arrow ? parsedTreeOrTokenRange(arrow)[1]
: parsedTreeOrTokenRange(pattern)[1]

const armRange: SourceRange = [armStart, armEnd]
const patternRange = parsedTreeOrTokenRange(pattern)
const armRange: SourceRange = {
from: patternRange.from,
to: (expression ? parsedTreeOrTokenRange(expression)
: arrow ? parsedTreeOrTokenRange(arrow)
: patternRange
).to,
}
this.withNewScopeOver(armRange, () => {
this.withContext(Context.Pattern, () => {
this.processTree(caseLine.case?.pattern)
@@ -1,11 +1,11 @@
import { Ast, RawAst } from '@/util/ast'
import { AliasAnalyzer } from '@/util/ast/aliasAnalysis'
import { visitRecursive } from '@/util/ast/raw'
import { parsedTreeRange, visitRecursive } from '@/util/ast/raw'
import { MappedKeyMap, MappedSet } from '@/util/containers'
import type { AstId } from 'ydoc-shared/ast'
import type { SourceDocument } from 'ydoc-shared/ast/sourceDocument'
import { type AstId } from 'ydoc-shared/ast'
import { type SourceDocument } from 'ydoc-shared/ast/sourceDocument'
import { assert, assertDefined } from 'ydoc-shared/util/assert'
import { type SourceRange, sourceRangeKey, type SourceRangeKey } from 'ydoc-shared/yjsModel'
import { type SourceRange, sourceRangeKey, type SourceRangeKey } from 'ydoc-shared/util/data/text'

/** A variable name, and information about its usages. */
export interface BindingInfo {
@@ -21,9 +21,7 @@ export function analyzeBindings(
const toRaw = new Map<SourceRangeKey, RawAst.Tree.Function>()
visitRecursive(Ast.rawParseModule(moduleSource.text), (node) => {
if (node.type === RawAst.Tree.Type.Function) {
const start = node.whitespaceStartInCodeParsed + node.whitespaceLengthInCodeParsed
const end = start + node.childrenLengthInCodeParsed
toRaw.set(sourceRangeKey([start, end]), node)
toRaw.set(sourceRangeKey(parsedTreeRange(node)), node)
return false
}
return true
@@ -1,222 +0,0 @@
import { assert, assertDefined } from '@/util/assert'
import {
childrenAstNodesOrTokens,
parsedTreeOrTokenRange,
rawParseModule,
readAstOrTokenSpan,
visitGenerator,
visitRecursive,
walkRecursive,
} from '@/util/ast/raw'
import type { Opt } from '@/util/data/opt'
import * as iter from 'enso-common/src/utilities/data/iter'
import * as encoding from 'lib0/encoding'
import * as sha256 from 'lib0/hash/sha256'
import * as map from 'lib0/map'
import { markRaw } from 'vue'
import * as Ast from 'ydoc-shared/ast/generated/ast'
import { Token, Tree } from 'ydoc-shared/ast/generated/ast'
import type { ExternalId, IdMap, SourceRange } from 'ydoc-shared/yjsModel'

export { AstExtended as RawAstExtended }

type ExtractType<V, T> =
T extends ReadonlyArray<infer Ts> ? Extract<V, { type: Ts }> : Extract<V, { type: T }>

type OneOrArray<T> = T | readonly T[]

/**
 * AST with additional metadata containing AST IDs and original code reference. Can only be
 * constructed by parsing any enso source code string.
 */
class AstExtended<T extends Tree | Token = Tree | Token, HasIdMap extends boolean = true> {
inner: T
private ctx: AstExtendedCtx<HasIdMap>

public static isToken<T extends OneOrArray<Ast.Token.Type>>(type?: T) {
return (obj: unknown): obj is AstExtended<ExtractType<Ast.Token, T>, boolean> =>
obj instanceof AstExtended && obj.isToken(type)
}

public static isTree<T extends OneOrArray<Ast.Tree.Type>>(type?: T) {
return (obj: unknown): obj is AstExtended<ExtractType<Ast.Tree, T>, boolean> =>
obj instanceof AstExtended && obj.isTree(type)
}

public static parse(code: string): AstExtended<Tree, false>
public static parse(code: string, idMap: IdMap): AstExtended<Tree, true>
public static parse(code: string, idMap?: IdMap): AstExtended<Tree, boolean> {
const ast = rawParseModule(code)
if (idMap != null) {
visitRecursive(ast, (node) => {
const range = parsedTreeOrTokenRange(node)
idMap.getOrInsertUniqueId(range)
return true
})
}

const ctx = new AstExtendedCtx(code, idMap)
return new AstExtended(ast, ctx)
}

public static parseLine(code: string): AstExtended<Tree, false> {
const block = AstExtended.parse(code)
assert(block.isTree(Tree.Type.BodyBlock))
return block.map((block) => {
const soleStatement = iter.tryGetSoleValue(block.statements)
assertDefined(soleStatement?.expression)
return soleStatement.expression
})
}

isToken<T extends OneOrArray<Ast.Token.Type>>(
type?: T,
): this is AstExtended<ExtractType<Ast.Token, T>, HasIdMap> {
if (!Token.isInstance(this.inner)) return false
if (type == null) return true
if (Array.isArray(type)) return (type as Ast.Token.Type[]).includes(this.inner.type)
return this.inner.type === type
}

isTree<T extends OneOrArray<Ast.Tree.Type>>(
type?: T,
): this is AstExtended<ExtractType<Ast.Tree, T>, HasIdMap> {
if (!Tree.isInstance(this.inner)) return false
if (type == null) return true
if (Array.isArray(type)) return (type as Ast.Tree.Type[]).includes(this.inner.type)
return this.inner.type === type
}

private constructor(tree: T, ctx: AstExtendedCtx<HasIdMap>) {
markRaw(this)
this.inner = tree
this.ctx = ctx
}

get astId(): CondType<ExternalId, HasIdMap> {
if (this.ctx.idMap != null) {
const id = this.ctx.idMap.getIfExist(parsedTreeOrTokenRange(this.inner))
assert(id != null, 'All AST nodes should have an assigned ID')
return id as CondType<ExternalId, HasIdMap>
} else {
return undefined as CondType<ExternalId, HasIdMap>
}
}

tryMap<T2 extends Tree | Token>(
mapper: (t: T) => Opt<T2>,
): AstExtended<T2, HasIdMap> | undefined {
const mapped = mapper(this.inner)
if (mapped == null) return
return new AstExtended(mapped, this.ctx)
}

map<T2 extends Tree | Token>(mapper: (t: T) => T2): AstExtended<T2, HasIdMap> {
return new AstExtended(mapper(this.inner), this.ctx)
}

mapIter<T2 extends Tree | Token>(
mapper: (t: T) => Iterable<T2>,
): Iterable<AstExtended<T2, HasIdMap>> {
return [...mapper(this.inner)].map((m) => new AstExtended(m, this.ctx))
}

tryMapIter<T2 extends Tree | Token>(
mapper: (t: T) => Iterable<Opt<T2>>,
): Iterable<AstExtended<T2, HasIdMap> | undefined> {
return [...mapper(this.inner)].map((m) =>
m != null ? new AstExtended(m, this.ctx) : undefined,
)
}

repr() {
return readAstOrTokenSpan(this.inner, this.ctx.parsedCode)
}

span(): SourceRange {
return parsedTreeOrTokenRange(this.inner)
}

contentHash() {
return this.ctx.getHash(this)
}

children(): AstExtended<Tree | Token, HasIdMap>[] {
return childrenAstNodesOrTokens(this.inner).map((child) => new AstExtended(child, this.ctx))
}

walkRecursive(): Generator<AstExtended<Tree | Token, HasIdMap>> {
return this.visit(walkRecursive)
}

whitespaceLength() {
return 'whitespaceLengthInCodeBuffer' in this.inner ?
this.inner.whitespaceLengthInCodeBuffer
: this.inner.whitespaceLengthInCodeParsed
}

*visit<T2 extends Tree | Token>(
visitor: (t: T) => Generator<T2>,
): Generator<AstExtended<T2, HasIdMap>> {
for (const child of visitor(this.inner)) {
yield new AstExtended(child, this.ctx)
}
}

/**
 * Recursively visit AST nodes in depth-first order.
 * @param visitor Callback that is called for each node. If it returns `false`, the children of that
 * node will be skipped, and the walk will continue to the next sibling.
 */
visitRecursive(visitor: (t: AstExtended<Tree | Token, HasIdMap>) => boolean) {
visitGenerator(this.walkRecursive(), visitor)
}

get parsedCode() {
return this.ctx.parsedCode
}
}

type CondType<T, Cond extends boolean> =
Cond extends true ? T
: Cond extends false ? undefined
: T | undefined

class AstExtendedCtx<HasIdMap extends boolean> {
parsedCode: string
idMap: CondType<IdMap, HasIdMap>
contentHashes: Map<string, Uint8Array>

constructor(parsedCode: string, idMap: CondType<IdMap, HasIdMap>) {
this.parsedCode = parsedCode
this.idMap = idMap
this.contentHashes = new Map()
}

static getHashKey(ast: AstExtended<Tree | Token, boolean>) {
return `${ast.isToken() ? 'T.' : ''}${ast.inner.type}.${ast.span()[0]}`
}

getHash(ast: AstExtended<Tree | Token, boolean>) {
const key = AstExtendedCtx.getHashKey(ast)
return map.setIfUndefined(this.contentHashes, key, () =>
sha256.digest(
encoding.encode((encoder) => {
const whitespace = ast.whitespaceLength()
encoding.writeUint32(encoder, whitespace)
if (ast.isToken()) {
encoding.writeUint8(encoder, 0)
encoding.writeUint32(encoder, ast.inner.type)
encoding.writeVarString(encoder, ast.repr())
} else {
encoding.writeUint8(encoder, 1)
encoding.writeUint32(encoder, ast.inner.type)
for (const child of ast.children()) {
encoding.writeUint8Array(encoder, this.getHash(child))
}
}
}),
),
)
}
}
@@ -1,7 +1,7 @@
import * as RawAst from 'ydoc-shared/ast/generated/ast'
import { rawParseModule } from 'ydoc-shared/ast/parse'
import { LazyObject } from 'ydoc-shared/ast/parserSupport'
import type { SourceRange } from 'ydoc-shared/yjsModel'
import { type SourceRange } from 'ydoc-shared/util/data/text'

export { RawAst, rawParseModule }

@@ -15,7 +15,7 @@ export type HasAstRange = SourceRange | RawAst.Tree | RawAst.Token
 */
export function readAstOrTokenSpan(node: RawAst.Tree | RawAst.Token, code: string): string {
const range = parsedTreeOrTokenRange(node)
return code.substring(range[0], range[1])
return code.substring(range.from, range.to)
}

/**
@@ -25,18 +25,7 @@ export function readAstOrTokenSpan(node: RawAst.Tree | RawAst.Token, code: strin
 */
export function readTokenSpan(token: RawAst.Token, code: string): string {
const range = parsedTokenRange(token)
return code.substring(range[0], range[1])
}

/** Read direct AST children. */
export function childrenAstNodes(obj: LazyObject): RawAst.Tree[] {
const children: RawAst.Tree[] = []
const visitor = (obj: LazyObject) => {
if (RawAst.Tree.isInstance(obj)) children.push(obj)
else if (!RawAst.Token.isInstance(obj)) obj.visitChildren(visitor)
}
obj.visitChildren(visitor)
return children
return code.substring(range.from, range.to)
}

/** TODO: Add docs */
@@ -90,20 +79,20 @@ export function visitRecursive(
/**
 * Read ast span information in `String.substring` compatible way. The returned span does not
 * include left whitespace offset.
 * @returns Object with `start` and `end` properties; index of first character in the `node`
 * @returns Object with `from` and `to` properties; index of first character in the `node`
 * and first character _not_ being in the `node`.
 */
function parsedTreeRange(tree: RawAst.Tree): SourceRange {
const start = tree.whitespaceStartInCodeParsed + tree.whitespaceLengthInCodeParsed
const end = start + tree.childrenLengthInCodeParsed
return [start, end]
export function parsedTreeRange(tree: RawAst.Tree): SourceRange {
const from = tree.whitespaceStartInCodeParsed + tree.whitespaceLengthInCodeParsed
const to = from + tree.childrenLengthInCodeParsed
return { from, to }
}

/** TODO: Add docs */
function parsedTokenRange(token: RawAst.Token): SourceRange {
const start = token.startInCodeBuffer
const end = start + token.lengthInCodeBuffer
return [start, end]
const from = token.startInCodeBuffer
const to = from + token.lengthInCodeBuffer
return { from, to }
}

/** TODO: Add docs */
@@ -7,7 +7,7 @@ import { setVueHost } from '@/util/codemirror/vueHostExt'
import { yCollab } from '@/util/codemirror/yCollab'
import { elementHierarchy } from '@/util/dom'
import { ToValue } from '@/util/reactivity'
import { Compartment, EditorState, Extension, Text } from '@codemirror/state'
import { Compartment, EditorState, type Extension, Text } from '@codemirror/state'
import { EditorView } from '@codemirror/view'
import { LINE_BOUNDARIES } from 'enso-common/src/utilities/data/string'
import {
@@ -40,7 +40,7 @@ export function useCodeMirror(
/** If a value is provided, the editor state will be synchronized with it. */
content?: ToValue<string | Y.Text>
/** CodeMirror {@link Extension}s to include in the editor's initial state. */
extensions?: Extension[]
extensions?: Extension
/** If a value is provided, it will be made available to extensions that render Vue components. */
vueHost?: WatchSource<VueHost | undefined>
},
app/gui/src/project-view/util/codemirror/text.ts (new file, 20 lines)
@ -0,0 +1,20 @@
import { type ChangeDesc, type ChangeSet } from '@codemirror/state'
import { type SourceRangeEdit, type SourceRangeEditDesc } from 'ydoc-shared/util/data/text'

/** Collect the changes in a {@link ChangeSet} as {@link SourceRangeEdit}s. */
export function changeSetToTextEdits(changes: ChangeSet): SourceRangeEdit[] {
  const textEdits = new Array<SourceRangeEdit>()
  changes.iterChanges((from, to, _fromB, _toB, insert) =>
    textEdits.push({ from, to, insert: insert.toString() }),
  )
  return textEdits
}

/** Collect the change descriptions in a {@link ChangeDesc} as {@link SourceRangeEditDesc}s. */
export function changeDescToSourceRangeEditDesc(changeDesc: ChangeDesc): SourceRangeEditDesc[] {
  const textEdits = new Array<SourceRangeEditDesc>()
  changeDesc.iterChangedRanges((fromA, toA, fromB, toB) => {
    textEdits.push({ from: fromA, to: toA, insert: { length: toB - fromB } })
  })
  return textEdits
}
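A sketch of driving the new helper from a plain `ChangeSet`, using only the public `@codemirror/state` API; the conversion function is inlined here (it mirrors `changeSetToTextEdits` above) so the example is self-contained, and the concrete document and offsets are made up:

import { ChangeSet } from '@codemirror/state'

interface SourceRangeEdit {
  from: number
  to: number
  insert: string
}

// Same logic as `changeSetToTextEdits` above, inlined for the example.
function changeSetToTextEdits(changes: ChangeSet): SourceRangeEdit[] {
  const textEdits: SourceRangeEdit[] = []
  changes.iterChanges((from, to, _fromB, _toB, insert) =>
    textEdits.push({ from, to, insert: insert.toString() }),
  )
  return textEdits
}

// Replace offsets 6..11 of an 11-character document with 'Enso'.
const changes = ChangeSet.of({ from: 6, to: 11, insert: 'Enso' }, 11)
console.log(changeSetToTextEdits(changes)) // [{ from: 6, to: 11, insert: 'Enso' }]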
@ -1,7 +1,8 @@
/** Translation of `yjsModel` types to and from the `fileFormat` representation. */

import * as json from 'lib0/json'
import { ExternalId, IdMap, sourceRangeFromKey } from 'ydoc-shared/yjsModel'
import { sourceRangeFromKey } from 'ydoc-shared/util/data/text'
import { type ExternalId, IdMap } from 'ydoc-shared/yjsModel'
import * as fileFormat from './fileFormat'

/** Convert a JSON string to an {@link IdMap}. */
@ -14,7 +15,7 @@ export function deserializeIdMap(idMapJson: string): IdMap {
      console.error(`Invalid range for id ${id}:`, range)
      continue
    }
    idMap.insertKnownId([index.value, index.value + size.value], id as ExternalId)
    idMap.insertKnownId({ from: index.value, to: index.value + size.value }, id as ExternalId)
  }
  return idMap
}
@ -30,8 +31,8 @@ export function idMapToArray(map: IdMap): fileFormat.IdMapEntry[] {
  const entries: fileFormat.IdMapEntry[] = []
  map.entries().forEach(([rangeBuffer, id]) => {
    const decoded = sourceRangeFromKey(rangeBuffer)
    const index = decoded[0]
    const endIndex = decoded[1]
    const index = decoded.from
    const endIndex = decoded.to
    if (index == null || endIndex == null) return
    const size = endIndex - index
    entries.push([{ index: { value: index }, size: { value: size } }, id])
@ -1,54 +0,0 @@
import { expect, test } from 'vitest'
import { rangeEncloses, rangeIntersects, type SourceRange } from '../yjsModel'

type RangeTest = { a: SourceRange; b: SourceRange }

const equalRanges: RangeTest[] = [
  { a: [0, 0], b: [0, 0] },
  { a: [0, 1], b: [0, 1] },
  { a: [-5, 5], b: [-5, 5] },
]

const totalOverlap: RangeTest[] = [
  { a: [0, 1], b: [0, 0] },
  { a: [0, 2], b: [2, 2] },
  { a: [-1, 1], b: [1, 1] },
  { a: [0, 2], b: [0, 1] },
  { a: [-10, 10], b: [-3, 7] },
  { a: [0, 5], b: [1, 2] },
  { a: [3, 5], b: [3, 4] },
]

const reverseTotalOverlap: RangeTest[] = totalOverlap.map(({ a, b }) => ({ a: b, b: a }))

const noOverlap: RangeTest[] = [
  { a: [0, 1], b: [2, 3] },
  { a: [0, 1], b: [-1, -1] },
  { a: [5, 6], b: [2, 3] },
  { a: [0, 2], b: [-2, -1] },
  { a: [-5, -3], b: [9, 10] },
  { a: [-3, 2], b: [3, 4] },
]

const partialOverlap: RangeTest[] = [
  { a: [0, 3], b: [-1, 1] },
  { a: [0, 1], b: [-1, 0] },
  { a: [0, 0], b: [-1, 0] },
  { a: [0, 2], b: [1, 4] },
  { a: [-8, 0], b: [0, 10] },
]

test.each([...equalRanges, ...totalOverlap])('Range $a should enclose $b', ({ a, b }) =>
  expect(rangeEncloses(a, b)).toBe(true),
)
test.each([...noOverlap, ...partialOverlap, ...reverseTotalOverlap])(
  'Range $a should not enclose $b',
  ({ a, b }) => expect(rangeEncloses(a, b)).toBe(false),
)
test.each([...equalRanges, ...totalOverlap, ...reverseTotalOverlap, ...partialOverlap])(
  'Range $a should intersect $b',
  ({ a, b }) => expect(rangeIntersects(a, b)).toBe(true),
)
test.each([...noOverlap])('Range $a should not intersect $b', ({ a, b }) =>
  expect(rangeIntersects(a, b)).toBe(false),
)
@ -1,13 +1,12 @@
import * as random from 'lib0/random'
import {
  type ExternalId,
  type SourceRange,
  type SourceRangeKey,
  IdMap,
  sourceRangeFromKey,
  sourceRangeKey,
} from '../yjsModel'
import { type Token } from './token'
} from '../util/data/text'
import { type ExternalId, IdMap } from '../yjsModel'
import { type Token, type TokenId } from './token'
import { type Ast, type AstId, ExpressionStatement } from './tree'

declare const nodeKeyBrand: unique symbol
@ -18,11 +17,11 @@ declare const tokenKeyBrand: unique symbol
export type TokenKey = SourceRangeKey & { [tokenKeyBrand]: never }
/** Create a source-range key for an `Ast`. */
export function nodeKey(start: number, length: number): NodeKey {
  return sourceRangeKey([start, start + length]) as NodeKey
  return sourceRangeKey({ from: start, to: start + length }) as NodeKey
}
/** Create a source-range key for a `Token`. */
export function tokenKey(start: number, length: number): TokenKey {
  return sourceRangeKey([start, start + length]) as TokenKey
  return sourceRangeKey({ from: start, to: start + length }) as TokenKey
}

/** Maps from source ranges to `Ast`s. */
@ -56,13 +55,20 @@ export function spanMapToIdMap(spans: SpanMap): IdMap {
  return idMap
}

/** Given a `SpanMap`, return a function that can look up source ranges by AST ID. */
export function spanMapToSpanGetter(spans: SpanMap): (id: AstId) => SourceRange | undefined {
/** Returns a function that can look up source ranges by AST ID. */
export function spanMapToSpanGetter(spans: NodeSpanMap): (id: AstId) => SourceRange | undefined {
  const reverseMap = new Map<AstId, SourceRange>()
  for (const [key, asts] of spans.nodes) {
  for (const [key, asts] of spans) {
    for (const ast of asts) {
      reverseMap.set(ast.id, sourceRangeFromKey(key))
    }
  }
  return id => reverseMap.get(id)
}

/** Returns a function that can look up token source ranges. */
export function tokenSpanGetter(spans: TokenSpanMap): (token: Token) => SourceRange | undefined {
  const reverseMap = new Map<TokenId, SourceRange>()
  for (const [key, token] of spans) reverseMap.set(token.id, sourceRangeFromKey(key))
  return ({ id }) => reverseMap.get(id)
}
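Both getters above build a reverse index once and then close over it. A generic sketch of that pattern, independent of the project's `Ast` and `Token` types (all names and values here are illustrative):

interface SourceRange {
  readonly from: number
  readonly to: number
}

// Generic version of the reverse-index pattern used by `spanMapToSpanGetter`
// and `tokenSpanGetter`: build the Map once, then look up by id in O(1).
function makeSpanGetter<Id>(
  spans: Iterable<readonly [SourceRange, Id]>,
): (id: Id) => SourceRange | undefined {
  const reverseMap = new Map<Id, SourceRange>()
  for (const [range, id] of spans) reverseMap.set(id, range)
  return id => reverseMap.get(id)
}

// Hypothetical usage:
const getSpan = makeSpanGetter<string>([
  [{ from: 0, to: 4 }, 'node-a'],
  [{ from: 5, to: 9 }, 'node-b'],
])
console.log(getSpan('node-b')) // { from: 5, to: 9 }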
@ -2,26 +2,24 @@ import * as random from 'lib0/random'
import * as Y from 'yjs'
import { subtreeRoots } from '.'
import { assert, assertDefined } from '../util/assert'
import type { SourceRangeEdit } from '../util/data/text'
import { defaultLocalOrigin, tryAsOrigin, type Origin } from '../yjsModel'
import { type SourceRangeEdit } from '../util/data/text'
import { type Origin, defaultLocalOrigin, tryAsOrigin } from '../yjsModel'
import { newExternalId } from './idMap'
import { parseModule } from './parse'
import type { SyncTokenId } from './token'
import { Token, isTokenId } from './token'
import type {
  AstFields,
  AstId,
  BodyBlock,
  FixedMap,
  Mutable,
  MutableAst,
  MutableBodyBlock,
  MutableInvalid,
  NodeChild,
  Owned,
  RawNodeChild,
} from './tree'
import { type SyncTokenId, Token, isTokenId } from './token'
import {
  type AstFields,
  type AstId,
  type AstType,
  type BodyBlock,
  type FixedMap,
  type Mutable,
  type MutableAst,
  type MutableBodyBlock,
  type MutableInvalid,
  type NodeChild,
  type Owned,
  type RawNodeChild,
  Ast,
  Wildcard,
  asOwned,
@ -160,7 +158,7 @@ export class MutableModule implements Module {

  /** Copy the given node into the module. */
  copy<T extends Ast>(ast: T): Owned<Mutable<T>> {
    const id = newAstId(ast.typeName())
    const id = newAstId(ast.typeName)
    const fields = ast.fields.clone()
    this.nodes.set(id, fields as any)
    fields.set('id', id)
@ -373,7 +371,7 @@ export class MutableModule implements Module {
  }

  /** @internal */
  baseObject(type: string, overrideId?: AstId): FixedMap<AstFields> {
  baseObject(type: AstType, overrideId?: AstId): FixedMap<AstFields> {
    const map = new Y.Map()
    const map_ = map as unknown as FixedMap<object>
    const id = overrideId ?? newAstId(type)
@ -1,12 +1,11 @@
import { subtreeRoots } from '.'
import { assertDefined, assertEqual } from '../util/assert'
import { sourceRangeFromKey } from '../yjsModel'
import type { NodeKey, NodeSpanMap } from './idMap'
import type { MutableModule } from './mutableModule'
import { sourceRangeFromKey } from '../util/data/text'
import { type NodeKey, type NodeSpanMap } from './idMap'
import { type MutableModule } from './mutableModule'
import { parseModuleWithSpans } from './parse'
import { printWithSpans } from './print'
import type { Ast, AstId } from './tree'
import { BodyBlock, Group } from './tree'
import { BodyBlock, Group, type Ast, type AstId } from './tree'

/**
 * Try to find all the spans in `expected` in `encountered`. If any are missing, use the provided `code` to determine
@ -24,10 +23,10 @@ function checkSpans(expected: NodeSpanMap, encountered: NodeSpanMap, code: strin
  const lostInline = new Array<Ast>()
  const lostBlock = new Array<Ast>()
  for (const [key, ast] of lost) {
    const [start, end] = sourceRangeFromKey(key)
    const { from, to } = sourceRangeFromKey(key)
    // Do not report lost empty body blocks, we don't want them to be considered for repair.
    if (start === end && ast instanceof BodyBlock) continue
    ;(code.substring(start, end).match(/[\r\n]/) ? lostBlock : lostInline).push(ast)
    if (from === to && ast instanceof BodyBlock) continue
    ;(code.substring(from, to).match(/[\r\n]/) ? lostBlock : lostInline).push(ast)
  }
  return { lostInline, lostBlock }
}
@ -1,11 +1,16 @@
import { assertDefined } from '../util/assert'
import type { SourceRangeEdit } from '../util/data/text'
import { offsetEdit, textChangeToEdits } from '../util/data/text'
import type { Origin, SourceRange } from '../yjsModel'
import { rangeEquals, sourceRangeFromKey } from '../yjsModel'
import type { Module, ModuleUpdate } from './mutableModule'
import {
  rangeEquals,
  sourceRangeFromKey,
  textChangeToEdits,
  translateRange,
  type SourceRange,
  type SourceRangeEdit,
} from '../util/data/text'
import { type Origin } from '../yjsModel'
import { type Module, type ModuleUpdate } from './mutableModule'
import { printWithSpans } from './print'
import type { AstId } from './tree'
import { type AstId } from './tree'

/**
 * Provides a view of the text representation of a module,
@ -32,9 +37,9 @@ export class SourceDocument {
  clear() {
    if (this.state.spans.size !== 0) this.state.spans.clear()
    if (this.state.text !== '') {
      const range: SourceRange = [0, this.state.text.length]
      const textEdit = { from: 0, to: this.state.text.length, insert: '' }
      this.state.text = ''
      this.notifyObservers([{ range, insert: '' }], undefined)
      this.notifyObservers([textEdit], undefined)
    }
  }

@ -52,10 +57,10 @@ export class SourceDocument {
      if (!oldSpan || !rangeEquals(range, oldSpan)) this.state.spans.set(node.id, range)
      if (update.updateRoots.has(node.id) && node.id !== root.id) {
        assertDefined(oldSpan)
        const oldCode = this.rawState.text.slice(oldSpan[0], oldSpan[1])
        const newCode = printed.code.slice(range[0], range[1])
        const oldCode = this.rawState.text.slice(oldSpan.from, oldSpan.to)
        const newCode = printed.code.slice(range.from, range.to)
        const subedits = textChangeToEdits(oldCode, newCode).map(textEdit =>
          offsetEdit(textEdit, oldSpan[0]),
          translateRange(textEdit, oldSpan.from),
        )
        subtreeTextEdits.push(...subedits)
      }
@ -64,7 +69,7 @@ export class SourceDocument {
    if (printed.code !== this.rawState.text) {
      const textEdits =
        update.updateRoots.has(root.id) ?
          [{ range: [0, this.rawState.text.length] satisfies SourceRange, insert: printed.code }]
          [{ from: 0, to: this.rawState.text.length, insert: printed.code }]
        : subtreeTextEdits
      this.state.text = printed.code
      this.notifyObservers(textEdits, update.origin)
@ -85,7 +90,7 @@ export class SourceDocument {
  observe(observer: SourceDocumentObserver) {
    this.observers.push(observer)
    if (this.rawState.text.length)
      observer([{ range: [0, 0], insert: this.rawState.text }], undefined)
      observer([{ from: 0, to: 0, insert: this.rawState.text }], undefined)
  }

  /** Remove a callback to no longer be called with a list of edits on every update. */
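With this change, `SourceDocument` observers receive plain `{ from, to, insert }` edits instead of `{ range, insert }` pairs. A sketch of an observer that maintains its own mirror of the text; the `doc.observe` wiring is shown as a comment because it assumes a `SourceDocument` instance named `doc`, and the fold mirrors `applyTextEdits`:

interface SourceRangeEdit {
  from: number
  to: number
  insert: string
}

// Apply a batch of edits (expressed against the pre-edit text) left to right.
function applyEdits(text: string, edits: readonly SourceRangeEdit[]): string {
  const ordered = [...edits].sort((a, b) => a.from - b.from)
  let pos = 0
  let out = ''
  for (const { from, to, insert } of ordered) {
    out += text.slice(pos, from) + insert
    pos = to
  }
  return out + text.slice(pos)
}

// Hypothetical wiring, where `doc` is a SourceDocument:
// let mirror = ''
// doc.observe((edits, _origin) => { mirror = applyEdits(mirror, edits) })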
@ -2,21 +2,20 @@ import * as iter from 'enso-common/src/utilities/data/iter'
import * as map from 'lib0/map'
import { assert, assertDefined } from '../util/assert'
import {
  type SourceRange,
  type SourceRangeEdit,
  type SourceRangeEditDesc,
  type SourceRangeKey,
  type SpanTree,
  applyTextEdits,
  applyTextEditsToSpans,
  enclosingSpans,
  textChangeToEdits,
  trimEnd,
} from '../util/data/text'
import {
  type SourceRange,
  type SourceRangeKey,
  rangeLength,
  sourceRangeFromKey,
  sourceRangeKey,
} from '../yjsModel'
  textChangeToEdits,
  trimEnd,
} from '../util/data/text'
import { xxHash128 } from './ffi'
import { type NodeKey, type NodeSpanMap, newExternalId } from './idMap'
import type { Module, MutableModule } from './mutableModule'
@ -88,7 +87,7 @@ function calculateCorrespondence(
  astSpans: NodeSpanMap,
  parsedRoot: Ast,
  parsedSpans: NodeSpanMap,
  textEdits: SourceRangeEdit[],
  textEdits: SourceRangeEditDesc[],
  codeAfter: string,
): Map<AstId, Ast> {
  const newSpans = new Map<AstId, SourceRange>()
@ -118,7 +117,7 @@ function calculateCorrespondence(
  for (const [astAfter, partsAfter] of astsMatchingPartsAfter) {
    for (const partAfter of partsAfter) {
      const astBefore = partAfterToAstBefore.get(sourceRangeKey(partAfter))!
      if (astBefore.typeName() === astAfter.typeName()) {
      if (astBefore.typeName === astAfter.typeName) {
        ;(rangeLength(newSpans.get(astAfter.id)!) === rangeLength(partAfter) ?
          toSync
        : candidates
@ -143,7 +142,7 @@ function calculateCorrespondence(
    const unmatchedNewAsts = newAsts.filter(ast => !newIdsMatched.has(ast.id))
    const unmatchedOldAsts = oldHashes.get(hash)?.filter(ast => !oldIdsMatched.has(ast.id)) ?? []
    for (const [unmatchedNew, unmatchedOld] of iter.zip(unmatchedNewAsts, unmatchedOldAsts)) {
      if (unmatchedNew.typeName() === unmatchedOld.typeName()) {
      if (unmatchedNew.typeName === unmatchedOld.typeName) {
        toSync.set(unmatchedOld.id, unmatchedNew)
        // Update the matched-IDs indices.
        oldIdsMatched.add(unmatchedOld.id)
@ -156,13 +155,13 @@ function calculateCorrespondence(
  // movement-matching.
  for (const [beforeId, after] of candidates) {
    if (oldIdsMatched.has(beforeId) || newIdsMatched.has(after.id)) continue
    if (after.typeName() === ast.module.get(beforeId).typeName()) {
    if (after.typeName === ast.module.get(beforeId).typeName) {
      toSync.set(beforeId, after)
    }
  }

  for (const [idBefore, astAfter] of toSync.entries())
    assert(ast.module.get(idBefore).typeName() === astAfter.typeName())
    assert(ast.module.get(idBefore).typeName === astAfter.typeName)
  return toSync
}
@ -7,6 +7,7 @@ import { newExternalId } from './idMap'
import type { AstId, DeepReadonly, NodeChild, Owned } from './tree'
import { Ast } from './tree'
export import TokenType = RawAst.Token.Type
export import tokenTypes = RawAst.Token.typeNames

/** Whether the given value is a {@link Token}. */
export function isToken(maybeToken: unknown): maybeToken is Token {
@ -34,15 +35,11 @@ export interface SyncTokenId {

/** A structure representing a lexical source code unit in the AST. */
export class Token implements SyncTokenId {
  readonly id: TokenId
  code_: string
  tokenType_: TokenType | undefined

  private constructor(code: string, type: TokenType | undefined, id: TokenId) {
    this.id = id
    this.code_ = code
    this.tokenType_ = type
  }
  private constructor(
    readonly code_: string,
    readonly tokenType_: TokenType | undefined,
    readonly id: TokenId,
  ) {}

  /** The id of this token. */
  get externalId(): TokenId {
@ -71,8 +68,8 @@ export class Token implements SyncTokenId {
  }

  /** The name of the token type of this token. */
  typeName(): string {
    if (this.tokenType_) return RawAst.Token.typeNames[this.tokenType_]!
  get typeName(): string {
    if (this.tokenType_ != null) return RawAst.Token.typeNames[this.tokenType_]!
    else return 'Raw'
  }
}
@ -125,10 +125,33 @@ const nodeMetadataKeys = allKeys<NodeMetadataFields>({
export type NodeMetadata = FixedMapView<NodeMetadataFields & MetadataFields>
export type MutableNodeMetadata = FixedMap<NodeMetadataFields & MetadataFields>

export const astTypes = [
  'App',
  'Assignment',
  'BodyBlock',
  'ExpressionStatement',
  'FunctionDef',
  'Generic',
  'Group',
  'Ident',
  'Import',
  'Invalid',
  'NegationApp',
  'NumericLiteral',
  'OprApp',
  'PropertyAccess',
  'TextLiteral',
  'UnaryOprApp',
  'AutoscopedIdentifier',
  'Vector',
  'Wildcard',
] as const
export type AstType = (typeof astTypes)[number]

/** @internal */
interface RawAstFields {
  id: AstId
  type: string
  type: AstType
  parent: AstId | undefined
  metadata: FixedMap<MetadataFields>
}
@ -227,7 +250,7 @@ export abstract class Ast {
  }

  /** TODO: Add docs */
  typeName(): string {
  get typeName(): AstType {
    return this.fields.get('type')
  }
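Because `astTypes` is a readonly tuple, `AstType` is the union of its elements and can back a simple runtime guard. A sketch; the import path is an assumption for illustration (only the `astTypes`/`AstType` names come from the diff above), and the guard itself is not part of the commit:

import { astTypes, type AstType } from 'ydoc-shared/ast/tree' // path assumed for the example

/** Narrow an arbitrary string to `AstType` if it names a known node type. */
function asAstType(name: string): AstType | undefined {
  return (astTypes as readonly string[]).includes(name) ? (name as AstType) : undefined
}

console.log(asAstType('OprApp')) // 'OprApp'
console.log(asAstType('Nonsense')) // undefined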
@ -1,6 +1,70 @@
import { fc, test } from '@fast-check/vitest'
import { expect } from 'vitest'
import { applyTextEdits, applyTextEditsToSpans, textChangeToEdits, trimEnd } from '../text'
import {
  type SourceRange,
  applyTextEdits,
  applyTextEditsToSpans,
  rangeEncloses,
  rangeIntersects,
  textChangeToEdits,
  trimEnd,
} from '../text'

type RangeTest = { a: SourceRange; b: SourceRange }

function rangeTest({ a, b }: { a: number[]; b: number[] }) {
  return { a: { from: a[0]!, to: a[1]! }, b: { from: b[0]!, to: b[1]! } }
}

const equalRanges: RangeTest[] = [
  { a: [0, 0], b: [0, 0] },
  { a: [0, 1], b: [0, 1] },
  { a: [-5, 5], b: [-5, 5] },
].map(rangeTest)

const totalOverlap: RangeTest[] = [
  { a: [0, 1], b: [0, 0] },
  { a: [0, 2], b: [2, 2] },
  { a: [-1, 1], b: [1, 1] },
  { a: [0, 2], b: [0, 1] },
  { a: [-10, 10], b: [-3, 7] },
  { a: [0, 5], b: [1, 2] },
  { a: [3, 5], b: [3, 4] },
].map(rangeTest)

const reverseTotalOverlap: RangeTest[] = totalOverlap.map(({ a, b }) => ({ a: b, b: a }))

const noOverlap: RangeTest[] = [
  { a: [0, 1], b: [2, 3] },
  { a: [0, 1], b: [-1, -1] },
  { a: [5, 6], b: [2, 3] },
  { a: [0, 2], b: [-2, -1] },
  { a: [-5, -3], b: [9, 10] },
  { a: [-3, 2], b: [3, 4] },
].map(rangeTest)

const partialOverlap: RangeTest[] = [
  { a: [0, 3], b: [-1, 1] },
  { a: [0, 1], b: [-1, 0] },
  { a: [0, 0], b: [-1, 0] },
  { a: [0, 2], b: [1, 4] },
  { a: [-8, 0], b: [0, 10] },
].map(rangeTest)

test.each([...equalRanges, ...totalOverlap])('Range $a should enclose $b', ({ a, b }) =>
  expect(rangeEncloses(a, b)).toBe(true),
)
test.each([...noOverlap, ...partialOverlap, ...reverseTotalOverlap])(
  'Range $a should not enclose $b',
  ({ a, b }) => expect(rangeEncloses(a, b)).toBe(false),
)
test.each([...equalRanges, ...totalOverlap, ...reverseTotalOverlap, ...partialOverlap])(
  'Range $a should intersect $b',
  ({ a, b }) => expect(rangeIntersects(a, b)).toBe(true),
)
test.each([...noOverlap])('Range $a should not intersect $b', ({ a, b }) =>
  expect(rangeIntersects(a, b)).toBe(false),
)

test.prop({
  before: fc.array(fc.boolean(), { minLength: 32, maxLength: 64 }),
@ -45,12 +109,12 @@ function checkCorrespondence(a: string[], b: string[]) {
 Performs the same check as {@link checkCorrespondence}, for correspondences that are not expected to be reversible.
 */
function checkCorrespondenceForward(before: string[], after: string[]) {
  const leadingSpacesAndLength = (input: string): [number, number] => [
    input.lastIndexOf(' ') + 1,
    input.length,
  ]
  const spacesAndHyphens = ([spaces, length]: readonly [number, number]) => {
    return ' '.repeat(spaces) + '-'.repeat(length - spaces)
  const leadingSpacesAndLength = (input: string): SourceRange => ({
    from: input.lastIndexOf(' ') + 1,
    to: input.length,
  })
  const spacesAndHyphens = ({ from, to }: SourceRange) => {
    return ' '.repeat(from) + '-'.repeat(to - from)
  }
  const edits = textChangeToEdits(before[0]!, after[0]!)
  const spansAfter = applyTextEditsToSpans(edits, before.slice(1).map(leadingSpacesAndLength)).map(
@ -1,8 +1,63 @@
import * as iter from 'enso-common/src/utilities/data/iter'
import diff from 'fast-diff'
import { rangeEncloses, rangeLength, type SourceRange } from '../../yjsModel'
import { type DeepReadonly } from '../../ast'

export type SourceRangeEdit = { range: SourceRange; insert: string }
export interface SourceRange {
  readonly from: number
  readonly to: number
}
declare const brandSourceRangeKey: unique symbol
export type SourceRangeKey = string & { [brandSourceRangeKey]: never }

/** Serializes a {@link SourceRange}, making it suitable for use as a key in maps or sets. */
export function sourceRangeKey({ from, to }: SourceRange): SourceRangeKey {
  return `${from.toString(16)}:${to.toString(16)}` as SourceRangeKey
}
/** Deserializes a {@link SourceRange} that was serialized by {@link sourceRangeKey} */
export function sourceRangeFromKey(key: SourceRangeKey): SourceRange {
  const [from, to] = key.split(':').map(x => parseInt(x, 16)) as [number, number]
  return { from, to }
}

/** @returns Whether the two ranges have the same start and end. */
export function rangeEquals(a: SourceRange, b: SourceRange): boolean {
  return a.from == b.from && a.to == b.to
}

/** @returns Whether the point `b` is within the range `a`. */
export function rangeIncludes(a: SourceRange, b: number): boolean {
  return a.from <= b && a.to >= b
}

/** @returns The length of the range `a`. */
export function rangeLength(a: SourceRange): number {
  return a.to - a.from
}

/** @returns Whether range `a` fully contains range `b`. */
export function rangeEncloses(a: SourceRange, b: SourceRange): boolean {
  return a.from <= b.from && a.to >= b.to
}

/** @returns Whether the ranges meet. */
export function rangeIntersects(a: SourceRange, b: SourceRange): boolean {
  return a.from <= b.to && a.to >= b.from
}

/** Whether the given range is before the other range. */
export function rangeIsBefore(a: SourceRange, b: SourceRange): boolean {
  return a.to <= b.from
}

/** Describes how a change to text will affect document locations. */
export interface SourceRangeEditDesc extends SourceRange {
  insert: { length: number }
}

/** A change that can be applied to text. */
export interface SourceRangeEdit extends SourceRangeEditDesc {
  insert: string
}

/** Given text and a set of `TextEdit`s, return the result of applying the edits to the text. */
export function applyTextEdits(
@ -10,13 +65,13 @@ export function applyTextEdits(
  textEdits: ReadonlyArray<Readonly<SourceRangeEdit>>,
) {
  const editsOrdered = [...textEdits]
  editsOrdered.sort((a, b) => a.range[0] - b.range[0])
  editsOrdered.sort((a, b) => a.from - b.from)
  let start = 0
  let newText = ''
  for (const textEdit of editsOrdered) {
    newText += oldText.slice(start, textEdit.range[0])
    newText += oldText.slice(start, textEdit.from)
    newText += textEdit.insert
    start = textEdit.range[1]
    start = textEdit.to
  }
  newText += oldText.slice(start)
  return newText
@ -36,7 +91,7 @@ export function textChangeToEdits(before: string, after: string): SourceRangeEdi
  for (const [op, text] of diff(before, after)) {
    switch (op) {
      case diff.INSERT:
        if (!nextEdit) nextEdit = { range: [pos, pos], insert: '' }
        if (!nextEdit) nextEdit = { from: pos, to: pos, insert: '' }
        nextEdit.insert = text
        break
      case diff.EQUAL:
@ -49,7 +104,7 @@ export function textChangeToEdits(before: string, after: string): SourceRangeEdi
      case diff.DELETE: {
        if (nextEdit) textEdits.push(nextEdit)
        const endPos = pos + text.length
        nextEdit = { range: [pos, endPos], insert: '' }
        nextEdit = { from: pos, to: endPos, insert: '' }
        pos = endPos
        break
      }
@ -59,9 +114,9 @@ export function textChangeToEdits(before: string, after: string): SourceRangeEdi
  return textEdits
}

/** Translate a `TextEdit` by the specified offset. */
export function offsetEdit(textEdit: SourceRangeEdit, offset: number): SourceRangeEdit {
  return { ...textEdit, range: [textEdit.range[0] + offset, textEdit.range[1] + offset] }
/** Translate a source range by the specified offset. */
export function translateRange<T extends SourceRange>(textEdit: T, offset: number): T {
  return { ...textEdit, from: textEdit.from + offset, to: textEdit.to + offset }
}

/**
@ -71,38 +126,42 @@ export function offsetEdit(textEdit: SourceRangeEdit, offset: number): SourceRan
 * @returns - A sequence of: Each span from `spansBefore` paired with the smallest span of the text after the edit that
 * contains all text that was in the original span and has not been deleted.
 */
export function applyTextEditsToSpans(textEdits: SourceRangeEdit[], spansBefore: SourceRange[]) {
export function applyTextEditsToSpans(
  textEdits: DeepReadonly<SourceRangeEditDesc[]>,
  spansBefore: DeepReadonly<SourceRange[]>,
) {
  // Gather start and end points.
  const numerically = (a: number, b: number) => a - b
  const starts = new iter.Resumable(spansBefore.map(([start, _end]) => start).sort(numerically))
  const ends = new iter.Resumable(spansBefore.map(([_start, end]) => end).sort(numerically))
  const starts = new iter.Resumable(spansBefore.map(({ from }) => from).sort(numerically))
  const ends = new iter.Resumable(spansBefore.map(({ to }) => to).sort(numerically))

  // Construct translations from old locations to new locations for all start and end points.
  const startMap = new Map<number, number>()
  const endMap = new Map<number, number>()
  let offset = 0
  for (const { range, insert } of textEdits) {
  for (const textEdit of textEdits) {
    const { from, to, insert } = textEdit
    starts.advanceWhile(start => {
      if (start < range[0]) {
      if (start < from) {
        startMap.set(start, start + offset)
        return true
      } else if (start <= range[1]) {
        startMap.set(start, range[0] + offset + insert.length)
      } else if (start <= to) {
        startMap.set(start, from + offset + insert.length)
        return true
      }
      return false
    })
    ends.advanceWhile(end => {
      if (end <= range[0]) {
      if (end <= from) {
        endMap.set(end, end + offset)
        return true
      } else if (end <= range[1]) {
        endMap.set(end, range[0] + offset)
      } else if (end <= to) {
        endMap.set(end, from + offset)
        return true
      }
      return false
    })
    offset += insert.length - rangeLength(range)
    offset += insert.length - rangeLength(textEdit)
  }
  starts.forEach(start => startMap.set(start, start + offset))
  ends.forEach(end => endMap.set(end, end + offset))
@ -110,9 +169,10 @@ export function applyTextEditsToSpans(textEdits: SourceRangeEdit[], spansBefore:
  // Apply the translations to the map.
  const spansBeforeAndAfter = new Array<readonly [SourceRange, SourceRange]>()
  for (const spanBefore of spansBefore) {
    const startAfter = startMap.get(spanBefore[0])!
    const endAfter = endMap.get(spanBefore[1])!
    if (endAfter > startAfter) spansBeforeAndAfter.push([spanBefore, [startAfter, endAfter]])
    const startAfter = startMap.get(spanBefore.from)!
    const endAfter = endMap.get(spanBefore.to)!
    if (endAfter > startAfter)
      spansBeforeAndAfter.push([spanBefore, { from: startAfter, to: endAfter }])
  }
  return spansBeforeAndAfter
}
@ -150,7 +210,9 @@ export function enclosingSpans<NodeId>(
}

/** Return the given range with any trailing spaces stripped. */
export function trimEnd(range: SourceRange, text: string): SourceRange {
  const trimmedLength = text.slice(range[0], range[1]).search(/ +$/)
  return trimmedLength === -1 ? range : [range[0], range[0] + trimmedLength]
export function trimEnd<T extends SourceRange>(range: T, text: string): T {
  const trimmedLength = text.slice(range.from, range.to).search(/ +$/)
  return trimmedLength === -1 ? range : (
      { ...range, from: range.from, to: range.from + trimmedLength }
    )
}
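The functions above compose: `textChangeToEdits` diffs two strings into `{ from, to, insert }` edits, `applyTextEdits` replays them, and `applyTextEditsToSpans` maps pre-edit spans onto the edited text. A sketch of that round trip, using the module path that the diff's own imports reference; the concrete strings and the span are made up:

import {
  applyTextEdits,
  applyTextEditsToSpans,
  textChangeToEdits,
  type SourceRange,
} from 'ydoc-shared/util/data/text'

const before = 'main = 1 + 2'
const after = 'main = 10 + 2'

// Diff the two versions into edits…
const edits = textChangeToEdits(before, after)
// …and replaying them reproduces the new text.
console.log(applyTextEdits(before, edits) === after) // true

// Track how a span over the literal `1` (offsets 7..8) moves through the edit.
const spans: SourceRange[] = [{ from: 7, to: 8 }]
for (const [oldSpan, newSpan] of applyTextEditsToSpans(edits, spans)) {
  console.log(oldSpan, '->', newSpan)
}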
@ -1,4 +1,4 @@
import type { TreeCursor } from '@lezer/common'
import { type SyntaxNode, type TreeCursor } from '@lezer/common'

/**
 * Compares the value of `cursor.name` to the provided value. This can be used instead of reading the field directly to
@ -8,3 +8,12 @@ import type { TreeCursor } from '@lezer/common'
export function isNodeType(cursor: TreeCursor, type: string): boolean {
  return cursor.name === type
}

/** Yields the provided node, and its parents recursively. */
export function* syntaxNodeAncestors(syn: SyntaxNode | null) {
  let currentSyn: SyntaxNode | null = syn
  while (currentSyn != null) {
    yield currentSyn
    currentSyn = currentSyn.parent
  }
}
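A sketch of how the new generator pairs with CodeMirror's syntax tree: resolve the node at a position, then walk outward through its ancestors. It uses the public `@codemirror/language` and `@codemirror/lang-markdown` APIs already in this project's dependencies; the generator is inlined so the example is self-contained, and the document text and position are made up:

import { markdown } from '@codemirror/lang-markdown'
import { syntaxTree } from '@codemirror/language'
import { EditorState } from '@codemirror/state'
import { type SyntaxNode } from '@lezer/common'

// Same generator as `syntaxNodeAncestors` above, inlined for the example.
function* syntaxNodeAncestors(syn: SyntaxNode | null) {
  let currentSyn: SyntaxNode | null = syn
  while (currentSyn != null) {
    yield currentSyn
    currentSyn = currentSyn.parent
  }
}

const state = EditorState.create({
  doc: '# Title\n\nSome *emphasized* text',
  extensions: [markdown()],
})
// Resolve the innermost node at a position inside "emphasized", then print its ancestry.
const nodeAtPos = syntaxTree(state).resolveInner(20, 1)
for (const node of syntaxNodeAncestors(nodeAtPos)) {
  console.log(node.name)
}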
@ -1,6 +1,7 @@
import * as object from 'lib0/object'
import * as random from 'lib0/random'
import * as Y from 'yjs'
import { type SourceRange, type SourceRangeKey, sourceRangeKey } from './util/data/text'

export type Uuid = `${string}-${string}-${string}-${string}-${string}`

@ -166,19 +167,6 @@ export function tryAsOrigin(origin: string): Origin | undefined {
  if (origin === 'remote') return origin
}

export type SourceRange = readonly [start: number, end: number]
declare const brandSourceRangeKey: unique symbol
export type SourceRangeKey = string & { [brandSourceRangeKey]: never }

/** TODO: Add docs */
export function sourceRangeKey(range: SourceRange): SourceRangeKey {
  return `${range[0].toString(16)}:${range[1].toString(16)}` as SourceRangeKey
}
/** TODO: Add docs */
export function sourceRangeFromKey(key: SourceRangeKey): SourceRange {
  return key.split(':').map(x => parseInt(x, 16)) as [number, number]
}

/** TODO: Add docs */
export class IdMap {
  private readonly rangeToExpr: Map<string, ExternalId>
@ -278,33 +266,3 @@ const uuidRegex = /^[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/
export function isUuid(x: unknown): x is Uuid {
  return typeof x === 'string' && x.length === 36 && uuidRegex.test(x)
}

/** TODO: Add docs */
export function rangeEquals(a: SourceRange, b: SourceRange): boolean {
  return a[0] == b[0] && a[1] == b[1]
}

/** TODO: Add docs */
export function rangeIncludes(a: SourceRange, b: number): boolean {
  return a[0] <= b && a[1] >= b
}

/** TODO: Add docs */
export function rangeLength(a: SourceRange): number {
  return a[1] - a[0]
}

/** TODO: Add docs */
export function rangeEncloses(a: SourceRange, b: SourceRange): boolean {
  return a[0] <= b[0] && a[1] >= b[1]
}

/** TODO: Add docs */
export function rangeIntersects(a: SourceRange, b: SourceRange): boolean {
  return a[0] <= b[1] && a[1] >= b[0]
}

/** Whether the given range is before the other range. */
export function rangeIsBefore(a: SourceRange, b: SourceRange): boolean {
  return a[1] <= b[0]
}
163
pnpm-lock.yaml
@ -125,26 +125,26 @@ importers:
|
||||
specifier: ^7.24.7
|
||||
version: 7.25.6
|
||||
'@codemirror/commands':
|
||||
specifier: ^6.6.0
|
||||
version: 6.6.0
|
||||
specifier: ^6.7.1
|
||||
version: 6.7.1
|
||||
'@codemirror/lang-markdown':
|
||||
specifier: ^v6.3.0
|
||||
version: 6.3.0
|
||||
'@codemirror/language':
|
||||
specifier: ^6.10.2
|
||||
version: 6.10.2
|
||||
specifier: ^6.10.6
|
||||
version: 6.10.6
|
||||
'@codemirror/lint':
|
||||
specifier: ^6.8.1
|
||||
version: 6.8.1
|
||||
specifier: ^6.8.4
|
||||
version: 6.8.4
|
||||
'@codemirror/search':
|
||||
specifier: ^6.5.6
|
||||
version: 6.5.6
|
||||
specifier: ^6.5.8
|
||||
version: 6.5.8
|
||||
'@codemirror/state':
|
||||
specifier: ^6.4.1
|
||||
version: 6.4.1
|
||||
specifier: ^6.5.0
|
||||
version: 6.5.0
|
||||
'@codemirror/view':
|
||||
specifier: ^6.35.0
|
||||
version: 6.35.0
|
||||
specifier: ^6.35.3
|
||||
version: 6.35.3
|
||||
'@fast-check/vitest':
|
||||
specifier: ^0.0.8
|
||||
version: 0.0.8(vitest@1.6.0(@types/node@22.9.0)(jsdom@24.1.0)(lightningcss@1.25.1))
|
||||
@ -1410,8 +1410,8 @@ packages:
|
||||
'@codemirror/view': ^6.0.0
|
||||
'@lezer/common': ^1.0.0
|
||||
|
||||
'@codemirror/commands@6.6.0':
|
||||
resolution: {integrity: sha512-qnY+b7j1UNcTS31Eenuc/5YJB6gQOzkUoNmJQc0rznwqSRpeaWWpjkWy2C/MPTcePpsKJEM26hXrOXl1+nceXg==}
|
||||
'@codemirror/commands@6.7.1':
|
||||
resolution: {integrity: sha512-llTrboQYw5H4THfhN4U3qCnSZ1SOJ60ohhz+SzU0ADGtwlc533DtklQP0vSFaQuCPDn3BPpOd1GbbnUtwNjsrw==}
|
||||
|
||||
'@codemirror/lang-css@6.3.0':
|
||||
resolution: {integrity: sha512-CyR4rUNG9OYcXDZwMPvJdtb6PHbBDKUc/6Na2BIwZ6dKab1JQqKa4di+RNRY9Myn7JB81vayKwJeQ7jEdmNVDA==}
|
||||
@ -1428,23 +1428,23 @@ packages:
|
||||
'@codemirror/lang-markdown@6.3.0':
|
||||
resolution: {integrity: sha512-lYrI8SdL/vhd0w0aHIEvIRLRecLF7MiiRfzXFZY94dFwHqC9HtgxgagJ8fyYNBldijGatf9wkms60d8SrAj6Nw==}
|
||||
|
||||
'@codemirror/language@6.10.2':
|
||||
resolution: {integrity: sha512-kgbTYTo0Au6dCSc/TFy7fK3fpJmgHDv1sG1KNQKJXVi+xBTEeBPY/M30YXiU6mMXeH+YIDLsbrT4ZwNRdtF+SA==}
|
||||
'@codemirror/language@6.10.6':
|
||||
resolution: {integrity: sha512-KrsbdCnxEztLVbB5PycWXFxas4EOyk/fPAfruSOnDDppevQgid2XZ+KbJ9u+fDikP/e7MW7HPBTvTb8JlZK9vA==}
|
||||
|
||||
'@codemirror/lint@6.8.1':
|
||||
resolution: {integrity: sha512-IZ0Y7S4/bpaunwggW2jYqwLuHj0QtESf5xcROewY6+lDNwZ/NzvR4t+vpYgg9m7V8UXLPYqG+lu3DF470E5Oxg==}
|
||||
'@codemirror/lint@6.8.4':
|
||||
resolution: {integrity: sha512-u4q7PnZlJUojeRe8FJa/njJcMctISGgPQ4PnWsd9268R4ZTtU+tfFYmwkBvgcrK2+QQ8tYFVALVb5fVJykKc5A==}
|
||||
|
||||
'@codemirror/search@6.5.6':
|
||||
resolution: {integrity: sha512-rpMgcsh7o0GuCDUXKPvww+muLA1pDJaFrpq/CCHtpQJYz8xopu4D1hPcKRoDD0YlF8gZaqTNIRa4VRBWyhyy7Q==}
|
||||
'@codemirror/search@6.5.8':
|
||||
resolution: {integrity: sha512-PoWtZvo7c1XFeZWmmyaOp2G0XVbOnm+fJzvghqGAktBW3cufwJUWvSCcNG0ppXiBEM05mZu6RhMtXPv2hpllig==}
|
||||
|
||||
'@codemirror/state@6.4.1':
|
||||
resolution: {integrity: sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==}
|
||||
'@codemirror/state@6.5.0':
|
||||
resolution: {integrity: sha512-MwBHVK60IiIHDcoMet78lxt6iw5gJOGSbNbOIVBHWVXIH4/Nq1+GQgLLGgI1KlnN86WDXsPudVaqYHKBIx7Eyw==}
|
||||
|
||||
'@codemirror/theme-one-dark@6.1.2':
|
||||
resolution: {integrity: sha512-F+sH0X16j/qFLMAfbciKTxVOwkdAS336b7AXTKOZhy8BR3eH/RelsnLgLFINrpST63mmN2OuwUt0W2ndUgYwUA==}
|
||||
|
||||
'@codemirror/view@6.35.0':
|
||||
resolution: {integrity: sha512-I0tYy63q5XkaWsJ8QRv5h6ves7kvtrBWjBcnf/bzohFJQc5c14a1AQRdE8QpPF9eMp5Mq2FMm59TCj1gDfE7kw==}
|
||||
'@codemirror/view@6.35.3':
|
||||
resolution: {integrity: sha512-ScY7L8+EGdPl4QtoBiOzE4FELp7JmNUsBvgBcCakXWM2uiv/K89VAzU3BMDscf0DsACLvTKePbd5+cFDTcei6g==}
|
||||
|
||||
'@csstools/selector-resolve-nested@1.1.0':
|
||||
resolution: {integrity: sha512-uWvSaeRcHyeNenKg8tp17EVDRkpflmdyvbE0DHo6D/GdBb6PDnCYYU6gRpXhtICMGMcahQmj2zGxwFM/WC8hCg==}
|
||||
@ -2036,6 +2036,9 @@ packages:
|
||||
resolution: {integrity: sha512-9QOtNffcOF/c1seMCDnjckb3R9WHcG34tky+FHpNKKCW0wc/scYLwMtO+ptyGUfMW0/b/n4qRiALlaFHc9Oj7Q==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
|
||||
'@marijn/find-cluster-break@1.0.2':
|
||||
resolution: {integrity: sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==}
|
||||
|
||||
'@mdx-js/react@3.1.0':
|
||||
resolution: {integrity: sha512-QjHtSaoameoalGnKDT3FoIl4+9RwyTmo9ZJGBdLOks/YOiWHoRDI3PUwEzOE7kEmGcV3AFcp9K6dYu9rEuKLAQ==}
|
||||
peerDependencies:
|
||||
@ -8972,25 +8975,25 @@ snapshots:
|
||||
- '@chromatic-com/playwright'
|
||||
- react
|
||||
|
||||
'@codemirror/autocomplete@6.16.3(@codemirror/language@6.10.2)(@codemirror/state@6.4.1)(@codemirror/view@6.35.0)(@lezer/common@1.2.1)':
|
||||
'@codemirror/autocomplete@6.16.3(@codemirror/language@6.10.6)(@codemirror/state@6.5.0)(@codemirror/view@6.35.3)(@lezer/common@1.2.1)':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.10.2
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/view': 6.35.0
|
||||
'@codemirror/language': 6.10.6
|
||||
'@codemirror/state': 6.5.0
|
||||
'@codemirror/view': 6.35.3
|
||||
'@lezer/common': 1.2.1
|
||||
|
||||
'@codemirror/commands@6.6.0':
|
||||
'@codemirror/commands@6.7.1':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.10.2
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/view': 6.35.0
|
||||
'@codemirror/language': 6.10.6
|
||||
'@codemirror/state': 6.5.0
|
||||
'@codemirror/view': 6.35.3
|
||||
'@lezer/common': 1.2.1
|
||||
|
||||
'@codemirror/lang-css@6.3.0(@codemirror/view@6.35.0)':
|
||||
'@codemirror/lang-css@6.3.0(@codemirror/view@6.35.3)':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.16.3(@codemirror/language@6.10.2)(@codemirror/state@6.4.1)(@codemirror/view@6.35.0)(@lezer/common@1.2.1)
|
||||
'@codemirror/language': 6.10.2
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/autocomplete': 6.16.3(@codemirror/language@6.10.6)(@codemirror/state@6.5.0)(@codemirror/view@6.35.3)(@lezer/common@1.2.1)
|
||||
'@codemirror/language': 6.10.6
|
||||
'@codemirror/state': 6.5.0
|
||||
'@lezer/common': 1.2.1
|
||||
'@lezer/css': 1.1.9
|
||||
transitivePeerDependencies:
|
||||
@ -8998,74 +9001,76 @@ snapshots:
|
||||
|
||||
'@codemirror/lang-html@6.4.9':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.16.3(@codemirror/language@6.10.2)(@codemirror/state@6.4.1)(@codemirror/view@6.35.0)(@lezer/common@1.2.1)
|
||||
'@codemirror/lang-css': 6.3.0(@codemirror/view@6.35.0)
|
||||
'@codemirror/autocomplete': 6.16.3(@codemirror/language@6.10.6)(@codemirror/state@6.5.0)(@codemirror/view@6.35.3)(@lezer/common@1.2.1)
|
||||
'@codemirror/lang-css': 6.3.0(@codemirror/view@6.35.3)
|
||||
'@codemirror/lang-javascript': 6.2.2
|
||||
'@codemirror/language': 6.10.2
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/view': 6.35.0
|
||||
'@codemirror/language': 6.10.6
|
||||
'@codemirror/state': 6.5.0
|
||||
'@codemirror/view': 6.35.3
|
||||
'@lezer/common': 1.2.1
|
||||
'@lezer/css': 1.1.9
|
||||
'@lezer/html': 1.3.10
|
||||
|
||||
'@codemirror/lang-javascript@6.2.2':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.16.3(@codemirror/language@6.10.2)(@codemirror/state@6.4.1)(@codemirror/view@6.35.0)(@lezer/common@1.2.1)
|
||||
'@codemirror/language': 6.10.2
|
||||
'@codemirror/lint': 6.8.1
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/view': 6.35.0
|
||||
'@codemirror/autocomplete': 6.16.3(@codemirror/language@6.10.6)(@codemirror/state@6.5.0)(@codemirror/view@6.35.3)(@lezer/common@1.2.1)
|
||||
'@codemirror/language': 6.10.6
|
||||
'@codemirror/lint': 6.8.4
|
||||
'@codemirror/state': 6.5.0
|
||||
'@codemirror/view': 6.35.3
|
||||
'@lezer/common': 1.2.1
|
||||
'@lezer/javascript': 1.4.18
|
||||
|
||||
'@codemirror/lang-json@6.0.1':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.10.2
|
||||
'@codemirror/language': 6.10.6
|
||||
'@lezer/json': 1.0.2
|
||||
|
||||
'@codemirror/lang-markdown@6.3.0':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.16.3(@codemirror/language@6.10.2)(@codemirror/state@6.4.1)(@codemirror/view@6.35.0)(@lezer/common@1.2.1)
|
||||
'@codemirror/autocomplete': 6.16.3(@codemirror/language@6.10.6)(@codemirror/state@6.5.0)(@codemirror/view@6.35.3)(@lezer/common@1.2.1)
|
||||
'@codemirror/lang-html': 6.4.9
|
||||
'@codemirror/language': 6.10.2
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/view': 6.35.0
|
||||
'@codemirror/language': 6.10.6
|
||||
'@codemirror/state': 6.5.0
|
||||
'@codemirror/view': 6.35.3
|
||||
'@lezer/common': 1.2.1
|
||||
'@lezer/markdown': 1.3.1
|
||||
|
||||
'@codemirror/language@6.10.2':
|
||||
'@codemirror/language@6.10.6':
|
||||
dependencies:
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/view': 6.35.0
|
||||
'@codemirror/state': 6.5.0
|
||||
'@codemirror/view': 6.35.3
|
||||
'@lezer/common': 1.2.1
|
||||
'@lezer/highlight': 1.2.0
|
||||
'@lezer/lr': 1.4.1
|
||||
style-mod: 4.1.2
|
||||
|
||||
'@codemirror/lint@6.8.1':
|
||||
'@codemirror/lint@6.8.4':
|
||||
dependencies:
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/view': 6.35.0
|
||||
'@codemirror/state': 6.5.0
|
||||
'@codemirror/view': 6.35.3
|
||||
crelt: 1.0.6
|
||||
|
||||
'@codemirror/search@6.5.6':
|
||||
'@codemirror/search@6.5.8':
|
||||
dependencies:
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/view': 6.35.0
|
||||
'@codemirror/state': 6.5.0
|
||||
'@codemirror/view': 6.35.3
|
||||
crelt: 1.0.6
|
||||
|
||||
'@codemirror/state@6.4.1': {}
|
||||
'@codemirror/state@6.5.0':
|
||||
dependencies:
|
||||
'@marijn/find-cluster-break': 1.0.2
|
||||
|
||||
'@codemirror/theme-one-dark@6.1.2':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.10.2
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/view': 6.35.0
|
||||
'@codemirror/language': 6.10.6
|
||||
'@codemirror/state': 6.5.0
|
||||
'@codemirror/view': 6.35.3
|
||||
'@lezer/highlight': 1.2.0
|
||||
|
||||
'@codemirror/view@6.35.0':
|
||||
'@codemirror/view@6.35.3':
|
||||
dependencies:
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/state': 6.5.0
|
||||
style-mod: 4.1.2
|
||||
w3c-keyname: 2.2.8
|
||||
|
||||
@ -9409,13 +9414,13 @@ snapshots:
|
||||
|
||||
'@histoire/controls@0.17.17(vite@5.4.10(@types/node@22.9.0)(lightningcss@1.25.1))':
|
||||
dependencies:
|
||||
'@codemirror/commands': 6.6.0
|
||||
'@codemirror/commands': 6.7.1
|
||||
'@codemirror/lang-json': 6.0.1
|
||||
'@codemirror/language': 6.10.2
|
||||
'@codemirror/lint': 6.8.1
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/language': 6.10.6
|
||||
'@codemirror/lint': 6.8.4
|
||||
'@codemirror/state': 6.5.0
|
||||
'@codemirror/theme-one-dark': 6.1.2
|
||||
'@codemirror/view': 6.35.0
|
||||
'@codemirror/view': 6.35.3
|
||||
'@histoire/shared': 0.17.17(vite@5.4.10(@types/node@22.9.0)(lightningcss@1.25.1))
|
||||
'@histoire/vendors': 0.17.17
|
||||
transitivePeerDependencies:
|
||||
@ -9675,6 +9680,8 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@marijn/find-cluster-break@1.0.2': {}
|
||||
|
||||
'@mdx-js/react@3.1.0(@types/react@18.3.3)(react@18.3.1)':
|
||||
dependencies:
|
||||
'@types/mdx': 2.0.13
|
||||
@ -12554,13 +12561,13 @@ snapshots:
|
||||
|
||||
codemirror@6.0.1(@lezer/common@1.2.1):
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.16.3(@codemirror/language@6.10.2)(@codemirror/state@6.4.1)(@codemirror/view@6.35.0)(@lezer/common@1.2.1)
|
||||
'@codemirror/commands': 6.6.0
|
||||
'@codemirror/language': 6.10.2
|
||||
'@codemirror/lint': 6.8.1
|
||||
'@codemirror/search': 6.5.6
|
||||
'@codemirror/state': 6.4.1
|
||||
'@codemirror/view': 6.35.0
|
||||
'@codemirror/autocomplete': 6.16.3(@codemirror/language@6.10.6)(@codemirror/state@6.5.0)(@codemirror/view@6.35.3)(@lezer/common@1.2.1)
|
||||
'@codemirror/commands': 6.7.1
|
||||
'@codemirror/language': 6.10.6
|
||||
'@codemirror/lint': 6.8.4
|
||||
'@codemirror/search': 6.5.8
|
||||
'@codemirror/state': 6.5.0
|
||||
'@codemirror/view': 6.35.3
|
||||
transitivePeerDependencies:
|
||||
- '@lezer/common'
|
||||
|
||||
|