Move documentation into documentable types (#11441)
Move documentation into documentable types (implements #11302).

# Important Notes

GUI:
- Distinguish expression and statement:
  - `Ast.Ast` is still present, as the base class for AST objects. Most references to `Ast.Ast` are now references to `Ast.Expression`. Operations on blocks use `Ast.Statement`.
  - `Ast.parse` has been replaced with `Ast.parseExpression`, `Ast.parseStatement`, and `Ast.parseBlock`.
  - `syncToCode` is internally context-aware; it parses the provided code appropriately depending on whether its AST is an expression, a statement, or the top level of a module.
- Remove the `wrappingExpression` / `innerExpression` APIs: wrapper types have been eliminated; modifier lines are now fields inside parent types.
- Simplify AST printing:
  - Fully implement autospacing in `concreteChildren` implementations; the type returned by `concreteChildren` now ensures that spacing has been fully resolved.
  - Eliminate `printBlock` / `printDocs`: `concreteChildren` is now aware of indentation context and responsible for indenting its child lines.
- The `Pattern` type is now parameterized to identify the AST type it constructs. The `Pattern.parseExpression` function helps create a `Pattern<Expression>`.
- Refactor `performCollapse` for testability.
- e2e tests: improve the table visualization test. It still doesn't pass on my Mac, but these changes are necessary, if not sufficient.

Compiler (TreeToIr):
- An expression in statement context is now found in an `ExpressionStatement` wrapper.
- Documentation for a `Function` is now found inside the function node.
- Deduplicate some polyglot-function logic.
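For orientation, here is a minimal sketch of the parsing-API split described above, assembled from call sites visible in the diff below (`Ast.parseExpression(ident, edit)!`, `Ast.parseStatement('newNode')!`, `Ast.parseBlock(code)`). The example inputs and the logging are illustrative assumptions, not part of the change.

```ts
import { Ast } from '@/util/ast'

// Previously a single entry point handled any fragment of code:
//   const ast = Ast.parse('x + 1', edit)
// Now the caller states what kind of AST it expects.
const edit = Ast.MutableModule.Transient()

// Expression and statement parsing may fail, so call sites in this commit
// either assert success with `!` or handle `undefined` explicitly.
const expr = Ast.parseExpression('x + 1', edit)!
const stmt = Ast.parseStatement('y = x + 1')!

// A whole block (e.g. pasted code or a function body) is parsed separately.
const block = Ast.parseBlock('main =\n    x = 1')

console.log(expr.code(), stmt.code(), block.code())
```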
This commit is contained in:
parent d3beac3a90
commit 2b3bd2cc90
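One more hedged sketch before the diff, covering the newly parameterized `Pattern` type. The calls mirror usages in the diff (`Pattern.parseExpression`, `instantiateCopied`, `Pattern.new<Ast.Expression>`, `test`); the `Pattern` import path and the example inputs are assumptions.

```ts
import { Ast } from '@/util/ast'
import { Pattern } from '@/util/ast/match' // import path assumed

// A placeholder pattern now records which AST type it constructs,
// so instantiating it yields a typed expression rather than a bare Ast.
const toTable = Pattern.parseExpression('__.to Table')!
const call = toTable.instantiateCopied([Ast.parseExpression('my_column')!])
console.log(call.code()) // expected to print something like `my_column.to Table`

// Programmatic patterns take the constructed type as a type parameter.
const removeWarnings = Pattern.new<Ast.Expression>((ast) =>
  Ast.PropertyAccess.new(ast.module, ast, Ast.identifier('remove_warnings')!),
)
console.log(removeWarnings.test(Ast.parseExpression('value.remove_warnings')!))
```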
@@ -2,6 +2,7 @@ import { test, type Page } from '@playwright/test'
import * as actions from './actions'
import { expect } from './customExpect'
import { mockExpressionUpdate } from './expressionUpdates'
import { CONTROL_KEY } from './keyboard'
import * as locate from './locate'
import { graphNodeByBinding } from './locate'

@@ -36,19 +37,19 @@ test('Copy from Table Visualization', async ({ page, context }) => {
await context.grantPermissions(['clipboard-read', 'clipboard-write'])
await actions.goToGraph(page)

actions.openVisualization(page, 'Table')
await actions.openVisualization(page, 'Table')
const tableVisualization = locate.tableVisualization(page)
await expect(tableVisualization).toExist()
await tableVisualization.getByText('0,0').hover()
await page.mouse.down()
await tableVisualization.getByText('2,1').hover()
await page.mouse.up()
await page.keyboard.press('Control+C')
await page.keyboard.press(`${CONTROL_KEY}+C`)

// Paste to Node.
await actions.clickAtBackground(page)
const nodesCount = await locate.graphNode(page).count()
await page.keyboard.press('Control+V')
await page.keyboard.press(`${CONTROL_KEY}+V`)
await expect(locate.graphNode(page)).toHaveCount(nodesCount + 1)
await expect(locate.graphNode(page).last().locator('input')).toHaveValue(
'0,0\t0,11,0\t1,12,0\t2,1',

@@ -60,7 +61,7 @@ test('Copy from Table Visualization', async ({ page, context }) => {
await expect(widget).toBeVisible()
await widget.getByRole('button', { name: 'Add new column' }).click()
await widget.locator('.ag-cell', { hasNotText: /0/ }).first().click()
await page.keyboard.press('Control+V')
await page.keyboard.press(`${CONTROL_KEY}+V`)
await expect(widget.locator('.ag-cell')).toHaveText([
'0',
'0,0',
@@ -7,7 +7,6 @@ import * as React from 'react'

// This must not be a `symbol` as it cannot be sent to Playright.
/** The type of the state returned by {@link useRefresh}. */
// eslint-disable-next-line @typescript-eslint/no-empty-object-type
export interface RefreshState {}

/** A hook that contains no state. It is used to trigger React re-renders. */

@@ -43,7 +43,6 @@ export interface LocalStorageKeyMetadata<K extends LocalStorageKey> {
* The data that can be stored in a {@link LocalStorage}.
* Declaration merge into this interface to add a new key.
*/
// eslint-disable-next-line @typescript-eslint/no-empty-object-type
export interface LocalStorageData {}

// =======================

@@ -26,7 +26,7 @@ export function useAI(
const lsRpc = project.lsRpcConnection
const sourceNodeId = graphDb.getIdentDefiningNode(sourceIdentifier)
const contextId =
sourceNodeId && graphDb.nodeIdToNode.get(sourceNodeId)?.outerExpr.externalId
sourceNodeId && graphDb.nodeIdToNode.get(sourceNodeId)?.outerAst.externalId
if (!contextId) return Err(`Cannot find node with name ${sourceIdentifier}`)

const prompt = await withContext(

@@ -52,6 +52,7 @@ import type { SuggestionId } from '@/stores/suggestionDatabase/entry'
import { suggestionDocumentationUrl, type Typename } from '@/stores/suggestionDatabase/entry'
import { provideVisualizationStore } from '@/stores/visualization'
import { bail } from '@/util/assert'
import { Ast } from '@/util/ast'
import type { AstId } from '@/util/ast/abstract'
import { colorFromString } from '@/util/colors'
import { partition } from '@/util/data/array'
@@ -579,7 +580,7 @@ function clearFocus() {
function createNodesFromSource(sourceNode: NodeId, options: NodeCreationOptions[]) {
const sourcePort = graphStore.db.getNodeFirstOutputPort(sourceNode)
if (sourcePort == null) return
const sourcePortAst = graphStore.viewModule.get(sourcePort)
const sourcePortAst = graphStore.viewModule.get(sourcePort) as Ast.Expression
const [toCommit, toEdit] = partition(options, (opts) => opts.commit)
createNodes(
toCommit.map((options: NodeCreationOptions) => ({

@@ -631,14 +632,14 @@ function collapseNodes() {
}
const selectedNodeRects = filterDefined(Array.from(selected, graphStore.visibleArea))
graphStore.edit((edit) => {
const { refactoredExpressionAstId, collapsedNodeIds, outputAstId } = performCollapse(
const { collapsedCallRoot, collapsedNodeIds, outputAstId } = performCollapse(
info.value,
edit.getVersion(topLevel),
graphStore.db,
currentMethodName,
)
const position = collapsedNodePlacement(selectedNodeRects)
edit.get(refactoredExpressionAstId).mutableNodeMetadata().set('position', position.xy())
edit.get(collapsedCallRoot).mutableNodeMetadata().set('position', position.xy())
if (outputAstId != null) {
const collapsedNodeRects = filterDefined(
Array.from(collapsedNodeIds, graphStore.visibleArea),

@@ -101,7 +101,7 @@ function createEdge(source: AstId, target: PortId) {
// Creating this edge would create a circular dependency. Prevent that and display error.
toast.error('Could not connect due to circular dependency.')
} else {
const identAst = Ast.parse(ident, edit)
const identAst = Ast.parseExpression(ident, edit)!
if (!graph.updatePortValue(edit, target, identAst)) {
if (isAstId(target)) {
console.warn(`Failed to connect edge to port ${target}, falling back to direct edit.`)
@@ -13,10 +13,7 @@ const textEditor = ref<ComponentInstance<typeof PlainTextEditor>>()
const textEditorContent = computed(() => textEditor.value?.contentElement)

const graphStore = useGraphStore()
const { documentation } = useAstDocumentation(
graphStore,
() => props.node.docs ?? props.node.outerExpr,
)
const { documentation } = useAstDocumentation(graphStore, () => props.node.outerAst)

syncRef(editing, useFocusDelayed(textEditorContent).focused)
</script>

@@ -126,7 +126,10 @@ export function useVisualizationData({
const preprocessor = visPreprocessor.value
const args = preprocessor.positionalArgumentsExpressions
const tempModule = Ast.MutableModule.Transient()
const preprocessorModule = Ast.parse(preprocessor.visualizationModule, tempModule)
const preprocessorModule = Ast.parseExpression(
preprocessor.visualizationModule,
tempModule,
)!
// TODO[ao]: it work with builtin visualization, but does not work in general case.
// Tracked in https://github.com/orgs/enso-org/discussions/6832#discussioncomment-7754474.
if (!isIdentifier(preprocessor.expression)) {

@@ -140,9 +143,9 @@ export function useVisualizationData({
)
const preprocessorInvocation = Ast.App.PositionalSequence(preprocessorQn, [
Ast.Wildcard.new(tempModule),
...args.map((arg) => Ast.Group.new(tempModule, Ast.parse(arg, tempModule))),
...args.map((arg) => Ast.Group.new(tempModule, Ast.parseExpression(arg, tempModule)!)),
])
const rhs = Ast.parse(dataSourceValue.expression, tempModule)
const rhs = Ast.parseExpression(dataSourceValue.expression, tempModule)!
const expression = Ast.OprApp.new(tempModule, preprocessorInvocation, '<|', rhs)
return projectStore.executeExpression(dataSourceValue.contextId, expression.code())
} catch (e) {

@@ -13,7 +13,7 @@ import { computed, toRef, watch } from 'vue'
import { DisplayIcon } from './widgets/WidgetIcon.vue'

const props = defineProps<{
ast: Ast.Ast
ast: Ast.Expression
nodeId: NodeId
nodeElement: HTMLElement | undefined
nodeType: NodeType
@@ -68,7 +68,7 @@ const testNodeInputs: {
{ code: '## Documentation\nfoo = 2 + 2' },
]
const testNodes = testNodeInputs.map(({ code, visualization, colorOverride }) => {
const root = Ast.Ast.parse(code)
const root = [...Ast.parseBlock(code).statements()][0]!
root.setNodeMetadata({ visualization, colorOverride })
const node = nodeFromAst(root, false)
assertDefined(node)

@@ -82,7 +82,9 @@ test.each([...testNodes.map((node) => [node]), testNodes])(
const clipboardItem = clipboardItemFromTypes(nodesToClipboardData(sourceNodes))
const pastedNodes = await nodesFromClipboardContent([clipboardItem])
sourceNodes.forEach((sourceNode, i) => {
expect(pastedNodes[i]?.documentation).toBe(sourceNode.docs?.documentation())
const documentation =
sourceNode.outerAst.isStatement() ? sourceNode.outerAst.documentationText() : undefined
expect(pastedNodes[i]?.documentation).toBe(documentation)
expect(pastedNodes[i]?.expression).toBe(sourceNode.innerExpr.code())
expect(pastedNodes[i]?.metadata?.colorOverride).toBe(sourceNode.colorOverride)
expect(pastedNodes[i]?.metadata?.visualization).toBe(sourceNode.vis)

@@ -1,11 +1,18 @@
import { prepareCollapsedInfo } from '@/components/GraphEditor/collapsing'
import { performCollapseImpl, prepareCollapsedInfo } from '@/components/GraphEditor/collapsing'
import { GraphDb, type NodeId } from '@/stores/graph/graphDatabase'
import { assert } from '@/util/assert'
import { Ast, RawAst } from '@/util/ast'
import { findExpressions } from '@/util/ast/__tests__/testCase'
import { unwrap } from '@/util/data/result'
import { tryIdentifier } from '@/util/qualifiedName'
import { expect, test } from 'vitest'
import { watchEffect } from 'vue'
import { Identifier } from 'ydoc-shared/ast'
import { nodeIdFromOuterAst } from '../../../stores/graph/graphDatabase'

// ===============================
// === Collapse Analysis Tests ===
// ===============================

function setupGraphDb(code: string, graphDb: GraphDb) {
const { root, toRaw, getSpan } = Ast.parseExtended(code)
@@ -211,3 +218,73 @@ main =
expect(refactored.pattern).toEqual('sum')
expect(refactored.arguments).toEqual(['input', 'four'])
})

// ================================
// === Collapse Execution Tests ===
// ================================

test('Perform collapse', () => {
const root = Ast.parseModule(
[
'main =',
' keep1 = 1',
' extract1 = keep1',
' keep2 = 2',
' extract2 = extract1 + 1',
' target = extract2',
].join('\n'),
)
root.module.setRoot(root)
const before = findExpressions(root, {
'keep1 = 1': Ast.Assignment,
'extract1 = keep1': Ast.Assignment,
'keep2 = 2': Ast.Assignment,
'extract2 = extract1 + 1': Ast.Assignment,
'target = extract2': Ast.Assignment,
})
const statementsToExtract = new Set<Ast.AstId>()
const statementToReplace = before['target = extract2'].id
statementsToExtract.add(before['extract1 = keep1'].id)
statementsToExtract.add(before['extract2 = extract1 + 1'].id)
statementsToExtract.add(statementToReplace)
const { collapsedCallRoot, outputAstId, collapsedNodeIds } = performCollapseImpl(
root,
{
args: ['keep1' as Identifier],
statementsToExtract,
statementToReplace: before['target = extract2'].id,
},
'main',
)
expect(root.code()).toBe(
[
'## ICON group',
'collapsed keep1 =',
' extract1 = keep1',
' extract2 = extract1 + 1',
' target = extract2',
' target',
'',
'main =',
' keep1 = 1',
' keep2 = 2',
' target = Main.collapsed keep1',
].join('\n'),
)
const after = findExpressions(root, {
'extract1 = keep1': Ast.Assignment,
'extract2 = extract1 + 1': Ast.Assignment,
'target = extract2': Ast.Assignment,
target: Ast.ExpressionStatement,
'keep1 = 1': Ast.Assignment,
'keep2 = 2': Ast.Assignment,
'target = Main.collapsed keep1': Ast.Assignment,
})
expect(collapsedNodeIds).toStrictEqual(
[after['target = extract2'], after['extract2 = extract1 + 1'], after['extract1 = keep1']].map(
nodeIdFromOuterAst,
),
)
expect(outputAstId).toBe(after['target'].expression.id)
expect(collapsedCallRoot).toBe(after['target = Main.collapsed keep1'].expression.id)
})
@@ -148,7 +148,7 @@ const spreadsheetDecoder: ClipboardDecoder<CopiedNode[]> = {
},
}

const toTable = computed(() => Pattern.parse('__.to Table'))
const toTable = computed(() => Pattern.parseExpression('__.to Table'))

/** Create Enso Expression generating table from this tsvData. */
export function tsvTableToEnsoExpression(tsvData: string) {

@@ -186,9 +186,10 @@ export function writeClipboard(data: MimeData) {
// === Serializing nodes ===

function nodeStructuredData(node: Node): CopiedNode {
const documentation = node.outerAst.isStatement() ? node.outerAst.documentationText() : undefined
return {
expression: node.innerExpr.code(),
documentation: node.docs?.documentation(),
documentation,
metadata: node.rootExpr.serializeMetadata(),
...(node.pattern ? { binding: node.pattern.code() } : {}),
}

@@ -204,6 +205,6 @@ export function clipboardNodeData(nodes: CopiedNode[]): MimeData {
export function nodesToClipboardData(nodes: Node[]): MimeData {
return {
...clipboardNodeData(nodes.map(nodeStructuredData)),
'text/plain': nodes.map((node) => node.outerExpr.code()).join('\n'),
'text/plain': nodes.map((node) => node.outerAst.code()).join('\n'),
}
}
@@ -1,14 +1,9 @@
import { asNodeId, GraphDb, nodeIdFromOuterExpr, type NodeId } from '@/stores/graph/graphDatabase'
import { assert, assertDefined } from '@/util/assert'
import { GraphDb, NodeId, nodeIdFromOuterAst } from '@/stores/graph/graphDatabase'
import { assert } from '@/util/assert'
import { Ast } from '@/util/ast'
import { autospaced, isIdentifier, moduleMethodNames, type Identifier } from '@/util/ast/abstract'
import { filterDefined } from '@/util/data/iterable'
import { Err, Ok, unwrap, type Result } from '@/util/data/result'
import {
isIdentifierOrOperatorIdentifier,
tryIdentifier,
type IdentifierOrOperatorIdentifier,
} from '@/util/qualifiedName'
import { Identifier, isIdentifier, moduleMethodNames } from '@/util/ast/abstract'
import { Err, Ok, Result, unwrap } from '@/util/data/result'
import { tryIdentifier } from '@/util/qualifiedName'
import * as set from 'lib0/set'

// === Types ===

@@ -24,7 +19,7 @@ interface ExtractedInfo {
/** Nodes with these ids should be moved to the function body, in their original order. */
ids: Set<NodeId>
/** The output information of the function. */
output: Output | null
output: Output
/** The list of extracted function’s argument names. */
inputs: Identifier[]
}

@@ -110,9 +105,11 @@ export function prepareCollapsedInfo(
output = { node: arbitraryLeaf, identifier }
}

const pattern = graphDb.nodeIdToNode.get(output.node)?.pattern?.code() ?? ''
assert(isIdentifier(pattern))
const pattern = graphDb.nodeIdToNode.get(output.node)?.pattern?.code()
assert(pattern != null && isIdentifier(pattern))
const inputs = Array.from(inputSet)

assert(selected.has(output.node))
return Ok({
extracted: {
ids: selected,

@@ -128,10 +125,7 @@ export function prepareCollapsedInfo(
}

/** Generate a safe method name for a collapsed function using `baseName` as a prefix. */
function findSafeMethodName(
topLevel: Ast.BodyBlock,
baseName: IdentifierOrOperatorIdentifier,
): IdentifierOrOperatorIdentifier {
function findSafeMethodName(topLevel: Ast.BodyBlock, baseName: Identifier): Identifier {
const allIdentifiers = moduleMethodNames(topLevel)
if (!allIdentifiers.has(baseName)) {
return baseName
@@ -141,107 +135,98 @@ function findSafeMethodName(
index++
}
const name = `${baseName}${index}`
assert(isIdentifierOrOperatorIdentifier(name))
assert(isIdentifier(name))
return name
}

// === performCollapse ===

// We support working inside `Main` module of the project at the moment.
const MODULE_NAME = 'Main' as IdentifierOrOperatorIdentifier
const COLLAPSED_FUNCTION_NAME = 'collapsed' as IdentifierOrOperatorIdentifier
const MODULE_NAME = 'Main' as Identifier
const COLLAPSED_FUNCTION_NAME = 'collapsed' as Identifier

interface CollapsingResult {
/** The ID of the node refactored to the collapsed function call. */
refactoredNodeId: NodeId
refactoredExpressionAstId: Ast.AstId
collapsedCallRoot: Ast.AstId
/**
* IDs of nodes inside the collapsed function, except the output node.
* The order of these IDs is reversed comparing to the order of nodes in the source code.
*/
collapsedNodeIds: NodeId[]
/** ID of the output AST node inside the collapsed function. */
outputAstId?: Ast.AstId | undefined
outputAstId: Ast.AstId
}

interface PreparedCollapseInfo {
args: Identifier[]
statementsToExtract: Set<Ast.AstId>
statementToReplace: Ast.AstId
}

/** Perform the actual AST refactoring for collapsing nodes. */
export function performCollapse(
info: CollapsedInfo,
topLevel: Ast.MutableBodyBlock,
db: GraphDb,
graphDb: GraphDb,
currentMethodName: string,
): CollapsingResult {
const nodeIdToStatementId = (nodeId: NodeId) => graphDb.nodeIdToNode.get(nodeId)!.outerAst.id
const preparedInfo = {
args: info.extracted.inputs,
statementsToExtract: new Set([...info.extracted.ids].map(nodeIdToStatementId)),
statementToReplace: nodeIdToStatementId(info.refactored.id),
outputIdentifier: info.extracted.output.identifier,
}
return performCollapseImpl(topLevel, preparedInfo, currentMethodName)
}

/** @internal */
export function performCollapseImpl(
topLevel: Ast.MutableBodyBlock,
info: PreparedCollapseInfo,
currentMethodName: string,
) {
const edit = topLevel.module
const functionAst = Ast.findModuleMethod(topLevel, currentMethodName)
assertDefined(functionAst)
const functionBlock = edit.getVersion(functionAst).bodyAsBlock()
const posToInsert = findInsertionPos(topLevel, currentMethodName)
const collapsedName = findSafeMethodName(topLevel, COLLAPSED_FUNCTION_NAME)
const astIdsToExtract = new Set(
[...info.extracted.ids].map((nodeId) => db.nodeIdToNode.get(nodeId)?.outerExpr.id),
)
const astIdToReplace = db.nodeIdToNode.get(info.refactored.id)?.outerExpr.id
const {
ast: refactoredAst,
nodeId: refactoredNodeId,
expressionAstId: refactoredExpressionAstId,
} = collapsedCallAst(info, collapsedName, edit)
const collapsed: Ast.Owned[] = []
const { statement: currentMethod, index: currentMethodLine } = Ast.findModuleMethod(
topLevel,
currentMethodName,
)!

// Update the definition of the refactored function.
functionBlock.updateLines((lines) => {
const refactored: Ast.OwnedBlockLine[] = []
for (const line of lines) {
const ast = line.expression?.node
if (!ast) continue
if (astIdsToExtract.has(ast.id)) {
collapsed.push(ast)
if (ast.id === astIdToReplace) {
refactored.push({ expression: autospaced(refactoredAst) })
}
} else {
refactored.push(line)
}
}
return refactored
const extractedLines = currentMethod
.bodyAsBlock()
.extractIf(({ id }) => info.statementsToExtract.has(id) && id !== info.statementToReplace)
const collapsedCall = Ast.App.PositionalSequence(
Ast.PropertyAccess.new(edit, Ast.Ident.new(edit, MODULE_NAME), collapsedName),
info.args.map((arg) => Ast.Ident.new(edit, arg)),
)
const statementToReplace = edit.get(info.statementToReplace)
assert(statementToReplace instanceof Ast.MutableAssignment)
const outputIdentifier = statementToReplace.pattern.code() as Identifier
extractedLines.push({
statement: {
whitespace: undefined,
node: statementToReplace.replace(
Ast.Assignment.new(outputIdentifier, collapsedCall, { edit }),
),
},
})

const collapsedNodeIds = extractedLines
.map(({ statement }) => statement && nodeIdFromOuterAst(statement.node))
.filter((id) => id != null)
.reverse()

// Insert a new function.
const collapsedNodeIds = [...filterDefined(collapsed.map(nodeIdFromOuterExpr))].reverse()
let outputAstId: Ast.AstId | undefined
const outputIdentifier = info.extracted.output?.identifier
if (outputIdentifier != null) {
const ident = Ast.Ident.new(edit, outputIdentifier)
collapsed.push(ident)
outputAstId = ident.id
}
const argNames = info.extracted.inputs
const collapsedFunction = Ast.Function.fromStatements(edit, collapsedName, argNames, collapsed)
const collapsedFunctionWithIcon = Ast.Documented.new('ICON group', collapsedFunction)
topLevel.insert(posToInsert, collapsedFunctionWithIcon, undefined)
return { refactoredNodeId, refactoredExpressionAstId, collapsedNodeIds, outputAstId }
}

/** Prepare a method call expression for collapsed method. */
function collapsedCallAst(
info: CollapsedInfo,
collapsedName: IdentifierOrOperatorIdentifier,
edit: Ast.MutableModule,
): { ast: Ast.Owned; expressionAstId: Ast.AstId; nodeId: NodeId } {
const pattern = info.refactored.pattern
const args = info.refactored.arguments
const functionName = `${MODULE_NAME}.${collapsedName}`
const expression = functionName + (args.length > 0 ? ' ' : '') + args.join(' ')
const expressionAst = Ast.parse(expression, edit)
const ast = Ast.Assignment.new(edit, pattern, expressionAst)
return { ast, expressionAstId: expressionAst.id, nodeId: asNodeId(expressionAst.externalId) }
}

/** Find the position before the current method to insert a collapsed one. */
function findInsertionPos(topLevel: Ast.BodyBlock, currentMethodName: string): number {
const currentFuncPosition = topLevel.lines.findIndex((line) => {
const expr = line.expression?.node?.innerExpression()
return expr instanceof Ast.Function && expr.name?.code() === currentMethodName
const collapsedBody = Ast.BodyBlock.new(extractedLines, edit)
const outputAst = Ast.Ident.new(edit, outputIdentifier)
collapsedBody.push(outputAst)
const collapsedFunction = Ast.Function.new(collapsedName, info.args, collapsedBody, {
edit,
documentation: 'ICON group',
})
topLevel.insert(currentMethodLine, collapsedFunction, undefined)

return currentFuncPosition === -1 ? 0 : currentFuncPosition
return { collapsedCallRoot: collapsedCall.id, outputAstId: outputAst.id, collapsedNodeIds }
}
@@ -45,7 +45,10 @@ const operatorStyle = computed(() => {
application.value.appTree instanceof Ast.OprApp ||
application.value.appTree instanceof Ast.PropertyAccess
) {
const [_lhs, opr, rhs] = application.value.appTree.concreteChildren()
const [_lhs, opr, rhs] = application.value.appTree.concreteChildren({
verbatim: true,
indent: '',
})
return {
'--whitespace-pre': `${JSON.stringify(opr?.whitespace ?? '')}`,
'--whitespace-post': `${JSON.stringify(rhs?.whitespace ?? '')}`,

@@ -64,14 +64,14 @@ const argumentName = computed(() => {
</script>

<script lang="ts">
function isBoolNode(ast: Ast.Ast) {
function isBoolNode(ast: Ast.Expression) {
const candidate =
ast instanceof Ast.PropertyAccess && ast.lhs?.code() === 'Boolean' ? ast.rhs
: ast instanceof Ast.Ident ? ast.token
: undefined
return candidate && ['True', 'False'].includes(candidate.code())
}
function setBoolNode(ast: Ast.Mutable, value: Identifier): { requiresImport: boolean } {
function setBoolNode(ast: Ast.MutableExpression, value: Identifier): { requiresImport: boolean } {
if (ast instanceof Ast.MutablePropertyAccess) {
ast.setRhs(value)
return { requiresImport: false }
@@ -53,16 +53,15 @@ const label = computed(() => {
}
})

const fileConPattern = Pattern.parse(`${FILE_TYPE}.new __`)
const fileShortConPattern = Pattern.parse(`File.new __`)
const fileConPattern = Pattern.parseExpression(`${FILE_TYPE}.new __`)
const fileShortConPattern = Pattern.parseExpression(`File.new __`)
const currentPath = computed(() => {
if (typeof props.input.value === 'string') {
return props.input.value
} else if (props.input.value) {
const expression = props.input.value.innerExpression()
const expression = props.input.value
const match = fileShortConPattern.match(expression) ?? fileConPattern.match(expression)
const pathAst =
match && match[0] ? expression.module.get(match[0]).innerExpression() : expression
const pathAst = match && match[0] ? expression.module.get(match[0]) : expression
if (pathAst instanceof TextLiteral) {
return pathAst.rawTextContent
}

@@ -70,7 +69,11 @@ const currentPath = computed(() => {
return undefined
})

function makeValue(edit: Ast.MutableModule, useFileConstructor: boolean, path: string): Ast.Owned {
function makeValue(
edit: Ast.MutableModule,
useFileConstructor: boolean,
path: string,
): Ast.Owned<Ast.MutableExpression> {
if (useFileConstructor) {
const arg = Ast.TextLiteral.new(path, edit)
const requiredImport = {
@@ -101,11 +101,11 @@ function handleArgUpdate(update: WidgetUpdate): boolean {
// Perform appropriate AST update, either insertion or deletion.
if (value != null && argApp?.argument instanceof ArgumentPlaceholder) {
/* Case: Inserting value to a placeholder. */
let newArg: Ast.Owned
let newArg: Ast.Owned<Ast.MutableExpression>
if (value instanceof Ast.Ast) {
newArg = value
} else {
newArg = Ast.parse(value, edit)
newArg = Ast.parseExpression(value, edit)!
}
const name =
argApp.argument.insertAsNamed && isIdentifier(argApp.argument.argInfo.name) ?

@@ -148,8 +148,7 @@ function handleArgUpdate(update: WidgetUpdate): boolean {

// Named argument can always be removed immediately. Replace the whole application with its
// target, effectively removing the argument from the call.
const func = edit.take(argApp.appTree.function.id)
assert(func != null)
const func = edit.getVersion(argApp.appTree.function).take()
props.onUpdate({
edit,
portUpdate: {

@@ -163,7 +162,7 @@ function handleArgUpdate(update: WidgetUpdate): boolean {

// Infix application is removed as a whole. Only the target is kept.
if (argApp.appTree.lhs) {
const lhs = edit.take(argApp.appTree.lhs.id)
const lhs = edit.getVersion(argApp.appTree.lhs).take()
props.onUpdate({
edit,
portUpdate: {

@@ -188,9 +187,9 @@ function handleArgUpdate(update: WidgetUpdate): boolean {
const appTree = edit.getVersion(argApp.appTree)
if (graph.db.isNodeId(appTree.externalId)) {
// If the modified application is a node root, preserve its identity and metadata.
appTree.replaceValue(appTree.function.take())
appTree.updateValue((appTree) => appTree.function.take())
} else {
appTree.replace(appTree.function.take())
appTree.update((appTree) => appTree.function.take())
}
props.onUpdate({ edit })
return true
@@ -60,13 +60,13 @@ test.each`
...(attachedSpan != null ? { attached: attachedSpan as [number, number] } : {}),
}
const { ast, eid, id } = parseWithSpans(code, spans)
const line = ast.lines[0]?.expression
assert(line != null)
expect(line.node.externalId).toBe(eid('entireFunction'))
const node = (ast.lines[0]?.statement?.node as Ast.ExpressionStatement).expression
assert(node != null)
expect(node.externalId).toBe(eid('entireFunction'))

let visConfig: Ref<Opt<NodeVisualizationConfiguration>> | undefined
useWidgetFunctionCallInfo(
WidgetInput.FromAst(line.node),
WidgetInput.FromAst(node),
{
getMethodCallInfo(astId) {
if (astId === id('entireFunction')) {

@@ -93,7 +93,7 @@ test.each`
},
{
useVisualizationData(config) {
expect(visConfig, 'Only one visualizaiton is expected').toBeUndefined()
expect(visConfig, 'Only one visualization is expected').toBeUndefined()
visConfig = config
return ref(null)
},

@@ -29,7 +29,7 @@ export const GET_WIDGETS_METHOD = 'get_widget_json'
* expression updates.
*/
export function useWidgetFunctionCallInfo(
input: ToValue<WidgetInput & { value: Ast.Ast }>,
input: ToValue<WidgetInput & { value: Ast.Expression }>,
graphDb: {
getMethodCallInfo(id: AstId): MethodCallInfo | undefined
getExpressionInfo(id: AstId): ExpressionInfo | undefined
@@ -3,12 +3,23 @@ import NodeWidget from '@/components/GraphEditor/NodeWidget.vue'
import { WidgetInput, defineWidget, widgetProps } from '@/providers/widgetRegistry'
import { Ast } from '@/util/ast'
import { computed } from 'vue'
import { isToken } from 'ydoc-shared/ast'

const props = defineProps(widgetProps(widgetDefinition))

const spanClass = computed(() => props.input.value.typeName())

function transformChild(child: Ast.Ast | Ast.Token) {
function* expressionChildren(expression: Ast.Expression) {
for (const child of expression.children()) {
if (isToken(child) || child.isExpression()) {
yield child
} else {
console.error('Unable to render non-expression AST node in component', child)
}
}
}

function transformChild(child: Ast.Expression | Ast.Token) {
const childInput = WidgetInput.FromAst(child)
if (props.input.value instanceof Ast.PropertyAccess && child.id === props.input.value.lhs?.id)
childInput.forcePort = true

@@ -36,8 +47,8 @@ export const widgetDefinition = defineWidget(
<template>
<div class="WidgetHierarchy" :class="spanClass">
<NodeWidget
v-for="(child, index) in props.input.value.children()"
:key="child.id ?? index"
v-for="child in expressionChildren(props.input.value)"
:key="child.id"
:input="transformChild(child)"
/>
</div>
@@ -42,7 +42,7 @@ const dropdownElement = ref<HTMLElement>()
const activityElement = ref<HTMLElement>()

const editedWidget = ref<string>()
const editedValue = ref<Ast.Owned | string | undefined>()
const editedValue = ref<Ast.Owned<Ast.MutableExpression> | string | undefined>()
const isHovered = ref(false)
/** See @{link Actions.setActivity} */
const activity = shallowRef<VNode>()

@@ -96,7 +96,7 @@ const { floatingStyles } = dropdownStyles(dropdownElement, true)
const { floatingStyles: activityStyles } = dropdownStyles(activityElement, false)

class ExpressionTag {
private cachedExpressionAst: Ast.Ast | undefined
private cachedExpressionAst: Ast.Expression | undefined

constructor(
readonly expression: string,

@@ -135,7 +135,7 @@ class ExpressionTag {

get expressionAst() {
if (this.cachedExpressionAst == null) {
this.cachedExpressionAst = Ast.parse(this.expression)
this.cachedExpressionAst = Ast.parseExpression(this.expression)
}
return this.cachedExpressionAst
}

@@ -154,7 +154,7 @@ class ActionTag {

type ExpressionFilter = (tag: ExpressionTag) => boolean
function makeExpressionFilter(pattern: Ast.Ast | string): ExpressionFilter | undefined {
const editedAst = typeof pattern === 'string' ? Ast.parse(pattern) : pattern
const editedAst = typeof pattern === 'string' ? Ast.parseExpression(pattern) : pattern
const editedCode = pattern instanceof Ast.Ast ? pattern.code() : pattern
if (editedAst instanceof Ast.TextLiteral) {
return (tag: ExpressionTag) =>

@@ -249,11 +249,7 @@ provideSelectionArrow(
if (node instanceof Ast.AutoscopedIdentifier) return node.identifier.id
if (node instanceof Ast.PropertyAccess) return node.rhs.id
if (node instanceof Ast.App) node = node.function
else {
const wrapped = node.wrappedExpression()
if (wrapped != null) node = wrapped
else break
}
else break
}
return null
}),
@@ -369,7 +365,7 @@ function toggleVectorValue(vector: Ast.MutableVector, value: string, previousSta
if (previousState) {
vector.keep((ast) => ast.code() !== value)
} else {
vector.push(Ast.parse(value, vector.module))
vector.push(Ast.parseExpression(value, vector.module)!)
}
}

@@ -23,7 +23,7 @@ const displayedIcon = computed(() => {
const iconInput = computed(() => {
const lhs = props.input.value.lhs
if (!lhs) return
const input = WidgetInput.FromAstWithPort(lhs)
const input = WidgetInput.WithPort(WidgetInput.FromAst(lhs))
const icon = displayedIcon.value
if (icon) input[DisplayIcon] = { icon, showContents: showFullAccessChain.value }
return input

@@ -88,7 +88,7 @@ test.each([
],
},
])('Read table from $code', ({ code, expectedColumnDefs, expectedRows }) => {
const ast = Ast.parse(code)
const ast = Ast.parseExpression(code)!
expect(tableNewCallMayBeHandled(ast)).toBeTruthy()
const input = WidgetInput.FromAst(ast)
const startEdit = vi.fn()
@@ -177,14 +177,15 @@ test.each([
"Table.new [['a', [123]], ['a'.repeat 170, [123]]]",
"Table.new [['a', [1, 2, 3, 3 + 1]]]",
])('"%s" is not valid input for Table Editor Widget', (code) => {
const ast = Ast.parse(code)
const ast = Ast.parseExpression(code)!
expect(tableNewCallMayBeHandled(ast)).toBeFalsy()
})

function tableEditFixture(code: string, expectedCode: string) {
const ast = Ast.parseBlock(code)
const inputAst = [...ast.statements()][0]
assert(inputAst != null)
const firstStatement = [...ast.statements()][0]
assert(firstStatement instanceof Ast.MutableExpressionStatement)
const inputAst = firstStatement.expression
const input = WidgetInput.FromAst(inputAst)
const startEdit = vi.fn(() => ast.module.edit())
const onUpdate = vi.fn((update) => {

@@ -50,7 +50,7 @@ export interface ColumnDef extends ColDef<RowData> {

namespace cellValueConversion {
/** Convert AST node to a value for Grid (to be returned from valueGetter, for example). */
export function astToAgGrid(ast: Ast.Ast) {
export function astToAgGrid(ast: Ast.Expression) {
if (ast instanceof Ast.TextLiteral) return Ok(ast.rawTextContent)
else if (ast instanceof Ast.Ident && ast.code() === NOTHING_NAME) return Ok(null)
else if (ast instanceof Ast.PropertyAccess && ast.rhs.code() === NOTHING_NAME) return Ok(null)

@@ -69,7 +69,7 @@ namespace cellValueConversion {
export function agGridToAst(
value: unknown,
module: Ast.MutableModule,
): { ast: Ast.Owned; requireNothingImport: boolean } {
): { ast: Ast.Owned<Ast.MutableExpression>; requireNothingImport: boolean } {
if (value == null || value === '') {
return { ast: Ast.Ident.new(module, 'Nothing' as Ast.Identifier), requireNothingImport: true }
} else if (typeof value === 'number') {

@@ -88,7 +88,7 @@ namespace cellValueConversion {
}
}

function retrieveColumnsAst(call: Ast.Ast) {
function retrieveColumnsAst(call: Ast.Expression): Result<Ast.Vector | undefined> {
if (!(call instanceof Ast.App)) return Ok(undefined)
if (call.argument instanceof Ast.Vector) return Ok(call.argument)
if (call.argument instanceof Ast.Wildcard) return Ok(undefined)
@@ -96,7 +96,7 @@ function retrieveColumnsAst(call: Ast.Ast) {
}

function readColumn(
ast: Ast.Ast,
ast: Ast.Expression,
): Result<{ id: Ast.AstId; name: Ast.TextLiteral; data: Ast.Vector }> {
const errormsg = () => `${ast.code} is not a vector of two elements`
if (!(ast instanceof Ast.Vector)) return Err(errormsg())

@@ -125,7 +125,7 @@ function retrieveColumnsDefinitions(columnsAst: Ast.Vector) {
*
* This widget may handle table definitions filled with literals or `Nothing` values.
*/
export function tableNewCallMayBeHandled(call: Ast.Ast) {
export function tableNewCallMayBeHandled(call: Ast.Expression) {
const columnsAst = retrieveColumnsAst(call)
if (!columnsAst.ok) return false
if (!columnsAst.value) return true // We can handle lack of the argument

@@ -147,7 +147,7 @@ export function tableNewCallMayBeHandled(call: Ast.Ast) {
* @param onUpdate callback called when AGGrid was edited by user, resulting in AST change.
*/
export function useTableNewArgument(
input: ToValue<WidgetInput & { value: Ast.Ast }>,
input: ToValue<WidgetInput & { value: Ast.Expression }>,
graph: {
startEdit(): Ast.MutableModule
addMissingImports(edit: Ast.MutableModule, newImports: RequiredImport[]): void

@@ -343,7 +343,7 @@ export function useTableNewArgument(
if (data == null) return undefined
const ast = toValue(input).value.module.tryGet(data.cells[col.data.id])
if (ast == null) return null
const value = cellValueConversion.astToAgGrid(ast)
const value = cellValueConversion.astToAgGrid(ast as Ast.Expression)
if (!value.ok) {
console.error(
`Cannot read \`${ast.code}\` as value in Table Widget; the Table widget should not be matched here!`,

@@ -19,7 +19,7 @@ const itemConfig = computed(() =>

const defaultItem = computed(() =>
props.input.dynamicConfig?.kind === 'Vector_Editor' ?
Ast.parse(props.input.dynamicConfig.item_default)
Ast.parseExpression(props.input.dynamicConfig.item_default)
: DEFAULT_ITEM.value,
)
@@ -45,22 +45,27 @@ const value = computed({

const navigator = injectGraphNavigator(true)

function useChildEditForwarding(input: WatchSource<Ast.Ast | unknown>) {
function useChildEditForwarding(input: WatchSource<Ast.Expression | unknown>) {
let editStarted = false
const childEdit = shallowRef<{ origin: PortId; editedValue: Ast.Owned | string }>()
const childEdit = shallowRef<{
origin: PortId
editedValue: Ast.Owned<Ast.MutableExpression> | string
}>()

watchEffect(() => {
if (!editStarted && !childEdit.value) return
const inputValue = toValue(input)
if (!(inputValue instanceof Ast.Ast)) return
const editedAst = Ast.copyIntoNewModule(inputValue)
const editedAst = Ast.copyIntoNewModule(inputValue as Ast.Expression)
if (childEdit.value) {
const module = editedAst.module
const origin = childEdit.value.origin
const ast = isAstId(origin) ? module.tryGet(origin) : undefined
if (ast) {
const replacement = childEdit.value.editedValue
ast.replace(typeof replacement === 'string' ? Ast.parse(replacement, module) : replacement)
ast.replace(
typeof replacement === 'string' ? Ast.parseExpression(replacement, module)! : replacement,
)
}
}
editHandler.edit(editedAst)

@@ -71,7 +76,7 @@ function useChildEditForwarding(input: WatchSource<Ast.Ast | unknown>) {
childEnded: (origin: PortId) => {
if (childEdit.value?.origin === origin) childEdit.value = undefined
},
edit: (origin: PortId, value: Ast.Owned | string) => {
edit: (origin: PortId, value: Ast.Owned<Ast.MutableExpression> | string) => {
// The ID is used to locate a subtree; if the port isn't identified by an AstId, the lookup will simply fail.
childEdit.value = { origin, editedValue: value }
},

@@ -86,7 +91,7 @@ const editHandler = WidgetEditHandler.New('WidgetVector', props.input, {
edit,
})

function itemInput(ast: Ast.Ast): WidgetInput {
function itemInput(ast: Ast.Expression): WidgetInput {
return {
...WidgetInput.FromAst(ast),
dynamicConfig: itemConfig.value,
@@ -118,11 +123,11 @@ const DEFAULT_ITEM = computed(() => Ast.Wildcard.new())
<ListWidget
v-model="value"
:newItem="newItem"
:getKey="(ast: Ast.Ast) => ast.id"
:getKey="(ast: Ast.Expression) => ast.id"
dragMimeType="application/x-enso-ast-node"
:toPlainText="(ast: Ast.Ast) => ast.code()"
:toDragPayload="(ast: Ast.Ast) => Ast.serialize(ast)"
:fromDragPayload="Ast.deserialize"
:toPlainText="(ast: Ast.Expression) => ast.code()"
:toDragPayload="(ast: Ast.Expression) => Ast.serializeExpression(ast)"
:fromDragPayload="Ast.deserializeExpression"
:toDragPosition="(p) => navigator?.clientToScenePos(p) ?? p"
class="WidgetVector"
contenteditable="false"

@@ -19,7 +19,6 @@ export interface LexicalPlugin {

/** TODO: Add docs */
export function lexicalTheme(theme: Record<string, string>): EditorThemeClasses {
// eslint-disable-next-line @typescript-eslint/no-empty-object-type
interface EditorThemeShape extends Record<string, EditorThemeShape | string> {}
const editorClasses: EditorThemeShape = {}
for (const [classPath, className] of Object.entries(theme)) {

@@ -16,7 +16,9 @@ const { data } = defineProps<{ data: unknown }>()

const config = useVisualizationConfig()

type ConstructivePattern = (placeholder: Ast.Owned) => Ast.Owned
type ConstructivePattern = (
placeholder: Ast.Owned<Ast.MutableExpression>,
) => Ast.Owned<Ast.MutableExpression>

const JSON_OBJECT_TYPE = 'Standard.Base.Data.Json.JS_Object'

@@ -26,7 +28,7 @@ function projector(parentPattern: ConstructivePattern | undefined) {
const style = {
spaced: parentPattern !== undefined,
}
return (selector: number | string) => (source: Ast.Owned) =>
return (selector: number | string) => (source: Ast.Owned<Ast.MutableExpression>) =>
Ast.App.positional(
Ast.PropertyAccess.new(
source.module,
@@ -572,7 +572,7 @@ function getPlotData(data: Data) {
return data.data
}

const filterPattern = computed(() => Pattern.parse('__ (..Between __ __)'))
const filterPattern = computed(() => Pattern.parseExpression('__ (..Between __ __)'))
const makeFilterPattern = (
module: Ast.MutableModule,
columnName: string,

@@ -596,24 +596,24 @@ function getAstPatternFilterAndSort(
minY: number,
maxY: number,
) {
return Pattern.new((ast) => {
let pattern: Ast.Owned<Ast.MutableOprApp> | Ast.Owned<Ast.MutableApp> = Ast.App.positional(
Ast.PropertyAccess.new(ast.module, ast, Ast.identifier('filter')!),
makeFilterPattern(ast.module, xColName, minX, maxX),
)
for (const s of series) {
pattern = Ast.OprApp.new(
ast.module,
pattern,
'.',
Ast.App.positional(
Ast.Ident.new(ast.module, Ast.identifier('filter')!),
makeFilterPattern(ast.module, s!, minY, maxY),
return Pattern.new<Ast.Expression>((ast) =>
series.reduce<Ast.Owned<Ast.MutableExpression>>(
(pattern, s) =>
Ast.OprApp.new(
ast.module,
pattern,
'.',
Ast.App.positional(
Ast.Ident.new(ast.module, Ast.identifier('filter')!),
makeFilterPattern(ast.module, s!, minY, maxY),
),
),
)
}
return pattern
})
Ast.App.positional(
Ast.PropertyAccess.new(ast.module, ast, Ast.identifier('filter')!),
makeFilterPattern(ast.module, xColName, minX, maxX),
),
),
)
}
const createNewFilterNode = () => {
const seriesLabels = Object.keys(data.value.axis)

@@ -639,7 +639,7 @@ const createNewFilterNode = () => {

function getAstPattern(selector?: number, action?: string) {
if (action && selector != null) {
return Pattern.new((ast) =>
return Pattern.new<Ast.Expression>((ast) =>
Ast.App.positional(
Ast.PropertyAccess.new(ast.module, ast, Ast.identifier(action)!),
Ast.tryNumberToEnso(selector, ast.module)!,

@@ -367,7 +367,7 @@ function toRowField(name: string, valueType?: ValueType | null | undefined) {

function getAstPattern(selector?: string | number, action?: string) {
if (action && selector != null) {
return Pattern.new((ast) =>
return Pattern.new<Ast.Expression>((ast) =>
Ast.App.positional(
Ast.PropertyAccess.new(ast.module, ast, Ast.identifier(action)!),
typeof selector === 'number' ?

@@ -13,7 +13,9 @@ export const defaultPreprocessor = [
] as const

const removeWarnings = computed(() =>
Pattern.new((ast) => Ast.PropertyAccess.new(ast.module, ast, Ast.identifier('remove_warnings')!)),
Pattern.new<Ast.Expression>((ast) =>
Ast.PropertyAccess.new(ast.module, ast, Ast.identifier('remove_warnings')!),
),
)
</script>
@@ -39,7 +39,7 @@ function useSortFilterNodesButton({
isFilterSortNodeEnabled,
createNodes,
}: SortFilterNodesButtonOptions): ComputedRef<ToolbarItem | undefined> {
const sortPatternPattern = computed(() => Pattern.parse('(..Name __ __ )'))
const sortPatternPattern = computed(() => Pattern.parseExpression('(..Name __ __ )')!)

const sortDirection = computed(() => ({
asc: '..Ascending',

@@ -53,36 +53,36 @@ function useSortFilterNodesButton({
.map((sort) =>
sortPatternPattern.value.instantiateCopied([
Ast.TextLiteral.new(sort.columnName),
Ast.parse(sortDirection.value[sort.sortDirection as SortDirection]),
Ast.parseExpression(sortDirection.value[sort.sortDirection as SortDirection])!,
]),
)
return Ast.Vector.new(module, columnSortExpressions)
}

const filterPattern = computed(() => Pattern.parse('__ (__ __)'))
const filterPattern = computed(() => Pattern.parseExpression('__ (__ __)')!)

function makeFilterPattern(module: Ast.MutableModule, columnName: string, items: string[]) {
if (
(items?.length === 1 && items.indexOf('true') != -1) ||
(items?.length === 1 && items.indexOf('false') != -1)
) {
const boolToInclude = items.indexOf('false') != -1 ? Ast.parse('False') : Ast.parse('True')
const boolToInclude = Ast.Ident.tryParse(items.indexOf('false') != -1 ? 'False' : 'True')!
return filterPattern.value.instantiateCopied([
Ast.TextLiteral.new(columnName),
Ast.parse('..Equal'),
Ast.parseExpression('..Equal')!,
boolToInclude,
])
}
const itemList = items.map((i) => Ast.TextLiteral.new(i))
return filterPattern.value.instantiateCopied([
Ast.TextLiteral.new(columnName),
Ast.parse('..Is_In'),
Ast.parseExpression('..Is_In')!,
Ast.Vector.new(module, itemList),
])
}

function getAstPatternSort() {
return Pattern.new((ast) =>
return Pattern.new<Ast.Expression>((ast) =>
Ast.App.positional(
Ast.PropertyAccess.new(ast.module, ast, Ast.identifier('sort')!),
makeSortPattern(ast.module),

@@ -91,7 +91,7 @@ function useSortFilterNodesButton({
}

function getAstPatternFilter(columnName: string, items: string[]) {
return Pattern.new((ast) =>
return Pattern.new<Ast.Expression>((ast) =>
Ast.App.positional(
Ast.PropertyAccess.new(ast.module, ast, Ast.identifier('filter')!),
makeFilterPattern(ast.module, columnName, items),

@@ -100,7 +100,7 @@ function useSortFilterNodesButton({
}

function getAstPatternFilterAndSort(columnName: string, items: string[]) {
return Pattern.new((ast) =>
return Pattern.new<Ast.Expression>((ast) =>
Ast.OprApp.new(
ast.module,
Ast.App.positional(
|
@ -15,7 +15,7 @@ test.each([
|
||||
])('New node location in block', (...linesWithInsertionPoint: string[]) => {
|
||||
const inputLines = linesWithInsertionPoint.filter((line) => line !== '*')
|
||||
const bodyBlock = Ast.parseBlock(inputLines.join('\n'))
|
||||
insertNodeStatements(bodyBlock, [Ast.parse('newNodePositionMarker')])
|
||||
insertNodeStatements(bodyBlock, [Ast.parseStatement('newNodePositionMarker')!])
|
||||
const lines = bodyBlock
|
||||
.code()
|
||||
.split('\n')
|
||||
@ -26,11 +26,13 @@ test.each([
|
||||
// This is a special case because when a block is empty, adding a line requires adding *two* linebreaks.
|
||||
test('Adding node to empty block', () => {
|
||||
const module = Ast.MutableModule.Transient()
|
||||
const func = Ast.Function.fromStatements(module, identifier('f')!, [], [])
|
||||
const func = Ast.Function.new(identifier('f')!, [], Ast.BodyBlock.new([], module), {
|
||||
edit: module,
|
||||
})
|
||||
const rootBlock = Ast.BodyBlock.new([], module)
|
||||
rootBlock.push(func)
|
||||
expect(rootBlock.code().trimEnd()).toBe('f =')
|
||||
insertNodeStatements(func.bodyAsBlock(), [Ast.parse('newNode')])
|
||||
insertNodeStatements(func.bodyAsBlock(), [Ast.parseStatement('newNode')!])
|
||||
expect(
|
||||
rootBlock
|
||||
.code()
|
||||
|
@@ -1,26 +1,31 @@
import { type GraphStore } from '@/stores/graph'
import { Ast } from '@/util/ast'
import { type ToValue } from '@/util/reactivity'
import { computed, toValue } from 'vue'
import type { Ast } from 'ydoc-shared/ast'

/** A composable for reactively retrieving and setting documentation from given Ast node. */
export function useAstDocumentation(graphStore: GraphStore, ast: ToValue<Ast | undefined>) {
export function useAstDocumentation(graphStore: GraphStore, ast: ToValue<Ast.Ast | undefined>) {
return {
documentation: {
state: computed(() => toValue(ast)?.documentingAncestor()?.documentation() ?? ''),
set: (value: string) => {
state: computed(() => {
const astValue = toValue(ast)
if (!astValue) return
if (value.trimStart() !== '') {
graphStore.getMutable(astValue).getOrInitDocumentation().setDocumentationText(value)
} else {
// Remove the documentation node.
const documented = astValue.documentingAncestor()
if (documented && documented.expression)
graphStore.edit((edit) =>
edit.getVersion(documented).update((documented) => documented.expression!.take()),
)
}
return (astValue?.isStatement() ? astValue.documentationText() : undefined) ?? ''
}),
set: (text: string | undefined) => {
const astValue = toValue(ast)
graphStore.edit((edit) => {
if (astValue?.isStatement()) {
const editAst = edit.getVersion(astValue)
// If the statement can have documentation attached (for example, it is a `Function`, `Assignment`, or
// `ExpressionStatement`), do so. If in cannot (for example, it is an `import` declaration), an error will
// be reported below.
if ('setDocumentationText' in editAst) {
editAst.setDocumentationText(text)
return
}
}
console.error('Unable to set documentation', astValue?.id)
})
},
},
}
@ -126,9 +126,13 @@ export function useNodeCreation(
const createdIdentifiers = new Set<Identifier>()
const identifiersRenameMap = new Map<Identifier, Identifier>()
graphStore.edit((edit) => {
const statements = new Array<Ast.Owned>()
const statements = new Array<Ast.Owned<Ast.MutableStatement>>()
for (const options of placedNodes) {
const rhs = Ast.parse(options.expression, edit)
const rhs = Ast.parseExpression(options.expression, edit)
if (!rhs) {
console.error('Cannot create node: invalid expression', options.expression)
continue
}
const ident = getIdentifier(rhs, options, createdIdentifiers)
createdIdentifiers.add(ident)
const { id, rootExpression } = newAssignmentNode(
@ -192,19 +196,16 @@ export function useNodeCreation(
function newAssignmentNode(
edit: Ast.MutableModule,
ident: Ast.Identifier,
rhs: Ast.Owned,
rhs: Ast.Owned<Ast.MutableExpression>,
options: NodeCreationOptions,
identifiersRenameMap: Map<Ast.Identifier, Ast.Identifier>,
) {
rhs.setNodeMetadata(options.metadata ?? {})
const assignment = Ast.Assignment.new(edit, ident, rhs)
const { documentation } = options
const assignment = Ast.Assignment.new(ident, rhs, { edit, documentation })
afterCreation(edit, assignment, ident, options, identifiersRenameMap)
const id = asNodeId(rhs.externalId)
const rootExpression =
options.documentation != null ?
Ast.Documented.new(options.documentation, assignment)
: assignment
return { rootExpression, id }
return { rootExpression: assignment, id }
}

function getIdentifier(
@ -270,10 +271,14 @@ function existingNameToPrefix(name: string): string {
* The location will be after any statements in the block that bind identifiers; if the block ends in an expression
* statement, the location will be before it so that the value of the block will not be affected.
*/
export function insertNodeStatements(bodyBlock: Ast.MutableBodyBlock, statements: Ast.Owned[]) {
export function insertNodeStatements(
bodyBlock: Ast.MutableBodyBlock,
statements: Ast.Owned<Ast.MutableStatement>[],
) {
const lines = bodyBlock.lines
const lastStatement = lines[lines.length - 1]?.statement?.node
const index =
lines[lines.length - 1]?.expression?.node.isBindingStatement !== false ?
lastStatement instanceof Ast.MutableAssignment || lastStatement instanceof Ast.MutableFunction ?
lines.length
: lines.length - 1
bodyBlock.insert(index, ...statements)
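// A small sketch of the placement rule documented above (the code strings are
// illustrative; only APIs used in this file are assumed):
const block = Ast.parseBlock('sum = a + b\nsum')
insertNodeStatements(block, [Ast.parseStatement('mean = sum / 2')!])
// The new statement is inserted before the trailing expression-statement `sum`,
// so the value of the block is unchanged.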
@ -459,8 +459,10 @@ export const mockLSHandler: MockTransportData = async (method, data, transport)
expressionId: ExpressionId
expression: string
}
const aiPromptPat = Pattern.parse('Standard.Visualization.AI.build_ai_prompt __ . to_json')
const exprAst = Ast.parse(data_.expression)
const aiPromptPat = Pattern.parseExpression(
'Standard.Visualization.AI.build_ai_prompt __ . to_json',
)
const exprAst = Ast.parseExpression(data_.expression)!
if (aiPromptPat.test(exprAst)) {
sendVizUpdate(
data_.visualizationId,
@ -56,8 +56,8 @@ describe('WidgetRegistry', () => {
}),
)

const someAst = WidgetInput.FromAst(Ast.parse('foo'))
const blankAst = WidgetInput.FromAst(Ast.parse('_'))
const someAst = WidgetInput.FromAst(Ast.parseExpression('foo'))
const blankAst = WidgetInput.FromAst(Ast.parseExpression('_'))
const someArgPlaceholder: WidgetInput = {
portId: '57d429dc-df85-49f8-b150-567c7d1fb502' as PortId,
value: 'bar',
@ -12,21 +12,20 @@ import type { WidgetEditHandlerParent } from './widgetRegistry/editHandler'
export type WidgetComponent<T extends WidgetInput> = Component<WidgetProps<T>>

export namespace WidgetInput {
/** Create a basic {@link WidgetInput } from AST node. */
export function FromAst<A extends Ast.Ast | Ast.Token>(ast: A): WidgetInput & { value: A } {
/** Returns widget-input data for the given AST expression or token. */
export function FromAst<A extends Ast.Expression | Ast.Token>(
ast: A,
): WidgetInput & { value: A } {
return {
portId: ast.id,
value: ast,
}
}

/** Create a basic {@link WidgetInput } from AST node with enforced port. */
export function FromAstWithPort<A extends Ast.Ast | Ast.Token>(
ast: A,
): WidgetInput & { value: A } {
/** Returns the input marked to be a port. */
export function WithPort<T extends WidgetInput>(input: T): T {
return {
portId: ast.id,
value: ast,
...input,
forcePort: true,
}
}
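// Sketch of the intended call pattern for the two helpers above (the expression is
// an arbitrary example; widget wiring is omitted):
const expr = Ast.parseExpression('foo.bar 1')!
const input = WidgetInput.FromAst(expr) // portId === expr.id, value === expr
const portInput = WidgetInput.WithPort(input) // a copy of `input` with forcePort: true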
@ -57,14 +56,14 @@ export namespace WidgetInput {
}

/** Check if input's value is existing AST node (not placeholder or token). */
export function isAst(input: WidgetInput): input is WidgetInput & { value: Ast.Ast } {
return input.value instanceof Ast.Ast
export function isAst(input: WidgetInput): input is WidgetInput & { value: Ast.Expression } {
return input.value instanceof Ast.Ast && input.value.isExpression()
}

/** Check if input's value is existing AST node or placeholder. Rule out token inputs. */
export function isAstOrPlaceholder(
input: WidgetInput,
): input is WidgetInput & { value: Ast.Ast | string | undefined } {
): input is WidgetInput & { value: Ast.Expression | string | undefined } {
return isPlaceholder(input) || isAst(input)
}

@ -74,9 +73,9 @@ export namespace WidgetInput {
}

/** Check if input's value is an AST which potentially may be a function call. */
export function isFunctionCall(
input: WidgetInput,
): input is WidgetInput & { value: Ast.App | Ast.Ident | Ast.PropertyAccess | Ast.OprApp } {
export function isFunctionCall(input: WidgetInput): input is WidgetInput & {
value: Ast.App | Ast.Ident | Ast.PropertyAccess | Ast.OprApp | Ast.AutoscopedIdentifier
} {
return (
input.value instanceof Ast.App ||
input.value instanceof Ast.Ident ||
@ -119,10 +118,10 @@ export interface WidgetInput {
*/
portId: PortId
/**
* An expected widget value. If Ast.Ast or Ast.Token, the widget represents an existing part of
* An expected widget value. If Ast.Expression or Ast.Token, the widget represents an existing part of
* code. If string, it may be e.g. a default value of an argument.
*/
value: Ast.Ast | Ast.Token | string | undefined
value: Ast.Expression | Ast.Token | string | undefined
/** An expected type which widget should set. */
expectedType?: Typename | undefined
/** Configuration provided by engine. */
@ -172,7 +171,7 @@ export interface WidgetProps<T> {
export interface WidgetUpdate {
edit?: MutableModule | undefined
portUpdate?: { origin: PortId } & (
| { value: Ast.Owned | string | undefined }
| { value: Ast.Owned<Ast.MutableExpression> | string | undefined }
| { metadataKey: string; metadata: unknown }
)
}
@ -60,7 +60,7 @@ export abstract class WidgetEditHandlerParent {
this.parent?.unsetActiveChild(this)
}

protected onEdit(origin: PortId, value: Ast.Owned | string): void {
protected onEdit(origin: PortId, value: Ast.Owned<Ast.MutableExpression> | string): void {
this.hooks.edit?.(origin, value)
this.parent?.onEdit(origin, value)
}
@ -265,8 +265,8 @@ export class WidgetEditHandler extends WidgetEditHandlerParent {
this.onStart(this.portId)
}

/** TODO: Add docs */
edit(value: Ast.Owned | string) {
/** Emit an event updating the widget's value. */
edit(value: Ast.Owned<Ast.MutableExpression> | string) {
this.onEdit(this.portId, value)
}
}
@ -281,7 +281,7 @@ export interface WidgetEditHooks extends Interaction {
end?(origin?: PortId | undefined): void
childEnded?(origin?: PortId | undefined): void
/** Hook called when a child widget provides an updated value. */
edit?(origin: PortId, value: Ast.Owned | string): void
edit?(origin: PortId, value: Ast.Owned<Ast.MutableExpression> | string): void
/**
* Hook enabling a widget to provide a handler for the add-item intent of a child widget. The parent can return true
* to indicate that creating the new item has been handled and the child should not perform its action in this case.
@ -10,7 +10,7 @@ export { injectFn as injectWidgetTree, provideFn as provideWidgetTree }
const { provideFn, injectFn } = createContextStore(
'Widget tree',
(
astRoot: Ref<Ast.Ast>,
astRoot: Ref<Ast.Expression>,
nodeId: Ref<NodeId>,
nodeElement: Ref<HTMLElement | undefined>,
nodeSize: Ref<Vec2>,
@ -25,7 +25,7 @@ export function parseWithSpans<T extends Record<string, SourceRange>>(code: stri

const { root: ast, toRaw, getSpan } = Ast.parseExtended(code, idMap)
const idFromExternal = new Map<ExternalId, AstId>()
ast.visitRecursiveAst((ast) => {
ast.visitRecursive((ast) => {
idFromExternal.set(ast.externalId, ast.id)
})
const id = (name: keyof T) => idFromExternal.get(eid(name))!
@ -5,14 +5,16 @@ import type { SuggestionEntry } from '@/stores/suggestionDatabase/entry'
import { assert } from '@/util/assert'
import { Ast, RawAst } from '@/util/ast'
import type { AstId, NodeMetadata } from '@/util/ast/abstract'
import { autospaced, MutableModule } from '@/util/ast/abstract'
import { MutableModule } from '@/util/ast/abstract'
import { AliasAnalyzer } from '@/util/ast/aliasAnalysis'
import { inputNodeFromAst, nodeFromAst, nodeRootExpr } from '@/util/ast/node'
import { MappedKeyMap, MappedSet } from '@/util/containers'
import { tryGetIndex } from '@/util/data/array'
import { recordEqual } from '@/util/data/object'
import { unwrap } from '@/util/data/result'
import { Vec2 } from '@/util/data/vec2'
import { ReactiveDb, ReactiveIndex, ReactiveMapping } from '@/util/database/reactiveDb'
import { tryIdentifier } from '@/util/qualifiedName'
import {
nonReactiveView,
resumeReactivity,
@ -67,8 +69,10 @@ export class BindingsDb {
// Add or update bindings.
for (const [bindingRange, usagesRanges] of analyzer.aliases) {
const aliasAst = bindingRangeToTree.get(bindingRange)
assert(aliasAst != null)
if (aliasAst == null) continue
if (aliasAst == null) {
console.warn(`Binding not found`, bindingRange)
continue
}
const aliasAstId = aliasAst.id
const info = this.bindings.get(aliasAstId)
if (info == null) {
@ -121,7 +125,7 @@ export class BindingsDb {
bindingRanges.add(binding)
for (const usage of usages) bindingRanges.add(usage)
}
ast.visitRecursiveAst((ast) => {
ast.visitRecursive((ast) => {
const span = getSpan(ast.id)
assert(span != null)
if (bindingRanges.has(span)) {
@ -153,13 +157,13 @@ export class GraphDb {

private nodeIdToPatternExprIds = new ReactiveIndex(this.nodeIdToNode, (id, entry) => {
const exprs: AstId[] = []
if (entry.pattern) entry.pattern.visitRecursiveAst((ast) => void exprs.push(ast.id))
if (entry.pattern) entry.pattern.visitRecursive((ast) => void exprs.push(ast.id))
return Array.from(exprs, (expr) => [id, expr])
})

private nodeIdToExprIds = new ReactiveIndex(this.nodeIdToNode, (id, entry) => {
const exprs: AstId[] = []
entry.innerExpr.visitRecursiveAst((ast) => void exprs.push(ast.id))
entry.innerExpr.visitRecursive((ast) => void exprs.push(ast.id))
return Array.from(exprs, (expr) => [id, expr])
})

@ -195,7 +199,7 @@ export class GraphDb {
nodeOutputPorts = new ReactiveIndex(this.nodeIdToNode, (id, entry) => {
if (entry.pattern == null) return []
const ports = new Set<AstId>()
entry.pattern.visitRecursiveAst((ast) => {
entry.pattern.visitRecursive((ast) => {
if (this.bindings.bindings.has(ast.id)) {
ports.add(ast.id)
return false
@ -350,7 +354,7 @@ export class GraphDb {
const args = functionAst_.argumentDefinitions
const update = (
nodeId: NodeId,
ast: Ast.Ast,
ast: Ast.Expression | Ast.Statement,
isInput: boolean,
isOutput: boolean,
argIndex: number | undefined,
@ -383,7 +387,7 @@ export class GraphDb {
update(nodeId, argPattern, true, false, index)
})
body.forEach((outerAst, index) => {
const nodeId = nodeIdFromOuterExpr(outerAst)
const nodeId = nodeIdFromOuterAst(outerAst)
if (!nodeId) return
const isLastInBlock = index === body.length - 1
update(nodeId, outerAst, false, isLastInBlock, undefined)
@ -400,12 +404,15 @@ export class GraphDb {
/** Scan a node's content from its outer expression down to, but not including, its inner expression. */
private updateNodeStructure(
nodeId: NodeId,
ast: Ast.Ast,
ast: Ast.Statement | Ast.Expression,
isOutput: boolean,
isInput: boolean,
argIndex?: number,
) {
const newNode = isInput ? inputNodeFromAst(ast, argIndex ?? 0) : nodeFromAst(ast, isOutput)
const newNode =
isInput ?
inputNodeFromAst(ast as Ast.Expression, argIndex ?? 0)
: nodeFromAst(ast as Ast.Statement, isOutput)
if (!newNode) return
const oldNode = this.nodeIdToNode.getUntracked(nodeId)
if (oldNode == null) {
@ -424,14 +431,13 @@ export class GraphDb {
} else {
const {
type,
outerExpr,
outerAst,
pattern,
rootExpr,
innerExpr,
primarySubject,
prefixes,
conditionalPorts,
docs,
argIndex,
} = newNode
const node = resumeReactivity(oldNode)
@ -440,7 +446,7 @@ export class GraphDb {
const updateAst = (field: NodeAstField) => {
if (oldNode[field]?.id !== newNode[field]?.id) node[field] = newNode[field] as any
}
const astFields: NodeAstField[] = ['outerExpr', 'pattern', 'rootExpr', 'innerExpr', 'docs']
const astFields: NodeAstField[] = ['outerAst', 'pattern', 'rootExpr', 'innerExpr']
astFields.forEach(updateAst)
if (oldNode.primarySubject !== primarySubject) node.primarySubject = primarySubject
if (!recordEqual(oldNode.prefixes, prefixes)) node.prefixes = prefixes
@ -448,14 +454,13 @@ export class GraphDb {
// Ensure new fields can't be added to `NodeAstData` without this code being updated.
const _allFieldsHandled = {
type,
outerExpr,
outerAst,
pattern,
rootExpr,
innerExpr,
primarySubject,
prefixes,
conditionalPorts,
docs,
argIndex,
} satisfies NodeDataFromAst
}
@ -475,7 +480,7 @@ export class GraphDb {
updateExternalIds(topLevel: Ast.Ast) {
const idToExternalNew = new Map()
const idFromExternalNew = new Map()
topLevel.visitRecursiveAst((ast) => {
topLevel.visitRecursive((ast) => {
idToExternalNew.set(ast.id, ast.externalId)
idFromExternalNew.set(ast.externalId, ast.id)
})
@ -540,14 +545,10 @@ export class GraphDb {
/** TODO: Add docs */
mockNode(binding: string, id: NodeId, code?: string): Node {
const edit = MutableModule.Transient()
const pattern = Ast.parse(binding, edit)
const expression = Ast.parse(code ?? '0', edit)
const outerExpr = Ast.Assignment.concrete(
edit,
autospaced(pattern),
{ node: Ast.Token.new('='), whitespace: ' ' },
{ node: expression, whitespace: ' ' },
)
const ident = unwrap(tryIdentifier(binding))
const expression = Ast.parseExpression(code ?? '0', edit)!
const outerAst = Ast.Assignment.new(ident, expression, { edit })
const pattern = outerAst.pattern

const node: Node = {
type: 'component',
@ -557,11 +558,10 @@ export class GraphDb {
primarySubject: undefined,
colorOverride: undefined,
conditionalPorts: new Set(),
docs: undefined,
outerExpr,
outerAst,
pattern,
rootExpr: Ast.parse(code ?? '0'),
innerExpr: Ast.parse(code ?? '0'),
rootExpr: expression,
innerExpr: expression,
zIndex: this.highestZIndex,
argIndex: undefined,
}
@ -574,7 +574,7 @@ export class GraphDb {

/** Source code data of the specific node. */
interface NodeSource {
/** The outer AST of the node (see {@link NodeDataFromAst.outerExpr}). */
/** The outer AST of the node (see {@link NodeDataFromAst.outerAst}). */
outerAst: Ast.Ast
/**
* Whether the node is `output` of the function or not. Mutually exclusive with `isInput`.
@ -602,28 +602,37 @@ export function asNodeId(id: ExternalId | undefined): NodeId | undefined {
return id != null ? (id as NodeId) : undefined
}

/** Given an expression at the top level of a block, return the `NodeId` for the expression. */
export function nodeIdFromOuterExpr(outerExpr: Ast.Ast) {
const { root } = nodeRootExpr(outerExpr)
/** Given the outermost AST for a node, returns its {@link NodeId}. */
export function nodeIdFromOuterAst(outerAst: Ast.Statement | Ast.Expression) {
const { root } = nodeRootExpr(outerAst)
return root && asNodeId(root.externalId)
}

export interface NodeDataFromAst {
type: NodeType
/** The outer expression, usually an assignment expression (`a = b`). */
outerExpr: Ast.Ast
/** The left side of the assignment expression, if `outerExpr` is an assignment expression. */
pattern: Ast.Ast | undefined
/**
* The value of the node. The right side of the assignment, if `outerExpr` is an assignment
* expression, else the entire `outerExpr`.
* The statement or top-level expression.
*
* If the function has a body block, the nodes derived from the block are statements:
* - Assignment expressions (`a = b`)
* - Expression-statements (unnamed nodes and output nodes)
* If the function has a single-line body, the corresponding node will be an expression.
*
* Nodes for the function's inputs have (pattern) expressions as their outer ASTs.
*/
rootExpr: Ast.Ast
outerAst: Ast.Statement | Ast.Expression
/** The left side of the assignment expression, if `outerAst` is an assignment expression. */
pattern: Ast.Expression | undefined
/**
* The value of the node. The right side of the assignment, if `outerAst` is an assignment
* expression, else the entire `outerAst`.
*/
rootExpr: Ast.Expression
/**
* The expression displayed by the node. This is `rootExpr`, minus the prefixes, which are in
* `prefixes`.
*/
innerExpr: Ast.Ast
innerExpr: Ast.Expression
/**
Prefixes that are present in `rootExpr` but omitted in `innerExpr` to ensure a clean output.
*/
@ -632,8 +641,6 @@ export interface NodeDataFromAst {
primarySubject: Ast.AstId | undefined
/** Ports that are not targetable by default; they can be targeted while holding the modifier key. */
conditionalPorts: Set<Ast.AstId>
/** An AST node containing the node's documentation comment. */
docs: Ast.Documented | undefined
/** The index of the argument in the function's argument list, if the node is an input node. */
argIndex: number | undefined
}
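// Illustration of how the fields above line up for a typical assignment node,
// mirroring the `nodeFromAst` tests (the code string is an example; `nodeFromAst`
// comes from '@/util/ast/node'):
const line = [...Ast.parseBlock("a = Data.read 'file.csv'").statements()][0]!
const node = nodeFromAst(line, false)!
console.log(node.outerAst.code()) // the whole assignment statement
console.log(node.pattern?.code()) // 'a'
console.log(node.rootExpr.code()) // "Data.read 'file.csv'"
console.log(node.innerExpr.code()) // same as rootExpr here, since there are no prefixes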
@ -99,9 +99,9 @@ export interface UnqualifiedImport {
}

/** Read imports from given module block */
export function readImports(ast: Ast.Ast): Import[] {
export function readImports(ast: Ast.BodyBlock): Import[] {
const imports: Import[] = []
ast.visitRecursiveAst((node) => {
ast.visitRecursive((node) => {
if (node instanceof Ast.Import) {
const recognized = recognizeImport(node)
if (recognized) {
@ -132,8 +132,8 @@ function newImportsLocation(scope: Ast.BodyBlock): number {
const lines = scope.lines
for (let i = 0; i < lines.length; i++) {
const line = lines[i]!
if (line.expression) {
if (line.expression.node?.innerExpression() instanceof Ast.Import) {
if (line.statement) {
if (line.statement.node instanceof Ast.Import) {
lastImport = i
} else {
break
@ -2,7 +2,7 @@ import { usePlacement } from '@/components/ComponentBrowser/placement'
import { createContextStore } from '@/providers'
import type { PortId } from '@/providers/portInfo'
import type { WidgetUpdate } from '@/providers/widgetRegistry'
import { GraphDb, nodeIdFromOuterExpr, type NodeId } from '@/stores/graph/graphDatabase'
import { GraphDb, nodeIdFromOuterAst, type NodeId } from '@/stores/graph/graphDatabase'
import {
addImports,
detectImportConflicts,
@ -223,7 +223,7 @@ export const { injectFn: useGraphStore, provideFn: provideGraphStore } = createC
return Err('Method pointer is not a module method')
const method = Ast.findModuleMethod(topLevel, ptr.name)
if (!method) return Err(`No method with name ${ptr.name} in ${modulePath.value}`)
return Ok(method)
return Ok(method.statement)
}

/**
@ -330,8 +330,8 @@ export const { injectFn: useGraphStore, provideFn: provideGraphStore } = createC

updatePortValue(edit, usage, undefined)
}
const outerExpr = edit.getVersion(node.outerExpr)
if (outerExpr) Ast.deleteFromParentBlock(outerExpr)
const outerAst = edit.getVersion(node.outerAst)
if (outerAst.isStatement()) Ast.deleteFromParentBlock(outerAst)
nodeRects.delete(id)
nodeHoverAnimations.delete(id)
deletedNodes.add(id)
@ -576,7 +576,7 @@ export const { injectFn: useGraphStore, provideFn: provideGraphStore } = createC
function updatePortValue(
edit: MutableModule,
id: PortId,
value: Ast.Owned | undefined,
value: Ast.Owned<Ast.MutableExpression> | undefined,
): boolean {
const update = getPortPrimaryInstance(id)?.onUpdate
if (!update) return false
@ -692,7 +692,7 @@ export const { injectFn: useGraphStore, provideFn: provideGraphStore } = createC
const body = func.bodyExpressions()
const result: NodeId[] = []
for (const expr of body) {
const nodeId = nodeIdFromOuterExpr(expr)
const nodeId = nodeIdFromOuterAst(expr)
if (nodeId && ids.has(nodeId)) result.push(nodeId)
}
return result
@ -710,14 +710,14 @@ export const { injectFn: useGraphStore, provideFn: provideGraphStore } = createC
sourceNodeId: NodeId,
targetNodeId: NodeId,
) {
const sourceExpr = db.nodeIdToNode.get(sourceNodeId)?.outerExpr.id
const targetExpr = db.nodeIdToNode.get(targetNodeId)?.outerExpr.id
const sourceExpr = db.nodeIdToNode.get(sourceNodeId)?.outerAst.id
const targetExpr = db.nodeIdToNode.get(targetNodeId)?.outerAst.id
const body = edit.getVersion(unwrap(getExecutedMethodAst(edit))).bodyAsBlock()
assert(sourceExpr != null)
assert(targetExpr != null)
const lines = body.lines
const sourceIdx = lines.findIndex((line) => line.expression?.node.id === sourceExpr)
const targetIdx = lines.findIndex((line) => line.expression?.node.id === targetExpr)
const sourceIdx = lines.findIndex((line) => line.statement?.node.id === sourceExpr)
const targetIdx = lines.findIndex((line) => line.statement?.node.id === targetExpr)
assert(sourceIdx != null)
assert(targetIdx != null)
@ -727,7 +727,7 @@ export const { injectFn: useGraphStore, provideFn: provideGraphStore } = createC
const deps = reachable([targetNodeId], (node) => db.nodeDependents.lookup(node))

const dependantLines = new Set(
Array.from(deps, (id) => db.nodeIdToNode.get(id)?.outerExpr.id),
Array.from(deps, (id) => db.nodeIdToNode.get(id)?.outerAst.id),
)
// Include the new target itself in the set of lines that must be placed after source node.
dependantLines.add(targetExpr)
@ -744,7 +744,7 @@ export const { injectFn: useGraphStore, provideFn: provideGraphStore } = createC

// Split those lines into two buckets, whether or not they depend on the target.
const [linesAfter, linesBefore] = partition(linesToSort, (line) =>
dependantLines.has(line.expression?.node.id),
dependantLines.has(line.statement?.node.id),
)

// Recombine all lines after splitting, keeping existing dependants below the target.
@ -101,7 +101,7 @@ test.each`
expectedPattern,
fixture: { allowInfix, mockSuggestion, argsParameters },
}: TestData) => {
const ast = Ast.parse(expression.trim())
const ast = Ast.parseExpression(expression.trim())

const configuration: widgetCfg.FunctionCall = {
kind: 'FunctionCall',
@ -207,7 +207,7 @@ test.each([
({ code, subapplicationIndex, notAppliedArguments, expectedNotAppliedArguments }: TestCase) => {
const { db, expectedMethodCall, expectedSuggestion, setExpressionInfo } =
prepareMocksForGetMethodCallTest()
const ast = Ast.parse(code)
const ast = Ast.parseExpression(code)
db.updateExternalIds(ast)
const subApplication = nthSubapplication(ast, subapplicationIndex)
assert(subApplication)
@ -345,7 +345,7 @@ test.each([
'Computing IDs of arguments: $description',
({ code, subapplicationIndex, notAppliedArguments, expectedSameIds }: ArgsTestCase) => {
const { db, expectedMethodCall, setExpressionInfo } = prepareMocksForGetMethodCallTest()
const ast = Ast.parse(code)
const ast = Ast.parseExpression(code)
const subApplication = nthSubapplication(ast, subapplicationIndex)
assert(subApplication)
db.updateExternalIds(ast)
File diff suppressed because it is too large
@ -15,6 +15,7 @@ import {
} from '@/util/ast/abstract'
import { fc, test } from '@fast-check/vitest'
import { describe, expect } from 'vitest'
import { BodyBlock } from 'ydoc-shared/ast'
import { findExpressions, testCase, tryFindExpressions } from './testCase'

test('Raw block abstracts to Ast.BodyBlock', () => {
@ -25,10 +26,21 @@ test('Raw block abstracts to Ast.BodyBlock', () => {
expect(abstracted.root).toBeInstanceOf(Ast.BodyBlock)
})

//const disabledCases = [
// ' a',
// 'a ',
//]
// FIXME: Parsing source code and reprinting it should produce exactly the same output as input. The following cases are
// known to be incorrectly handled. For each such case the test checks the result of parsing and reprinting to ensure
// it is at least a reasonable normalization of the input.
const normalizingCases = [
{ input: ' a', normalized: ' a' },
{ input: 'a ', normalized: 'a \n' },
{
input: ['main =', ' foo', ' bar', ' baz'].join('\n'),
normalized: ['main =', ' foo', ' bar', ' baz'].join('\n'),
},
{
input: ['main =', ' foo', ' bar', 'baz'].join('\n'),
normalized: ['main =', ' foo', ' bar', 'baz'].join('\n'),
},
]
const cases = [
'Console.',
'(',
@ -309,8 +321,6 @@ const cases = [
['foo', ' + bar +'].join('\n'),
['foo', ' + bar', ' - baz'].join('\n'),
['main =', ' foo', 'bar'].join('\n'),
['main =', ' foo', ' bar', ' baz'].join('\n'),
['main =', ' foo', ' bar', 'baz'].join('\n'),
['main ~foo = x'].join('\n'),
['main =', ' ', ' x'].join('\n'),
['main =', ' ', ' x'].join('\n'),
@ -375,13 +385,18 @@ const cases = [
'\n\n',
'\na',
'\n\na',
...normalizingCases,
]
test.each(cases)('parse/print round trip: %s', (code) => {
test.each(cases)('parse/print round-trip: %s', (testCase) => {
const code = typeof testCase === 'object' ? testCase.input : testCase
const expectedCode = typeof testCase === 'object' ? testCase.normalized : testCase

// Get an AST.
const { root } = Ast.parseModuleWithSpans(code)
const root = Ast.parseModule(code)
root.module.setRoot(root)
// Print AST back to source.
const printed = Ast.print(root)
expect(printed.code).toEqual(code)
expect(printed.code).toEqual(expectedCode)
// Loading token IDs from IdMaps is not implemented yet, fix during sync.
printed.info.tokens.clear()
const idMap = Ast.spanMapToIdMap(printed.info)
@ -403,22 +418,29 @@ test.each(cases)('parse/print round trip: %s', (code) => {
})

const parseCases = [
{ code: 'foo bar+baz', tree: ['', [['foo'], [['bar'], '+', ['baz']]]] },
{ code: '(foo)', tree: ['', ['(', ['foo'], ')']] },
{ code: 'foo bar+baz', tree: [['foo'], [['bar'], '+', ['baz']]] },
{ code: '(foo)', tree: ['(', ['foo'], ')'] },
]
test.each(parseCases)('parse: %s', (testCase) => {
const root = Ast.parseBlock(testCase.code)
const root = Ast.parseExpression(testCase.code)
assertDefined(root)
expect(Ast.tokenTree(root)).toEqual(testCase.tree)
})

function functionBlock(topLevel: BodyBlock, name: string) {
const func = findModuleMethod(topLevel, name)
if (!(func?.statement.body instanceof BodyBlock)) return undefined
return func.statement.body
}

test('Insert new expression', () => {
const code = 'main =\n text1 = "foo"\n'
const root = Ast.parseBlock(code)
const main = Ast.functionBlock(root, 'main')!
const main = functionBlock(root, 'main')!
expect(main).toBeDefined()
const edit = root.module.edit()
const rhs = Ast.parse('42', edit)
const assignment = Ast.Assignment.new(edit, 'baz' as Identifier, rhs)
const rhs = Ast.parseExpression('42', edit)!
const assignment = Ast.Assignment.new('baz' as Identifier, rhs, { edit })
edit.getVersion(main).push(assignment)
const printed = edit.getVersion(root).code()
expect(printed).toEqual('main =\n text1 = "foo"\n baz = 42\n')
@ -433,7 +455,7 @@ type SimpleModule = {
function simpleModule(): SimpleModule {
const code = 'main =\n text1 = "foo"\n'
const root = Ast.parseBlock(code)
const main = findModuleMethod(root, 'main')!
const main = findModuleMethod(root, 'main')!.statement
const mainBlock = main.body instanceof Ast.BodyBlock ? main.body : null
assert(mainBlock != null)
expect(mainBlock).toBeInstanceOf(Ast.BodyBlock)
@ -475,8 +497,8 @@ test('Replace subexpression', () => {
const newValue = Ast.TextLiteral.new('bar', edit)
expect(newValue.code()).toBe("'bar'")
edit.replace(assignment.expression!.id, newValue)
const assignment_ = edit.tryGet(assignment.id)!
assert(assignment_ instanceof Ast.Assignment)
const assignment_ = edit.tryGet(assignment.id)
assert(assignment_ instanceof Ast.MutableAssignment)
expect(assignment_.expression!.id).toBe(newValue.id)
expect(edit.tryGet(assignment_.expression!.id)?.code()).toBe("'bar'")
const printed = edit.getVersion(root).code()
@ -487,14 +509,16 @@ test('Modify subexpression - setting a vector', () => {
// A case where the #9357 bug was visible.
const code = 'main =\n text1 = foo\n'
const root = Ast.parseBlock(code)
const main = Ast.functionBlock(root, 'main')!
const main = functionBlock(root, 'main')!
expect(main).not.toBeNull()
const assignment: Ast.Assignment = main.statements().next().value
expect(assignment).toBeInstanceOf(Ast.Assignment)

const edit = root.module.edit()
const transientModule = MutableModule.Transient()
const newValue = Ast.Vector.new(transientModule, [Ast.parse('bar')])
const barExpression = Ast.parseExpression('bar')
assertDefined(barExpression)
const newValue = Ast.Vector.new(transientModule, [barExpression])
expect(newValue.code()).toBe('[bar]')
edit.replaceValue(assignment.expression.id, newValue)
const printed = edit.getVersion(root).code()
@ -520,10 +544,10 @@ test('Block lines interface', () => {
const block = Ast.parseBlock('VLE \nSISI\nGNIK \n')
// Sort alphabetically, but keep the blank line at the end.
const reordered = block.takeLines().sort((a, b) => {
if (a.expression?.node.code() === b.expression?.node.code()) return 0
if (!a.expression) return 1
if (!b.expression) return -1
return a.expression.node.code() < b.expression.node.code() ? -1 : 1
if (a.statement?.node.code() === b.statement?.node.code()) return 0
if (!a.statement) return 1
if (!b.statement) return -1
return a.statement.node.code() < b.statement.node.code() ? -1 : 1
})
const edit = block.module.edit()
const newBlock = Ast.BodyBlock.new(reordered, edit)
@ -560,16 +584,19 @@ test('Construct app', () => {
})

test('Automatic parenthesis', () => {
const block = Ast.parseBlock('main = func arg1 arg2')
const block = Ast.parseModule('main = func arg1 arg2')
block.module.setRoot(block)
let arg1: Ast.MutableAst | undefined
block.visitRecursiveAst((ast) => {
block.visitRecursive((ast) => {
if (ast instanceof Ast.MutableIdent && ast.code() === 'arg1') {
assert(!arg1)
arg1 = ast
}
})
assert(arg1 != null)
arg1.replace(Ast.parse('innerfunc innerarg', block.module))
const replacementExpr = Ast.parseExpression('innerfunc innerarg', block.module)
assertDefined(replacementExpr)
arg1.replace(replacementExpr)
const correctCode = 'main = func (innerfunc innerarg) arg2'
// This assertion will fail when smart printing handles this case.
// At that point we should test tree repair separately.
@ -583,7 +610,7 @@ test('Tree repair: Non-canonical block line attribution', () => {
'func a b =': Ast.Function,
' c = a + b': Ast.Assignment,
'main =': Ast.Function,
' func arg1 arg2': Ast.App,
' func arg1 arg2': Ast.ExpressionStatement,
})
const before = beforeCase.statements

@ -601,7 +628,7 @@ test('Tree repair: Non-canonical block line attribution', () => {
'func a b =': Ast.Function,
'c = a + b': Ast.Assignment,
'main =': Ast.Function,
'func arg1 arg2': Ast.App,
'func arg1 arg2': Ast.ExpressionStatement,
})
const repairedFunc = afterRepair['func a b =']
assert(repairedFunc.body instanceof Ast.BodyBlock)
@ -617,8 +644,9 @@ test('Tree repair: Non-canonical block line attribution', () => {

describe('Code edit', () => {
test('Change argument type', () => {
const beforeRoot = Ast.parse('func arg1 arg2')
beforeRoot.module.replaceRoot(beforeRoot)
const beforeRoot = Ast.parseExpression('func arg1 arg2')
assertDefined(beforeRoot)
beforeRoot.module.setRoot(beforeRoot)
const before = findExpressions(beforeRoot, {
func: Ast.Ident,
arg1: Ast.Ident,
@ -646,8 +674,9 @@ describe('Code edit', () => {
})

test('Insert argument names', () => {
const beforeRoot = Ast.parse('func arg1 arg2')
beforeRoot.module.replaceRoot(beforeRoot)
const beforeRoot = Ast.parseExpression('func arg1 arg2')
assertDefined(beforeRoot)
beforeRoot.module.setRoot(beforeRoot)
const before = findExpressions(beforeRoot, {
func: Ast.Ident,
arg1: Ast.Ident,
@ -676,8 +705,9 @@ describe('Code edit', () => {
})

test('Remove argument names', () => {
const beforeRoot = Ast.parse('func name1=arg1 name2=arg2')
beforeRoot.module.replaceRoot(beforeRoot)
const beforeRoot = Ast.parseExpression('func name1=arg1 name2=arg2')
assertDefined(beforeRoot)
beforeRoot.module.setRoot(beforeRoot)
const before = findExpressions(beforeRoot, {
func: Ast.Ident,
arg1: Ast.Ident,
@ -768,8 +798,9 @@ describe('Code edit', () => {
})

test('Inline expression change', () => {
const beforeRoot = Ast.parse('func name1=arg1 name2=arg2')
beforeRoot.module.replaceRoot(beforeRoot)
const beforeRoot = Ast.parseExpression('func name1=arg1 name2=arg2')
assertDefined(beforeRoot)
beforeRoot.module.setRoot(beforeRoot)
const before = findExpressions(beforeRoot, {
func: Ast.Ident,
arg1: Ast.Ident,
@ -800,9 +831,10 @@ describe('Code edit', () => {

test('No-op inline expression change', () => {
const code = 'a = 1'
const expression = Ast.parse(code)
const expression = Ast.parseStatement(code)
assertDefined(expression)
const module = expression.module
module.replaceRoot(expression)
module.setRoot(expression)
expression.syncToCode(code)
expect(module.root()?.code()).toBe(code)
})
@ -811,14 +843,14 @@ describe('Code edit', () => {
const code = 'a = 1\nb = 2\n'
const block = Ast.parseBlock(code)
const module = block.module
module.replaceRoot(block)
module.setRoot(block)
block.syncToCode(code)
expect(module.root()?.code()).toBe(code)
})

test('Shifting whitespace ownership', () => {
const beforeRoot = Ast.parseModuleWithSpans('value = 1 +\n').root
beforeRoot.module.replaceRoot(beforeRoot)
const beforeRoot = Ast.parseModule('value = 1 +\n')
beforeRoot.module.setRoot(beforeRoot)
const before = findExpressions(beforeRoot, {
value: Ast.Ident,
'1': Ast.NumericLiteral,
@ -841,9 +873,9 @@ describe('Code edit', () => {
})

test('merging', () => {
const block = Ast.parseModuleWithSpans('a = 1\nb = 2').root
const block = Ast.parseModule('a = 1\nb = 2')
const module = block.module
module.replaceRoot(block)
module.setRoot(block)

const editA = module.edit()
editA.getVersion(block).syncToCode('a = 10\nb = 2')
@ -858,7 +890,8 @@ describe('Code edit', () => {
})

test('Analyze app-like', () => {
const appLike = Ast.parse('(Preprocessor.default_preprocessor 3 _ 5 _ <| 4) <| 6')
const appLike = Ast.parseExpression('(Preprocessor.default_preprocessor 3 _ 5 _ <| 4) <| 6')
assertDefined(appLike)
const { func, args } = Ast.analyzeAppLike(appLike)
expect(func.code()).toBe('Preprocessor.default_preprocessor')
expect(args.map((ast) => ast.code())).toEqual(['3', '4', '5', '6'])
@ -904,9 +937,9 @@ test.each([
])(
'Substitute qualified name $pattern inside $original',
({ original, pattern, substitution, expected }) => {
const expression = Ast.parse(original)
const expression = Ast.parseExpression(original) ?? Ast.parseStatement(original)
const module = expression.module
module.replaceRoot(expression)
module.setRoot(expression)
const edit = expression.module.edit()
substituteQualifiedName(expression, pattern as Ast.Identifier, substitution as Ast.Identifier)
module.applyEdit(edit)
@ -960,9 +993,9 @@ test.each([
])(
'Substitute identifier $pattern inside $original',
({ original, pattern, substitution, expected }) => {
const expression = Ast.parse(original)
const expression = Ast.parseExpression(original) ?? Ast.parseStatement(original)
const module = expression.module
module.replaceRoot(expression)
module.setRoot(expression)
const edit = expression.module.edit()
substituteIdentifier(expression, pattern as Ast.Identifier, substitution as Ast.Identifier)
module.applyEdit(edit)
@ -1037,79 +1070,6 @@ test('setRawTextContent promotes single-line uninterpolated text to interpolated
expect(literal.code()).toBe(`'${escapeTextLiteral(rawText)}'`)
})

const docEditCases = [
{ code: '## Simple\nnode', documentation: 'Simple' },
{
code: '## Preferred indent\n 2nd line\n 3rd line\nnode',
documentation: 'Preferred indent\n2nd line\n3rd line',
},
{
code: '## Extra-indented child\n 2nd line\n 3rd line\nnode',
documentation: 'Extra-indented child\n2nd line\n3rd line',
normalized: '## Extra-indented child\n 2nd line\n 3rd line\nnode',
},
{
code: '## Extra-indented child, beyond 4th column\n 2nd line\n 3rd line\nnode',
documentation: 'Extra-indented child, beyond 4th column\n2nd line\n 3rd line',
normalized: '## Extra-indented child, beyond 4th column\n 2nd line\n 3rd line\nnode',
},
{
code: '##Preferred indent, no initial space\n 2nd line\n 3rd line\nnode',
documentation: 'Preferred indent, no initial space\n2nd line\n3rd line',
normalized: '## Preferred indent, no initial space\n 2nd line\n 3rd line\nnode',
},
{
code: '## Minimum indent\n 2nd line\n 3rd line\nnode',
documentation: 'Minimum indent\n2nd line\n3rd line',
normalized: '## Minimum indent\n 2nd line\n 3rd line\nnode',
},
]
test.each(docEditCases)('Documentation edit round trip: $code', (docCase) => {
const { code, documentation } = docCase
const parsed = Ast.Documented.tryParse(code)
assert(parsed != null)
const parsedDocumentation = parsed.documentation()
expect(parsedDocumentation).toBe(documentation)
const edited = MutableModule.Transient().copy(parsed)
edited.setDocumentationText(parsedDocumentation)
expect(edited.code()).toBe(docCase.normalized ?? code)
})

test.each([
'## Some documentation\nf x = 123',
'## Some documentation\n and a second line\nf x = 123',
'## Some documentation## Another documentation??\nf x = 123',
])('Finding documentation: $code', (code) => {
const block = Ast.parseBlock(code)
const method = Ast.findModuleMethod(block, 'f')
assertDefined(method)
expect(method.documentingAncestor()).toBeDefined()
})

test.each([
{
code: '## Already documented\nf x = 123',
expected: '## Already documented\nf x = 123',
},
{
code: 'f x = 123',
expected: '## \nf x = 123',
},
])('Adding documentation: $code', ({ code, expected }) => {
const block = Ast.parseBlock(code)
const module = block.module
const method = module.getVersion(Ast.findModuleMethod(block, 'f')!)
method.getOrInitDocumentation()
expect(block.code()).toBe(expected)
})

test('Creating comments', () => {
const expr = Ast.parse('2 + 2')
expr.module.replaceRoot(expr)
expr.update((expr) => Ast.Documented.new('Calculate five', expr))
expect(expr.module.root()?.code()).toBe('## Calculate five\n2 + 2')
})

test.each([
{ code: 'operator1', expected: { subject: 'operator1', accesses: [] } },
{ code: 'operator1 foo bar', expected: { subject: 'operator1 foo bar', accesses: [] } },
@ -1132,7 +1092,7 @@ test.each([
},
{ code: 'operator1 + operator2', expected: { subject: 'operator1 + operator2', accesses: [] } },
])('Access chain in $code', ({ code, expected }) => {
const ast = Ast.parse(code)
const ast = Ast.parseExpression(code)
const { subject, accessChain } = Ast.accessChain(ast)
expect({
subject: subject.code(),
@ -1148,7 +1108,7 @@ test.each`
`('Pushing $pushed to vector $initial', ({ initial, pushed, expected }) => {
const vector = Ast.Vector.tryParse(initial)
assertDefined(vector)
vector.push(Ast.parse(pushed, vector.module))
vector.push(Ast.parseExpression(pushed, vector.module))
expect(vector.code()).toBe(expected)
})
@ -1228,7 +1188,7 @@ test.each`
({ initial, index, value, expected }) => {
const vector = Ast.Vector.tryParse(initial)
assertDefined(vector)
vector.set(index, Ast.parse(value, vector.module))
vector.set(index, Ast.parseExpression(value, vector.module))
expect(vector.code()).toBe(expected)
},
)
@ -1250,7 +1210,7 @@ test.each`
'Conversions between enso literals and js numbers: $ensoNumber',
({ ensoNumber, jsNumber, expectedEnsoNumber }) => {
if (ensoNumber != null) {
const literal = Ast.parse(ensoNumber)
const literal = Ast.parseExpression(ensoNumber)
expect(tryEnsoToNumber(literal)).toBe(jsNumber)
}
if (jsNumber != null) {
@ -0,0 +1,90 @@
import { assert } from '@/util/assert'
import { Ast } from '@/util/ast'
import { test } from '@fast-check/vitest'
import { expect } from 'vitest'

test.each([
{ code: '## Simple\nnode', documentation: 'Simple' },
{
code: '## Preferred indent\n 2nd line\n 3rd line\nnode',
documentation: 'Preferred indent\n2nd line\n3rd line',
},
{
code: '## Extra-indented child\n 2nd line\n 3rd line\nnode',
documentation: 'Extra-indented child\n2nd line\n3rd line',
normalized: '## Extra-indented child\n 2nd line\n 3rd line\nnode',
},
{
code: '## Extra-indented child, beyond 4th column\n 2nd line\n 3rd line\nnode',
documentation: 'Extra-indented child, beyond 4th column\n2nd line\n 3rd line',
normalized: '## Extra-indented child, beyond 4th column\n 2nd line\n 3rd line\nnode',
},
{
code: '##Preferred indent, no initial space\n 2nd line\n 3rd line\nnode',
documentation: 'Preferred indent, no initial space\n2nd line\n3rd line',
normalized: '## Preferred indent, no initial space\n 2nd line\n 3rd line\nnode',
},
{
code: '## Minimum indent\n 2nd line\n 3rd line\nnode',
documentation: 'Minimum indent\n2nd line\n3rd line',
normalized: '## Minimum indent\n 2nd line\n 3rd line\nnode',
},
])('Documentation edit round-trip: $code', (docCase) => {
const { code, documentation } = docCase
const parsed = Ast.parseStatement(code)!
const parsedDocumentation = parsed.documentationText()
expect(parsedDocumentation).toBe(documentation)
const edited = Ast.MutableModule.Transient().copy(parsed)
assert('setDocumentationText' in edited)
edited.setDocumentationText(parsedDocumentation)
expect(edited.code()).toBe(docCase.normalized ?? code)
})

test.each([
'## Some documentation\nf x = 123',
'## Some documentation\n and a second line\nf x = 123',
'## Some documentation## Another documentation??\nf x = 123',
])('Finding documentation: $code', (code) => {
const block = Ast.parseBlock(code)
const method = Ast.findModuleMethod(block, 'f')!.statement
expect(method.documentationText()).toBeTruthy()
})

test.each([
{
code: '## Already documented\nf x = 123',
expected: '## Already documented\nf x = 123',
},
{
code: 'f x = 123',
expected: '##\nf x = 123',
},
])('Adding documentation: $code', ({ code, expected }) => {
const block = Ast.parseBlock(code)
const module = block.module
const method = module.getVersion(Ast.findModuleMethod(block, 'f')!.statement)
if (method.documentationText() === undefined) {
method.setDocumentationText('')
}
expect(block.code()).toBe(expected)
})

test('Creating comments', () => {
const block = Ast.parseBlock('2 + 2')
block.module.setRoot(block)
const statement = [...block.statements()][0]! as Ast.MutableExpressionStatement
const docText = 'Calculate five'
statement.setDocumentationText(docText)
expect(statement.module.root()?.code()).toBe(`## ${docText}\n2 + 2`)
})

test('Creating comments: indented', () => {
const block = Ast.parseBlock('main =\n x = 1')
const module = block.module
module.setRoot(block)
const main = module.getVersion(Ast.findModuleMethod(block, 'main')!.statement)
const statement = [...main.bodyAsBlock().statements()][0]! as Ast.MutableAssignment
const docText = 'The smallest natural number'
statement.setDocumentationText(docText)
expect(statement.module.root()?.code()).toBe(`main =\n ## ${docText}\n x = 1`)
})
@ -80,9 +80,9 @@ test.each([
extracted: ['with_enabled_context', "'current_context_name'", 'a + b'],
},
])('`isMatch` and `extractMatches`', ({ target, pattern, extracted }) => {
const targetAst = Ast.parse(target)
const targetAst = Ast.parseExpression(target)
const module = targetAst.module
const patternAst = Pattern.parse(pattern)
const patternAst = Pattern.parseExpression(pattern)
expect(
patternAst.match(targetAst) !== undefined,
`'${target}' has CST ${extracted != null ? '' : 'not '}matching '${pattern}'`,
@ -101,9 +101,9 @@ test.each([
{ template: 'a __ c', source: 'b', result: 'a b c' },
{ template: 'a . __ . c', source: 'b', result: 'a . b . c' },
])('instantiate', ({ template, source, result }) => {
const pattern = Pattern.parse(template)
const pattern = Pattern.parseExpression(template)
const edit = MutableModule.Transient()
const intron = Ast.parse(source, edit)
const intron = Ast.parseExpression(source, edit)
const instantiated = pattern.instantiate(edit, [intron])
expect(instantiated.code()).toBe(result)
})
@ -10,17 +10,17 @@ test.each`
${'## Documentation\n2 + 2'} | ${undefined} | ${'2 + 2'} | ${'Documentation'}
${'## Documentation\nfoo = 2 + 2'} | ${'foo'} | ${'2 + 2'} | ${'Documentation'}
`('Node information from AST $line line', ({ line, pattern, rootExpr, documentation }) => {
const ast = Ast.Ast.parse(line)
const ast = [...Ast.parseBlock(line).statements()][0]!
const node = nodeFromAst(ast, false)
expect(node?.outerExpr).toBe(ast)
expect(node?.outerAst).toBe(ast)
expect(node?.pattern?.code()).toBe(pattern)
expect(node?.rootExpr.code()).toBe(rootExpr)
expect(node?.innerExpr.code()).toBe(rootExpr)
expect(node?.docs?.documentation()).toBe(documentation)
expect(node?.outerAst.isStatement() && node.outerAst.documentationText()).toBe(documentation)
})

test.each(['## Documentation only'])("'%s' should not be a node", (line) => {
const ast = Ast.Ast.parse(line)
const ast = Ast.parseStatement(line)
const node = nodeFromAst(ast, false)
expect(node).toBeUndefined()
})
@ -47,7 +47,7 @@ test.each([
},
{ code: 'operator1 + operator2', expected: undefined },
])('Primary application subject of $code', ({ code, expected }) => {
const ast = Ast.Ast.parse(code)
const ast = Ast.parseExpression(code)
const module = ast.module
const primaryApplication = primaryApplicationSubject(ast)
const analyzed = primaryApplication && {
@ -1,134 +0,0 @@
import { assert } from '@/util/assert'
import { RawAstExtended } from '@/util/ast/extended'
import { GeneralOprApp, operandsOfLeftAssocOprChain, type OperatorChain } from '@/util/ast/opr'
import { RawAst } from '@/util/ast/raw'
import { expect, test } from 'vitest'

test.each([
{ code: '2 + 3', result: ['2', '+', '3'] },
{ code: '2 + 4 + 5', result: ['2 + 4', '+', '5'] },
{ code: '2\n + 3\n + 4', result: ['2', '+', '3', '+', '4'] },
{ code: '2\n - 4\n * 5', result: ['2', '-', '4', '*', '5'] },
{ code: 'foo . bar\n . baz', result: ['foo . bar', '.', 'baz'] },
// See https://github.com/orgs/enso-org/discussions/8021
// { code: '2 + 3\n + 4', result: ['2 + 3', '+', '4'] },
{ code: '+ 2', result: [null, '+', '2'] },
{ code: '2 +', result: ['2', '+', null] },
{ code: '.foo', result: [null, '.', 'foo'] },
{ code: 'foo.', result: ['foo', '.', null] },
])('Generalized infix from $code', ({ code, result }) => {
let ast = RawAstExtended.parseLine(code)
if (ast.isTree(RawAst.Tree.Type.OprSectionBoundary)) {
ast = ast.map((boundary) => boundary.ast)
}
assert(
ast.isTree(RawAst.Tree.Type.OprApp) || ast.isTree(RawAst.Tree.Type.OperatorBlockApplication),
)
const opr = new GeneralOprApp(ast as OperatorChain<false>)
expect(Array.from(opr.componentsReprs())).toStrictEqual(result)
})

test.each([
{
code: '2 + 3',
result: [
{ type: 'ast', repr: '2' },
{ type: 'ast', repr: '3' },
],
},
{
code: '2 + 3 + 4',
result: [
{ type: 'ast', repr: '2' },
{ type: 'ast', repr: '3' },
{ type: 'ast', repr: '4' },
],
},
{
code: '2 * 3 + 4',
result: [
{ type: 'ast', repr: '2 * 3' },
{ type: 'ast', repr: '4' },
],
},
{
code: '2\n + 3\n + 4',
result: [
{ type: 'ast', repr: '2' },
{ type: 'ast', repr: '3' },
{ type: 'ast', repr: '4' },
],
},
// See https://github.com/orgs/enso-org/discussions/8021
// {
// code: '2 + 3\n + 4',
// result: [
// { type: 'ast', repr: '2' },
// { type: 'ast', repr: '3' },
// { type: 'ast', repr: '4' },
// ],
// },
// There is a bug in AST spans in some OperatorBlockApplications. Fix this test once fixed
{
code: '2\n * 3\n + 44',
result: [
{ type: 'partOfOprBlockApp', repr: '2\n * 3\n + 44', statements: 1 },
{ type: 'ast', repr: '44' },
],
},
{
code: '2\n + 3\n * 4\n + 55',
result: [
{ type: 'partOfOprBlockApp', repr: '2\n + 3\n * 4\n + 55', statements: 2 },
{ type: 'ast', repr: '55' },
],
},
// https://github.com/orgs/enso-org/discussions/8021
// {
// code: '2 * 3\n + 4',
// result: [
// { type: 'ast', repr: '2 * 3' },
// { type: 'ast', repr: '4' },
// ],
// },
{
code: 'foo bar',
result: [{ type: 'ast', repr: 'foo bar' }],
},
{
code: '2 * 3',
opr: '+',
result: [{ type: 'ast', repr: '2 * 3' }],
},
])(
'Getting left-associative operator operands in $code',
({
code,
opr,
result,
}: {
code: string
opr?: string
result: { type: string; repr: string; statements?: number }[]
}) => {
const ast = RawAstExtended.parseLine(code)
const actual = operandsOfLeftAssocOprChain(ast, opr)
const actualWithExpected = Array.from(actual, (operand, i) => {
return { actual: operand, expected: result[i] }
})
for (const { actual, expected } of actualWithExpected) {
if (expected === null) {
expect(actual).toBeNull()
} else {
expect(actual?.type).toStrictEqual(expected?.type)
if (actual?.type === 'ast') {
expect(actual.ast.repr()).toStrictEqual(expected?.repr)
} else {
assert(actual?.type == 'partOfOprBlockApp')
expect(actual.ast.repr()).toStrictEqual(expected?.repr)
expect(actual.statements).toStrictEqual(expected?.statements)
}
}
}
},
)
@ -1,4 +1,4 @@
import { Ast } from '@/util/ast/abstract'
import { Ast } from '@/util/ast'
import { Prefixes } from '@/util/ast/prefixes'
import { expect, test } from 'vitest'
@ -67,13 +67,13 @@ test.each([
},
])('modify', ({ prefixes: lines, modifications, source, target }) => {
const prefixes = Prefixes.FromLines(lines as any)
const sourceAst = Ast.parse(source)
sourceAst.module.replaceRoot(sourceAst)
const sourceAst = Ast.parseExpression(source)
sourceAst.module.setRoot(sourceAst)
const edit = sourceAst.module.edit()
const modificationAsts = Object.fromEntries(
Object.entries(modifications).map(([k, v]) => [
k,
v ? Array.from(v, (mod) => Ast.parse(mod, edit)) : undefined,
v ? Array.from(v, (mod) => Ast.parseExpression(mod, edit)) : undefined,
]),
)
prefixes.modify(edit.getVersion(sourceAst), modificationAsts)
@ -1,17 +1,21 @@
|
||||
import {
|
||||
astContainingChar,
|
||||
childrenAstNodes,
|
||||
debugAst,
|
||||
rawParseLine,
|
||||
rawParseModule,
|
||||
readAstOrTokenSpan,
|
||||
readAstSpan,
|
||||
readTokenSpan,
|
||||
walkRecursive,
|
||||
} from '@/util/ast/raw'
|
||||
import { RawAst, rawParseModule, readAstOrTokenSpan, walkRecursive } from '@/util/ast/raw'
|
||||
import { assert, expect, test } from 'vitest'
|
||||
import { Token, Tree } from 'ydoc-shared/ast/generated/ast'
|
||||
import type { LazyObject } from 'ydoc-shared/ast/parserSupport'
|
||||
import { assertDefined } from 'ydoc-shared/util/assert'
|
||||
import { tryGetSoleValue } from 'ydoc-shared/util/data/iterable'
|
||||
|
||||
/**
* Read a single line of code
*
* Helper for tests. If the code is multiline, an exception is raised.
*/
function rawParseLine(code: string): RawAst.Tree {
const block = rawParseModule(code)
const soleExpression = tryGetSoleValue(block.statements)?.expression
assertDefined(soleExpression)
return soleExpression
}
|
||||
|
||||
function validateSpans(obj: LazyObject, initialPos?: number): number {
|
||||
const state = { pos: initialPos ?? 0 }
|
||||
@ -47,97 +51,17 @@ const parseCases = [
|
||||
'2\n + 3\n + 4',
|
||||
]
|
||||
|
||||
test.each(parseCases)("Parsing '%s'", (code) => {
|
||||
expect(debugAst(rawParseModule(code))).toMatchSnapshot()
|
||||
})
|
||||
|
||||
test.each(parseCases)("AST spans of '%s' are valid", (input) => {
|
||||
const tree = rawParseModule(input)
|
||||
const endPos = validateSpans(tree)
|
||||
expect(endPos).toStrictEqual(input.length)
|
||||
})
|
||||
|
||||
test("Reading AST node's code", () => {
|
||||
const code = 'Data.read File\n2 + 3'
|
||||
const ast = rawParseModule(code)
|
||||
expect(readAstSpan(ast, code)).toStrictEqual(code)
|
||||
assert(ast.type === Tree.Type.BodyBlock)
|
||||
const statements = Array.from(ast.statements)
|
||||
|
||||
assert(statements[0]?.expression != null)
|
||||
expect(readAstSpan(statements[0].expression, code)).toStrictEqual('Data.read File')
|
||||
assert(statements[0].expression.type === Tree.Type.App)
|
||||
expect(readAstSpan(statements[0].expression.func, code)).toStrictEqual('Data.read')
|
||||
expect(readAstSpan(statements[0].expression.arg, code)).toStrictEqual('File')
|
||||
|
||||
assert(statements[1]?.expression != null)
|
||||
expect(readAstSpan(statements[1].expression, code)).toStrictEqual('2 + 3')
|
||||
assert(statements[1].expression.type === Tree.Type.OprApp)
|
||||
assert(statements[1].expression.lhs != null)
|
||||
assert(statements[1].expression.rhs != null)
|
||||
assert(statements[1].expression.opr.ok)
|
||||
expect(readAstSpan(statements[1].expression.lhs, code)).toStrictEqual('2')
|
||||
expect(readTokenSpan(statements[1].expression.opr.value, code)).toStrictEqual('+')
|
||||
expect(readAstSpan(statements[1].expression.rhs, code)).toStrictEqual('3')
|
||||
})
|
||||
|
||||
test.each([
|
||||
[
|
||||
'2 + a',
|
||||
[
|
||||
{ type: Tree.Type.Number, repr: '2' },
|
||||
{ type: Tree.Type.Ident, repr: 'a' },
|
||||
],
|
||||
],
|
||||
[
|
||||
'a.b',
|
||||
[
|
||||
{ type: Tree.Type.Ident, repr: 'a' },
|
||||
{ type: Tree.Type.Ident, repr: 'b' },
|
||||
],
|
||||
],
|
||||
[
|
||||
'Data.read foo',
|
||||
[
|
||||
{ type: Tree.Type.OprApp, repr: 'Data.read' },
|
||||
{ type: Tree.Type.Ident, repr: 'foo' },
|
||||
],
|
||||
],
|
||||
['(2 + a)', [{ type: Tree.Type.OprApp, repr: '2 + a' }]],
|
||||
[
|
||||
'Data.read\n foo\n bar',
|
||||
[
|
||||
{ type: Tree.Type.OprApp, repr: 'Data.read' },
|
||||
{ type: Tree.Type.Ident, repr: 'foo' },
|
||||
{ type: Tree.Type.Ident, repr: 'bar' },
|
||||
],
|
||||
],
|
||||
[
|
||||
'Data.read file=foo',
|
||||
[
|
||||
{ type: Tree.Type.OprApp, repr: 'Data.read' },
|
||||
{ type: Tree.Type.Ident, repr: 'foo' },
|
||||
],
|
||||
],
|
||||
// These are Invalid nodes, so the child is a subtree containing the whole expression.
|
||||
['(', [{ type: Tree.Type.Group, repr: '(' }]],
|
||||
['(foo', [{ type: Tree.Type.Group, repr: '(foo' }]],
|
||||
])("Reading children of '%s'", (code, expected) => {
|
||||
const ast = rawParseLine(code)
|
||||
const children = Array.from(childrenAstNodes(ast))
|
||||
const childrenWithExpected = children.map((child, i) => {
|
||||
return { child, expected: expected[i] }
|
||||
})
|
||||
for (const { child, expected } of childrenWithExpected) {
|
||||
expect(child.type).toBe(expected?.type)
|
||||
expect(readAstSpan(child, code)).toBe(expected?.repr)
|
||||
}
|
||||
})
|
||||
|
||||
test.each([
|
||||
[
|
||||
'2 + a',
|
||||
[
|
||||
{ tree: Tree.Type.ExpressionStatement, repr: '2 + a' },
|
||||
{ tree: Tree.Type.OprApp, repr: '2 + a' },
|
||||
{ tree: Tree.Type.Number, repr: '2' },
|
||||
{ token: Token.Type.Digits, repr: '2' },
|
||||
@ -158,61 +82,3 @@ test.each([
|
||||
|
||||
expect(visitedRepr).toStrictEqual(expected)
|
||||
})
|
||||
|
||||
test.each([
|
||||
[
|
||||
'2 + a',
|
||||
0,
|
||||
[
|
||||
{ type: Tree.Type.Number, repr: '2' },
|
||||
{ type: Tree.Type.OprApp, repr: '2 + a' },
|
||||
{ type: Tree.Type.BodyBlock, repr: '2 + a' },
|
||||
],
|
||||
],
|
||||
[
|
||||
'Data.read foo',
|
||||
5,
|
||||
[
|
||||
{ type: Tree.Type.Ident, repr: 'read' },
|
||||
{ type: Tree.Type.OprApp, repr: 'Data.read' },
|
||||
{ type: Tree.Type.App, repr: 'Data.read foo' },
|
||||
{ type: Tree.Type.BodyBlock, repr: 'Data.read foo' },
|
||||
],
|
||||
],
|
||||
[
|
||||
'Data.read foo',
|
||||
4,
|
||||
[
|
||||
{ type: Tree.Type.OprApp, repr: 'Data.read' },
|
||||
{ type: Tree.Type.App, repr: 'Data.read foo' },
|
||||
{ type: Tree.Type.BodyBlock, repr: 'Data.read foo' },
|
||||
],
|
||||
],
|
||||
[
|
||||
'Data.read foo',
|
||||
9,
|
||||
[
|
||||
{ type: Tree.Type.App, repr: 'Data.read foo' },
|
||||
{ type: Tree.Type.BodyBlock, repr: 'Data.read foo' },
|
||||
],
|
||||
],
|
||||
[
|
||||
'Data.',
|
||||
4,
|
||||
[
|
||||
{ type: Tree.Type.OprApp, repr: 'Data.' },
|
||||
{ type: Tree.Type.OprSectionBoundary, repr: 'Data.' },
|
||||
{ type: Tree.Type.BodyBlock, repr: 'Data.' },
|
||||
],
|
||||
],
|
||||
])("Reading AST from code '%s' and position %i", (code, position, expected) => {
|
||||
const ast = rawParseModule(code)
|
||||
const astAtPosition = astContainingChar(position, ast)
|
||||
const resultWithExpected = astAtPosition.map((ast, i) => {
|
||||
return { ast, expected: expected[i] }
|
||||
})
|
||||
for (const { ast, expected } of resultWithExpected) {
|
||||
expect(ast.type).toBe(expected?.type)
|
||||
expect(readAstSpan(ast, code)).toBe(expected?.repr)
|
||||
}
|
||||
})
|
||||
|
@ -5,21 +5,28 @@ import { nextTick, watchEffect } from 'vue'
|
||||
import * as Y from 'yjs'
|
||||
|
||||
test('Module reactivity: applyEdit', async () => {
|
||||
const beforeEdit = Ast.parse('func arg1 arg2')
|
||||
beforeEdit.module.replaceRoot(beforeEdit)
|
||||
const beforeEdit = Ast.parseBlock('func arg1 arg2')
|
||||
beforeEdit.module.setRoot(beforeEdit)
|
||||
|
||||
const module = reactiveModule(new Y.Doc(), () => {})
|
||||
module.applyEdit(beforeEdit.module)
|
||||
expect(module.root()!.code()).toBe(beforeEdit.code())
|
||||
|
||||
const app2 = module.root() as unknown as Ast.App
|
||||
const app2 = (
|
||||
(module.root() as Ast.MutableBodyBlock).lines[0]!.statement!
|
||||
.node as Ast.MutableExpressionStatement
|
||||
).expression as unknown as Ast.App
|
||||
let app2Code: string | undefined = undefined
|
||||
watchEffect(() => (app2Code = app2.argument.code()))
|
||||
expect(app2Code).toBe('arg2')
|
||||
|
||||
const edit = beforeEdit.module.edit()
|
||||
const editApp2 = edit.getVersion(beforeEdit) as any as Ast.MutableApp
|
||||
editApp2.setArgument(Ast.Ident.tryParse('newArg', edit)!)
|
||||
const editApp2 = (
|
||||
edit.getVersion(beforeEdit).lines[0]!.statement!.node as Ast.MutableExpressionStatement
|
||||
).expression as Ast.MutableApp
|
||||
const newArg = Ast.Ident.tryParse('newArg', edit)
|
||||
expect(newArg).toBeDefined()
|
||||
editApp2.setArgument(newArg!)
|
||||
const codeAfterEdit = 'func arg1 newArg'
|
||||
expect(edit.root()!.code()).toBe(codeAfterEdit)
|
||||
|
||||
@ -30,8 +37,8 @@ test('Module reactivity: applyEdit', async () => {
|
||||
})
|
||||
|
||||
test('Module reactivity: Direct Edit', async () => {
|
||||
const beforeEdit = Ast.parse('func arg1 arg2')
|
||||
beforeEdit.module.replaceRoot(beforeEdit)
|
||||
const beforeEdit = Ast.parseExpression('func arg1 arg2')
|
||||
beforeEdit.module.setRoot(beforeEdit)
|
||||
|
||||
const module = reactiveModule(new Y.Doc(), () => {})
|
||||
module.applyEdit(beforeEdit.module)
|
||||
@ -52,29 +59,37 @@ test('Module reactivity: Direct Edit', async () => {
|
||||
})
|
||||
|
||||
test('Module reactivity: Tracking access to ancestors', async () => {
|
||||
const docsBeforeEdit = 'The main method'
|
||||
const beforeEdit = Ast.parseBlock(`## ${docsBeforeEdit}\nmain =\n 23`)
|
||||
beforeEdit.module.replaceRoot(beforeEdit)
|
||||
const beforeEdit = Ast.parseModule('main = 23\nother = f')
|
||||
beforeEdit.module.setRoot(beforeEdit)
|
||||
|
||||
const module = reactiveModule(new Y.Doc(), () => {})
|
||||
module.applyEdit(beforeEdit.module)
|
||||
expect(module.root()!.code()).toBe(beforeEdit.code())
|
||||
|
||||
const block = module.root() as any as Ast.BodyBlock
|
||||
const expression = ([...block.statements()][0] as Ast.Documented).expression as Ast.Function
|
||||
expect(expression.name.code()).toBe('main')
|
||||
let mainDocs: string | undefined = undefined
|
||||
watchEffect(() => (mainDocs = expression.documentingAncestor()?.documentation()))
|
||||
expect(mainDocs).toBe(docsBeforeEdit)
|
||||
|
||||
const [func, otherFunc] = block.statements() as [Ast.Function, Ast.Function]
|
||||
expect(func.name.code()).toBe('main')
|
||||
expect(otherFunc.name.code()).toBe('other')
|
||||
const expression = Array.from(func.bodyExpressions())[0]!
|
||||
expect(expression.code()).toBe('23')
|
||||
const otherExpression = Array.from(otherFunc.bodyExpressions())[0]!
|
||||
expect(otherExpression.code()).toBe('f')
|
||||
|
||||
let parentAccesses = 0
|
||||
watchEffect(() => {
|
||||
expect(expression.parent()).toBeDefined()
|
||||
parentAccesses += 1
|
||||
})
|
||||
expect(parentAccesses).toBe(1)
|
||||
|
||||
const edit = beforeEdit.module.edit()
|
||||
const editBlock = edit.getVersion(beforeEdit) as any as Ast.MutableBodyBlock
|
||||
const editDoc = [...editBlock.statements()][0] as Ast.MutableDocumented
|
||||
const docsAfterEdit = 'The main method, now with more documentation'
|
||||
editDoc.setDocumentationText(docsAfterEdit)
|
||||
|
||||
const taken = edit.getVersion(expression).replaceValue(Ast.parseExpression('replacement', edit))
|
||||
edit.getVersion(otherExpression).updateValue((oe) => Ast.App.positional(oe, taken, edit))
|
||||
module.applyEdit(edit)
|
||||
expect(mainDocs).toBe(docsBeforeEdit)
|
||||
|
||||
expect(module.root()?.code()).toBe('main = replacement\nother = f 23')
|
||||
expect(parentAccesses).toBe(1)
|
||||
await nextTick()
|
||||
expect(mainDocs).toBe(docsAfterEdit)
|
||||
expect(parentAccesses).toBe(2)
|
||||
})
|
||||
|
@ -12,7 +12,7 @@ test('Test SourceDocument', () => {
|
||||
const code = '1 + 1'
|
||||
const edit1 = syncModule.edit()
|
||||
const root = Ast.parseBlock(code, edit1)
|
||||
edit1.replaceRoot(root)
|
||||
edit1.setRoot(root)
|
||||
syncModule.applyEdit(edit1)
|
||||
expect(sourceDoc.text).toBe(code)
|
||||
|
||||
|
@ -20,9 +20,9 @@ export function testCase<T extends StringsWithTypeValues>(spec: T): TestCase<T>
|
||||
|
||||
const statementIndex = new Map<string, Ast.Ast>()
|
||||
const parsed = Ast.parseBlock(code)
|
||||
parsed.module.replaceRoot(parsed)
|
||||
parsed.module.setRoot(parsed)
|
||||
const statements = new Array<Ast.Ast>()
|
||||
parsed.visitRecursiveAst((ast) => {
|
||||
parsed.visitRecursive((ast) => {
|
||||
if (ast instanceof Ast.BodyBlock) statements.push(...ast.statements())
|
||||
})
|
||||
for (const statement of statements) {
|
||||
@ -54,7 +54,7 @@ export function tryFindExpressions<T extends StringsWithTypeValues>(
|
||||
): Partial<WithValuesInstantiated<T>> {
|
||||
const result: Partial<WithValuesInstantiated<T>> = {}
|
||||
const expressionsSought = new Set(Object.keys(expressions))
|
||||
root.visitRecursiveAst((ast) => {
|
||||
root.visitRecursive((ast) => {
|
||||
const code = ast.code()
|
||||
const trimmedFirstLine = code.split('\n', 1)[0]!.trim()
|
||||
if (!expressionsSought.has(trimmedFirstLine)) return
|
||||
|
@ -1,62 +1,52 @@
|
||||
import { normalizeQualifiedName, qnFromSegments } from '@/util/qualifiedName'
|
||||
import type {
|
||||
AstId,
|
||||
IdentifierOrOperatorIdentifier,
|
||||
Mutable,
|
||||
MutableAst,
|
||||
NodeKey,
|
||||
Owned,
|
||||
QualifiedName,
|
||||
TokenId,
|
||||
TokenKey,
|
||||
} from 'ydoc-shared/ast'
|
||||
import {
|
||||
Ast,
|
||||
BodyBlock,
|
||||
Expression,
|
||||
Function,
|
||||
Ident,
|
||||
IdentifierOrOperatorIdentifier,
|
||||
Mutable,
|
||||
MutableAst,
|
||||
MutableBodyBlock,
|
||||
MutableExpression,
|
||||
MutableFunction,
|
||||
MutableIdent,
|
||||
MutableModule,
|
||||
MutablePropertyAccess,
|
||||
MutableStatement,
|
||||
NegationApp,
|
||||
NumericLiteral,
|
||||
OprApp,
|
||||
Owned,
|
||||
PropertyAccess,
|
||||
QualifiedName,
|
||||
Statement,
|
||||
Token,
|
||||
isTokenId,
|
||||
parseExpression,
|
||||
print,
|
||||
} from 'ydoc-shared/ast'
|
||||
|
||||
export * from 'ydoc-shared/ast'
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function deserialize(serialized: string): Owned {
|
||||
const parsed: SerializedPrintedSource = JSON.parse(serialized)
|
||||
/** Given an output of {@link serializeExpression}, returns a deserialized expression. */
|
||||
export function deserializeExpression(serialized: string): Owned<MutableExpression> {
|
||||
// Not implemented: restoring serialized external IDs. This is not the best approach anyway;
|
||||
// Y.Js can't merge edits to objects when they're being serialized and deserialized.
|
||||
return Ast.parse(parsed.code)
|
||||
return parseExpression(serialized)!
|
||||
}
|
||||
|
||||
interface SerializedInfoMap {
|
||||
nodes: Record<NodeKey, AstId[]>
|
||||
tokens: Record<TokenKey, TokenId>
|
||||
}
|
||||
|
||||
interface SerializedPrintedSource {
|
||||
info: SerializedInfoMap
|
||||
code: string
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function serialize(ast: Ast): string {
|
||||
return JSON.stringify(print(ast))
|
||||
/** Returns a serialized representation of the expression. */
|
||||
export function serializeExpression(ast: Expression): string {
|
||||
return print(ast).code
|
||||
}
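// Round-trip sketch for the two helpers above ('a + b' is an arbitrary example expression;
// external IDs are not preserved across the round trip):
const exampleExpression = parseExpression('a + b')!
const exampleRestored = deserializeExpression(serializeExpression(exampleExpression))
console.log(exampleRestored.code()) // 'a + b'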
|
||||
|
||||
export type TokenTree = (TokenTree | string)[]
|
||||
/** TODO: Add docs */
|
||||
/** Returns a debug representation. */
|
||||
export function tokenTree(root: Ast): TokenTree {
|
||||
const module = root.module
|
||||
return Array.from(root.concreteChildren(), (child) => {
|
||||
return Array.from(root.concreteChildren({ verbatim: false, indent: '' }), (child) => {
|
||||
if (isTokenId(child.node)) {
|
||||
return module.getToken(child.node).code()
|
||||
} else {
|
||||
@ -71,7 +61,7 @@ export function tokenTreeWithIds(root: Ast): TokenTree {
|
||||
const module = root.module
|
||||
return [
|
||||
root.externalId,
|
||||
...Array.from(root.concreteChildren(), (child) => {
|
||||
...Array.from(root.concreteChildren({ verbatim: false, indent: '' }), (child) => {
|
||||
if (isTokenId(child.node)) {
|
||||
return module.getToken(child.node).code()
|
||||
} else {
|
||||
@ -86,38 +76,45 @@ export function tokenTreeWithIds(root: Ast): TokenTree {
|
||||
export function moduleMethodNames(topLevel: BodyBlock): Set<string> {
|
||||
const result = new Set<string>()
|
||||
for (const statement of topLevel.statements()) {
|
||||
const inner = statement.innerExpression()
|
||||
if (inner instanceof Function) {
|
||||
result.add(inner.name.code())
|
||||
}
|
||||
if (statement instanceof Function) result.add(statement.name.code())
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// FIXME: We should use alias analysis to handle ambiguous names correctly.
|
||||
/** TODO: Add docs */
|
||||
export function findModuleMethod(topLevel: BodyBlock, name: string): Function | undefined {
|
||||
for (const statement of topLevel.statements()) {
|
||||
const inner = statement.innerExpression()
|
||||
if (inner instanceof Function && inner.name.code() === name) {
|
||||
return inner
|
||||
}
|
||||
export function findModuleMethod(
|
||||
topLevel: MutableBodyBlock,
|
||||
name: string,
|
||||
): { statement: MutableFunction; index: number } | undefined
|
||||
export function findModuleMethod(
|
||||
topLevel: BodyBlock,
|
||||
name: string,
|
||||
): { statement: Function; index: number } | undefined
|
||||
/** Find the definition of the function with the specified name in the given block. */
|
||||
export function findModuleMethod(
|
||||
topLevel: BodyBlock,
|
||||
name: string,
|
||||
): { statement: Function; index: number } | undefined {
|
||||
// FIXME: We should use alias analysis to handle shadowing correctly.
|
||||
const isFunctionWithName = (statement: Statement, name: string) =>
|
||||
statement instanceof Function && statement.name.code() === name
|
||||
const index = topLevel.lines.findIndex(
|
||||
(line) => line.statement && isFunctionWithName(line.statement.node, name),
|
||||
)
|
||||
if (index === -1) return undefined
|
||||
const statement = topLevel.lines[index]!.statement!.node as Function
|
||||
return {
|
||||
/** The `Function` definition. */
|
||||
statement,
|
||||
/** The index into the block's `lines` where the definition was found. */
|
||||
index,
|
||||
}
|
||||
return undefined
|
||||
}
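// Usage sketch for `findModuleMethod` (the block itself is hypothetical; `BodyBlock` is imported above):
declare const exampleTopLevel: BodyBlock
const exampleFound = findModuleMethod(exampleTopLevel, 'main')
if (exampleFound) {
  // `statement` is the `Function` definition, `index` its position in `exampleTopLevel.lines`.
  console.log(exampleFound.statement.name.code(), exampleFound.index)
}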
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function functionBlock(topLevel: BodyBlock, name: string) {
|
||||
const func = findModuleMethod(topLevel, name)
|
||||
if (!(func?.body instanceof BodyBlock)) return undefined
|
||||
return func.body
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function deleteFromParentBlock(ast: MutableAst) {
|
||||
/** Delete the specified statement from its containing block. */
|
||||
export function deleteFromParentBlock(ast: MutableStatement) {
|
||||
const parent = ast.mutableParent()
|
||||
if (parent instanceof MutableBodyBlock)
|
||||
parent.updateLines((lines) => lines.filter((line) => line.expression?.node.id !== ast.id))
|
||||
parent.updateLines((lines) => lines.filter((line) => line.statement?.node.id !== ast.id))
|
||||
}
|
||||
|
||||
/**
|
||||
@ -219,10 +216,10 @@ export function substituteQualifiedName(
|
||||
if (expr instanceof MutablePropertyAccess || expr instanceof MutableIdent) {
|
||||
const qn = parseQualifiedName(expr)
|
||||
if (qn === pattern) {
|
||||
expr.updateValue(() => Ast.parse(to, expr.module))
|
||||
expr.updateValue(() => parseExpression(to, expr.module)!)
|
||||
} else if (qn && qn.startsWith(pattern)) {
|
||||
const withoutPattern = qn.replace(pattern, '')
|
||||
expr.updateValue(() => Ast.parse(to + withoutPattern, expr.module))
|
||||
expr.updateValue(() => parseExpression(to + withoutPattern, expr.module)!)
|
||||
}
|
||||
} else {
|
||||
for (const child of expr.children()) {
|
||||
@ -269,6 +266,11 @@ export function copyIntoNewModule<T extends Ast>(ast: T): Owned<Mutable<T>> {
|
||||
return module.getVersion(ast) as Owned<Mutable<T>>
|
||||
}
|
||||
|
||||
/** Safely cast a mutable or owned value to its base type. */
|
||||
export function dropMutability<T extends Ast>(value: Owned<Mutable<T>>): T {
|
||||
return value as unknown as T
|
||||
}
|
||||
|
||||
declare const tokenKey: unique symbol
|
||||
declare module '@/providers/widgetRegistry' {
|
||||
export interface WidgetInputTypes {
|
||||
|
@ -1,14 +1,13 @@
|
||||
import { assert } from '@/util/assert'
|
||||
import {
|
||||
RawAst,
|
||||
astPrettyPrintType,
|
||||
parsedTreeOrTokenRange,
|
||||
rawParseModule,
|
||||
readAstOrTokenSpan,
|
||||
readTokenSpan,
|
||||
} from '@/util/ast/raw'
|
||||
import { MappedKeyMap, MappedSet, NonEmptyStack } from '@/util/containers'
|
||||
import type { LazyObject } from 'ydoc-shared/ast/parserSupport'
|
||||
import { LazyObject } from 'ydoc-shared/ast/parserSupport'
|
||||
import { rangeIsBefore, sourceRangeKey, type SourceRange } from 'ydoc-shared/yjsModel'
|
||||
|
||||
const ACCESSOR_OPERATOR = '.'
|
||||
@ -323,7 +322,7 @@ export class AliasAnalyzer {
|
||||
}
|
||||
}
|
||||
break
|
||||
case RawAst.Tree.Type.Documented:
|
||||
case RawAst.Tree.Type.ExpressionStatement:
|
||||
// Intentionally omit documentation, as it is not "real" code.
|
||||
this.processTree(node.expression)
|
||||
break
|
||||
@ -348,3 +347,10 @@ function log(...messages: Array<() => any>) {
|
||||
console.log(...messages.map((message) => message()))
|
||||
}
|
||||
}
|
||||
|
||||
function astPrettyPrintType(obj: unknown): string | undefined {
|
||||
if (obj instanceof LazyObject && Object.hasOwnProperty.call(obj, 'type')) {
|
||||
const proto = Object.getPrototypeOf(obj)
|
||||
return proto?.constructor?.name
|
||||
}
|
||||
}
|
||||
|
@ -2,14 +2,21 @@ import { assert, assertDefined } from '@/util/assert'
|
||||
import { Ast } from '@/util/ast'
|
||||
import { zipLongest } from '@/util/data/iterable'
|
||||
|
||||
/** TODO: Add docs */
|
||||
export class Pattern {
|
||||
private readonly template: Ast.Ast
|
||||
/**
* A pattern is an AST object with "placeholder" expressions.
*
* It can be used in two ways:
* - It can be matched against an AST node, in which case each placeholder will match any expression, and the matches
* will be returned.
* - It can be instantiated by providing an expression to be substituted for each placeholder.
*/
|
||||
export class Pattern<T extends Ast.Ast = Ast.Expression> {
|
||||
private readonly template: T
|
||||
private readonly placeholders: Ast.AstId[]
|
||||
private readonly placeholder: string
|
||||
|
||||
private constructor(template: Ast.Owned, placeholder: string) {
|
||||
this.template = template
|
||||
private constructor(template: Ast.Owned<Ast.Mutable<T>>, placeholder: string) {
|
||||
this.template = Ast.dropMutability(template)
|
||||
this.placeholders = findPlaceholders(template, placeholder)
|
||||
this.placeholder = placeholder
|
||||
}
|
||||
@ -18,13 +25,20 @@ export class Pattern {
|
||||
* Parse an expression template in which a specified identifier (by default `__`)
|
||||
* may match any arbitrary subtree.
|
||||
*/
|
||||
static parse(template: string, placeholder: string = '__'): Pattern {
|
||||
const ast = Ast.parse(template)
|
||||
static parseExpression(template: string, placeholder: string = '__'): Pattern {
|
||||
const ast = Ast.parseExpression(template)
|
||||
assertDefined(ast)
|
||||
return new Pattern(ast, placeholder)
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
static new(f: (placeholder: Ast.Owned) => Ast.Owned, placeholder: string = '__'): Pattern {
|
||||
/**
|
||||
* Given a function that constructs an AST value when provided an expression, creates a `Pattern` that constructs an
|
||||
* equivalent AST value when instantiated with an expression.
|
||||
*/
|
||||
static new<T extends Ast.Ast>(
|
||||
f: (placeholder: Ast.Owned<Ast.MutableExpression>) => Ast.Owned<Ast.Mutable<T>>,
|
||||
placeholder: string = '__',
|
||||
): Pattern<T> {
|
||||
assert(Ast.isIdentifier(placeholder))
|
||||
const module = Ast.MutableModule.Transient()
|
||||
return new Pattern(f(Ast.Ident.new(module, placeholder)), placeholder)
|
||||
@ -48,7 +62,10 @@ export class Pattern {
|
||||
}
|
||||
|
||||
/** Create a new concrete example of the pattern, with the placeholders replaced with the given subtrees. */
|
||||
instantiate(edit: Ast.MutableModule, subtrees: Ast.Owned[]): Ast.Owned {
|
||||
instantiate(
|
||||
edit: Ast.MutableModule,
|
||||
subtrees: Ast.Owned<Ast.MutableExpression>[],
|
||||
): Ast.Owned<Ast.Mutable<T>> {
|
||||
const template = edit.copy(this.template)
|
||||
const placeholders = findPlaceholders(template, this.placeholder).map((ast) => edit.tryGet(ast))
|
||||
for (const [placeholder, replacement] of zipLongest(placeholders, subtrees)) {
|
||||
@ -59,20 +76,20 @@ export class Pattern {
|
||||
return template
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
instantiateCopied(subtrees: Ast.Ast[], edit?: Ast.MutableModule): Ast.Owned {
|
||||
/**
|
||||
* Helper that creates the AST described by the pattern, as {@link instantiate}, after first copying each of the
|
||||
* referenced subtrees into a different module.
|
||||
*/
|
||||
instantiateCopied(
|
||||
subtrees: (Ast.Expression | Ast.MutableExpression)[],
|
||||
edit?: Ast.MutableModule,
|
||||
): Ast.Owned<Ast.Mutable<T>> {
|
||||
const module = edit ?? Ast.MutableModule.Transient()
|
||||
return this.instantiate(
|
||||
module,
|
||||
subtrees.map((ast) => module.copy(ast)),
|
||||
)
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
compose(f: (pattern: Ast.Owned) => Ast.Owned): Pattern {
|
||||
const module = Ast.MutableModule.Transient()
|
||||
return new Pattern(f(module.copy(this.template)), this.placeholder)
|
||||
}
|
||||
}
|
||||
|
||||
function findPlaceholders(ast: Ast.Ast, placeholder: string): Ast.AstId[] {
|
||||
|
@ -8,57 +8,52 @@ export const prefixes = Prefixes.FromLines({
|
||||
})
|
||||
|
||||
/** Given a node's outer expression, find the root expression and any statements wrapping it. */
|
||||
export function nodeRootExpr(ast: Ast.Ast): {
|
||||
root: Ast.Ast | undefined
|
||||
docs: Ast.Documented | undefined
|
||||
export function nodeRootExpr(ast: Ast.Statement | Ast.Expression): {
|
||||
root: Ast.Expression | undefined
|
||||
assignment: Ast.Assignment | undefined
|
||||
} {
|
||||
const [withinDocs, docs] =
|
||||
ast instanceof Ast.Documented ? [ast.expression, ast] : [ast, undefined]
|
||||
const [withinAssignment, assignment] =
|
||||
withinDocs instanceof Ast.Assignment ?
|
||||
[withinDocs.expression, withinDocs]
|
||||
: [withinDocs, undefined]
|
||||
const assignment = ast instanceof Ast.Assignment ? ast : undefined
|
||||
const root =
|
||||
assignment ? assignment.expression
|
||||
: ast instanceof Ast.ExpressionStatement ? ast.expression
|
||||
: undefined
|
||||
return {
|
||||
root: withinAssignment,
|
||||
docs,
|
||||
root,
|
||||
assignment,
|
||||
}
|
||||
}
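// Behaviour sketch (hypothetical statement): for an assignment such as `sum = a + b`, `assignment`
// is the statement itself and `root` is its right-hand-side expression; for a bare
// `ExpressionStatement`, `assignment` is undefined and `root` is the inner expression.
declare const exampleStatement: Ast.Statement
const exampleParts = nodeRootExpr(exampleStatement)
console.log(exampleParts.root?.code(), exampleParts.assignment?.pattern?.code())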
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function inputNodeFromAst(ast: Ast.Ast, argIndex: number): NodeDataFromAst {
|
||||
/** Create a Node from the pattern of a function argument. */
|
||||
export function inputNodeFromAst(ast: Ast.Expression, argIndex: number): NodeDataFromAst {
|
||||
return {
|
||||
type: 'input',
|
||||
outerExpr: ast,
|
||||
outerAst: ast,
|
||||
pattern: undefined,
|
||||
rootExpr: ast,
|
||||
innerExpr: ast,
|
||||
prefixes: { enableRecording: undefined },
|
||||
primarySubject: undefined,
|
||||
conditionalPorts: new Set(),
|
||||
docs: undefined,
|
||||
argIndex,
|
||||
}
|
||||
}
|
||||
|
||||
/** Given a node's outer expression, return all the `Node` fields that depend on its AST structure. */
|
||||
export function nodeFromAst(ast: Ast.Ast, isOutput: boolean): NodeDataFromAst | undefined {
|
||||
const { root, docs, assignment } = nodeRootExpr(ast)
|
||||
export function nodeFromAst(ast: Ast.Statement, isOutput: boolean): NodeDataFromAst | undefined {
|
||||
const { root, assignment } = nodeRootExpr(ast)
|
||||
if (!root) return
|
||||
const { innerExpr, matches } = prefixes.extractMatches(root)
|
||||
const type = assignment == null && isOutput ? 'output' : 'component'
|
||||
const primaryApplication = primaryApplicationSubject(innerExpr)
|
||||
return {
|
||||
type,
|
||||
outerExpr: ast,
|
||||
outerAst: ast,
|
||||
pattern: assignment?.pattern,
|
||||
rootExpr: root,
|
||||
innerExpr,
|
||||
prefixes: matches,
|
||||
primarySubject: primaryApplication?.subject,
|
||||
conditionalPorts: new Set(primaryApplication?.accessChain ?? []),
|
||||
docs,
|
||||
argIndex: undefined,
|
||||
}
|
||||
}
|
||||
@ -68,7 +63,7 @@ export function nodeFromAst(ast: Ast.Ast, isOutput: boolean): NodeDataFromAst |
|
||||
* application.
|
||||
*/
|
||||
export function primaryApplicationSubject(
|
||||
ast: Ast.Ast,
|
||||
ast: Ast.Expression,
|
||||
): { subject: Ast.AstId; accessChain: Ast.AstId[] } | undefined {
|
||||
// Descend into LHS of any sequence of applications.
|
||||
while (ast instanceof Ast.App) ast = ast.function
|
||||
|
@ -1,157 +0,0 @@
|
||||
import { assert } from '@/util/assert'
|
||||
import { RawAstExtended } from '@/util/ast/extended'
|
||||
import { RawAst } from '@/util/ast/raw'
|
||||
import { zip } from '@/util/data/iterable'
|
||||
import { mapIterator } from 'lib0/iterator'
|
||||
|
||||
/** An operand of one of the applications inside `GeneralOprApp` */
|
||||
export type GeneralOperand<HasIdMap extends boolean = true> =
|
||||
| Operand<HasIdMap>
|
||||
// A part of `GeneralOprApp`, consisting of lhs and first `statements` of applications.
|
||||
| { type: 'partOfGeneralOprApp'; oprApp: GeneralOprApp<HasIdMap>; statements: number }
|
||||
|
||||
export type OperatorChain<HasIdMap extends boolean = true> = RawAstExtended<
|
||||
RawAst.Tree.OprApp | RawAst.Tree.OperatorBlockApplication,
|
||||
HasIdMap
|
||||
>
|
||||
|
||||
/** A structure unifying API of OprApp and OperatorBlockApplication */
|
||||
export class GeneralOprApp<HasIdMap extends boolean = true> {
|
||||
lhs: RawAstExtended<RawAst.Tree, HasIdMap> | null
|
||||
apps: {
|
||||
opr: RawAstExtended<RawAst.Token.Operator, HasIdMap> | null
|
||||
expr: RawAstExtended<RawAst.Tree, HasIdMap> | null
|
||||
}[]
|
||||
|
||||
/** TODO: Add docs */
|
||||
constructor(ast: OperatorChain<HasIdMap>) {
|
||||
this.lhs = ast.tryMap((t) => t.lhs) ?? null
|
||||
if (ast.isTree(RawAst.Tree.Type.OprApp)) {
|
||||
const rhs = ast.tryMap((t) => t.rhs) ?? null
|
||||
const opr = ast.tryMap((t) => (t.opr.ok ? t.opr.value : undefined)) ?? null
|
||||
this.apps = [{ opr, expr: rhs }]
|
||||
} else {
|
||||
const blockApplication = ast as RawAstExtended<RawAst.Tree.OperatorBlockApplication, HasIdMap>
|
||||
const expressions = (line: RawAst.OperatorLine): RawAst.OperatorBlockExpression[] =>
|
||||
line.expression ? [line.expression] : []
|
||||
const operators = blockApplication.tryMapIter((ast) =>
|
||||
[...ast.expressions]
|
||||
.flatMap(expressions)
|
||||
.map((expr) => (expr.operator.ok ? expr.operator.value : null))
|
||||
.values(),
|
||||
)
|
||||
const exprs = blockApplication.mapIter((ast) =>
|
||||
[...ast.expressions]
|
||||
.flatMap(expressions)
|
||||
.map((expr) => expr.expression)
|
||||
.values(),
|
||||
)
|
||||
this.apps = Array.from(
|
||||
mapIterator(zip(operators, exprs), ([opr, expr]) => ({
|
||||
opr: opr ? opr : null,
|
||||
expr: expr ? expr : null,
|
||||
})),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Last operator */
|
||||
lastOpr(): RawAstExtended<RawAst.Token.Operator, HasIdMap> | null {
|
||||
return this.apps[this.apps.length - 1]?.opr ?? null
|
||||
}
|
||||
|
||||
/** Returns representation of all operands interleaved with appropriate operators */
|
||||
*componentsReprs(): Generator<string | null> {
|
||||
yield this.lhs != null ? this.lhs.repr() : null
|
||||
for (const app of this.apps) {
|
||||
yield app.opr != null ? app.opr.repr() : null
|
||||
yield app.expr != null ? app.expr.repr() : null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read operands of an operator chain. Operator is assumed to be left-associative.
|
||||
*
|
||||
* Works like `operandsOfLeftAssocOprChain` defined in this module, see its docs for details.
|
||||
*/
|
||||
*operandsOfLeftAssocOprChain(expectedOpr?: string): Generator<GeneralOperand<HasIdMap> | null> {
|
||||
// If this represents an OperatorBlockApplication, there may be many different operators. Our chain
|
||||
// ends with the first not matching starting from the end.
|
||||
let matchingOprs
|
||||
for (matchingOprs = 0; matchingOprs < this.apps.length; matchingOprs++) {
|
||||
const app = this.apps[this.apps.length - matchingOprs - 1]!
|
||||
if (!app.opr) break
|
||||
const oprCode = app.opr.repr()
|
||||
if (expectedOpr != null && oprCode != expectedOpr) break
|
||||
expectedOpr = oprCode
|
||||
}
|
||||
if (matchingOprs === this.apps.length) {
|
||||
// If all operators matched, the lhs may be a continuation of this chain.
|
||||
if (this.lhs != null) yield* operandsOfLeftAssocOprChain(this.lhs, expectedOpr)
|
||||
else yield null
|
||||
} else {
|
||||
// Not all operators matched; the first operand will be a part of this GeneralOprApp.
|
||||
yield {
|
||||
type: 'partOfGeneralOprApp',
|
||||
oprApp: this,
|
||||
statements: this.apps.length - matchingOprs,
|
||||
}
|
||||
}
|
||||
for (let i = this.apps.length - matchingOprs; i < this.apps.length; ++i) {
|
||||
const app = this.apps[i]
|
||||
if (app?.expr != null) yield { type: 'ast', ast: app.expr }
|
||||
else yield null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* An operand of some operator application chain.
|
||||
*
|
||||
* There is a special case, where operand is a part of OperatorBlockApplication which is not
|
||||
* representable by any AST structure.
|
||||
*/
|
||||
export type Operand<HasIdMap extends boolean = true> =
|
||||
| { type: 'ast'; ast: RawAstExtended<RawAst.Tree, HasIdMap> }
|
||||
| {
|
||||
type: 'partOfOprBlockApp'
|
||||
ast: RawAstExtended<RawAst.Tree.OperatorBlockApplication, HasIdMap>
|
||||
statements: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Read operands of an operator chain. Operator is assumed to be left-associative.
|
||||
*
|
||||
* It flattens applications of same operator, e.g. for `2 + 3 + 4` will return `2`, `3`, and `4`,
|
||||
* but `2 - 3 + 4` will return `2 - 3` as first operand, and then `4`. If the ast is not
|
||||
* an operator application (of this specific operator if provided), `this` will be returned as
|
||||
* a single operand.
|
||||
* @param ast the subtree which we assume is an operator application chain.
|
||||
* @param expectedOpr if specified, the chain will be of specific operator.
|
||||
*/
|
||||
export function* operandsOfLeftAssocOprChain<HasIdMap extends boolean = true>(
|
||||
ast: RawAstExtended<RawAst.Tree, HasIdMap>,
|
||||
expectedOpr?: string,
|
||||
): Generator<Operand<HasIdMap> | null> {
|
||||
switch (ast.inner.type) {
|
||||
case RawAst.Tree.Type.OprApp:
|
||||
case RawAst.Tree.Type.OperatorBlockApplication: {
|
||||
const oprApp = new GeneralOprApp(ast as OperatorChain<HasIdMap>)
|
||||
for (const operand of oprApp.operandsOfLeftAssocOprChain(expectedOpr)) {
|
||||
if (operand == null || operand.type !== 'partOfGeneralOprApp') yield operand
|
||||
else {
|
||||
const isEntireOprApp = operand.statements === oprApp.apps.length
|
||||
if (isEntireOprApp) {
|
||||
yield { type: 'ast', ast }
|
||||
} else {
|
||||
assert(ast.isTree(RawAst.Tree.Type.OperatorBlockApplication))
|
||||
yield { type: 'partOfOprBlockApp', ast, statements: operand.statements }
|
||||
}
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
default:
|
||||
yield { type: 'ast', ast }
|
||||
}
|
||||
}
|
@ -5,7 +5,7 @@ import { unsafeKeys } from '@/util/record'
|
||||
type Matches<T> = Record<keyof T, Ast.AstId[] | undefined>
|
||||
|
||||
interface MatchResult<T> {
|
||||
innerExpr: Ast.Ast
|
||||
innerExpr: Ast.Expression
|
||||
matches: Record<keyof T, Ast.AstId[] | undefined>
|
||||
}
|
||||
|
||||
@ -21,18 +21,18 @@ export class Prefixes<T extends Record<keyof T, Pattern>> {
|
||||
static FromLines<T>(lines: Record<keyof T, string>) {
|
||||
return new Prefixes(
|
||||
Object.fromEntries(
|
||||
Object.entries<string>(lines).map(([name, line]) => [name, Pattern.parse(line)]),
|
||||
Object.entries<string>(lines).map(([name, line]) => [name, Pattern.parseExpression(line)]),
|
||||
) as Record<keyof T, Pattern>,
|
||||
)
|
||||
}
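// Usage sketch (hypothetical prefix template):
//   const examplePrefixes = Prefixes.FromLines({ example: 'example_prefix __' })
//   const { innerExpr, matches } = examplePrefixes.extractMatches(Ast.parseExpression('example_prefix foo')!)
//   // `innerExpr` is the `foo` expression; `matches.example` holds the AstIds captured by `__`.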
|
||||
|
||||
/** TODO: Add docs */
|
||||
extractMatches(expression: Ast.Ast): MatchResult<T> {
|
||||
extractMatches(expression: Ast.Expression): MatchResult<T> {
|
||||
const matches = Object.fromEntries(
|
||||
Object.entries<Pattern>(this.prefixes).map(([name, pattern]) => {
|
||||
const matches = pattern.match(expression)
|
||||
const lastMatch = matches != null ? matches[matches.length - 1] : undefined
|
||||
if (lastMatch) expression = expression.module.get(lastMatch)
|
||||
if (lastMatch) expression = expression.module.get(lastMatch) as Ast.Expression
|
||||
return [name, matches]
|
||||
}),
|
||||
) as Matches<T>
|
||||
@ -40,15 +40,19 @@ export class Prefixes<T extends Record<keyof T, Pattern>> {
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
modify(expression: Ast.Mutable, replacements: Partial<Record<keyof T, Ast.Owned[] | undefined>>) {
|
||||
modify(
|
||||
expression: Ast.MutableExpression,
|
||||
replacements: Partial<Record<keyof T, Ast.Owned<Ast.MutableExpression>[] | undefined>>,
|
||||
) {
|
||||
expression.updateValue((expression) => {
|
||||
const matches = this.extractMatches(expression)
|
||||
const matches = this.extractMatches(expression as Ast.Owned<Ast.MutableExpression>)
|
||||
const edit = expression.module
|
||||
let result = edit.take(matches.innerExpr.id)
|
||||
let result = edit.getVersion(matches.innerExpr).take()
|
||||
for (const key of unsafeKeys(this.prefixes).reverse()) {
|
||||
if (key in replacements && !replacements[key]) continue
|
||||
const replacement: Ast.Owned[] | undefined =
|
||||
replacements[key] ?? matches.matches[key]?.map((match) => edit.take(match)!)
|
||||
const replacement: Ast.Owned<Ast.MutableExpression>[] | undefined =
|
||||
replacements[key] ??
|
||||
matches.matches[key]?.map((match) => edit.take(match) as Ast.Owned<Ast.MutableExpression>)
|
||||
if (!replacement) continue
|
||||
const pattern = this.prefixes[key]
|
||||
const parts = [...replacement, result]
|
||||
|
@ -1,28 +1,12 @@
|
||||
import { assertDefined } from '@/util/assert'
|
||||
import * as map from 'lib0/map'
|
||||
import * as RawAst from 'ydoc-shared/ast/generated/ast'
|
||||
import { rawParseModule } from 'ydoc-shared/ast/parse'
|
||||
import { LazyObject, LazySequence } from 'ydoc-shared/ast/parserSupport'
|
||||
import { tryGetSoleValue } from 'ydoc-shared/util/data/iterable'
|
||||
import { isResult, mapOk } from 'ydoc-shared/util/data/result'
|
||||
import { LazyObject } from 'ydoc-shared/ast/parserSupport'
|
||||
import type { SourceRange } from 'ydoc-shared/yjsModel'
|
||||
|
||||
export { RawAst, rawParseModule }
|
||||
|
||||
export type HasAstRange = SourceRange | RawAst.Tree | RawAst.Token
|
||||
|
||||
/**
|
||||
* Read a single line of code
|
||||
*
|
||||
* Is meant to be a helper for tests. If the code is multiline, an exception is raised.
|
||||
*/
|
||||
export function rawParseLine(code: string): RawAst.Tree {
|
||||
const block = rawParseModule(code)
|
||||
const soleExpression = tryGetSoleValue(block.statements)?.expression
|
||||
assertDefined(soleExpression)
|
||||
return soleExpression
|
||||
}
|
||||
|
||||
/**
|
||||
* Read span of code represented by given AST node, not including left whitespace offset.
|
||||
*
|
||||
@ -34,16 +18,6 @@ export function readAstOrTokenSpan(node: RawAst.Tree | RawAst.Token, code: strin
|
||||
return code.substring(range[0], range[1])
|
||||
}
|
||||
|
||||
/**
|
||||
* Read span of code represented by given RawAst.Tree.
|
||||
*
|
||||
* The Tree is assumed to be a part of AST generated from `code`.
|
||||
*/
|
||||
export function readAstSpan(node: RawAst.Tree, code: string): string {
|
||||
const range = parsedTreeRange(node)
|
||||
return code.substring(range[0], range[1])
|
||||
}
|
||||
|
||||
/**
|
||||
* Read span of code represented by given RawAst.Token.
|
||||
*
|
||||
@ -64,6 +38,7 @@ export function childrenAstNodes(obj: LazyObject): RawAst.Tree[] {
|
||||
obj.visitChildren(visitor)
|
||||
return children
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function childrenAstNodesOrTokens(obj: LazyObject): (RawAst.Tree | RawAst.Token)[] {
|
||||
const children: (RawAst.Tree | RawAst.Token)[] = []
|
||||
@ -78,46 +53,6 @@ export function childrenAstNodesOrTokens(obj: LazyObject): (RawAst.Tree | RawAst
|
||||
return children
|
||||
}
|
||||
|
||||
/** Returns all AST nodes from `root` tree containing given char, starting from the most nested. */
export function astContainingChar(charIndex: number, root: RawAst.Tree): RawAst.Tree[] {
return treePath(root, (node) => {
const begin = node.whitespaceStartInCodeParsed + node.whitespaceLengthInCodeParsed
const end = begin + node.childrenLengthInCodeParsed
return charIndex >= begin && charIndex < end
}).reverse()
}
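// Usage sketch, mirroring the unit tests in this change: character 5 of 'Data.read foo' falls
// inside the `read` identifier, so the innermost node comes first.
const exampleTree = rawParseModule('Data.read foo')
const exampleNodes = astContainingChar(5, exampleTree)
// exampleNodes: Ident 'read', then OprApp 'Data.read', App 'Data.read foo', and the body block.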
|
||||
|
||||
/**
|
||||
* Given a predicate, return a path from the root down the tree containing the
|
||||
* first node at each level found to satisfy the predicate.
|
||||
*/
|
||||
function treePath(obj: LazyObject, pred: (node: RawAst.Tree) => boolean): RawAst.Tree[] {
|
||||
const path: RawAst.Tree[] = []
|
||||
const visitor = (obj: LazyObject) => {
|
||||
if (RawAst.Tree.isInstance(obj)) {
|
||||
const isMatch = pred(obj)
|
||||
if (isMatch) path.push(obj)
|
||||
return obj.visitChildren(visitor) || isMatch
|
||||
} else {
|
||||
return obj.visitChildren(visitor)
|
||||
}
|
||||
}
|
||||
obj.visitChildren(visitor)
|
||||
return path
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function findAstWithRange(
|
||||
root: RawAst.Tree | RawAst.Token,
|
||||
range: SourceRange,
|
||||
): RawAst.Tree | RawAst.Token | undefined {
|
||||
for (const child of childrenAstNodes(root)) {
|
||||
const [begin, end] = parsedTreeOrTokenRange(child)
|
||||
if (begin === range[0] && end === range[1]) return child
|
||||
if (begin <= range[0] && end >= range[1]) return findAstWithRange(child, range)
|
||||
}
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function* walkRecursive(
|
||||
node: RawAst.Tree | RawAst.Token,
|
||||
@ -158,14 +93,14 @@ export function visitRecursive(
|
||||
* @returns The `[start, end]` range: index of the first character in the `node`
* and of the first character _not_ in the `node`.
|
||||
*/
|
||||
export function parsedTreeRange(tree: RawAst.Tree): SourceRange {
|
||||
function parsedTreeRange(tree: RawAst.Tree): SourceRange {
|
||||
const start = tree.whitespaceStartInCodeParsed + tree.whitespaceLengthInCodeParsed
|
||||
const end = start + tree.childrenLengthInCodeParsed
|
||||
return [start, end]
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function parsedTokenRange(token: RawAst.Token): SourceRange {
|
||||
function parsedTokenRange(token: RawAst.Token): SourceRange {
|
||||
const start = token.startInCodeBuffer
|
||||
const end = start + token.lengthInCodeBuffer
|
||||
return [start, end]
|
||||
@ -177,65 +112,3 @@ export function parsedTreeOrTokenRange(node: HasAstRange): SourceRange {
|
||||
else if (RawAst.Token.isInstance(node)) return parsedTokenRange(node)
|
||||
else return node
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function astPrettyPrintType(obj: unknown): string | undefined {
|
||||
if (obj instanceof LazyObject && Object.hasOwnProperty.call(obj, 'type')) {
|
||||
const proto = Object.getPrototypeOf(obj)
|
||||
return proto?.constructor?.name
|
||||
}
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function debugAst(obj: unknown): unknown {
|
||||
if (obj instanceof LazyObject) {
|
||||
const fields = Object.fromEntries(
|
||||
allGetterNames(obj).map((k) => [k, debugAst((obj as any)[k])]),
|
||||
)
|
||||
if (Object.hasOwnProperty.call(obj, 'type')) {
|
||||
const className = astPrettyPrintType(obj)
|
||||
return { type: className, ...fields }
|
||||
} else {
|
||||
return fields
|
||||
}
|
||||
} else if (obj instanceof LazySequence) {
|
||||
return Array.from(obj, debugAst)
|
||||
} else if (isResult(obj)) {
|
||||
return mapOk(obj, debugAst)
|
||||
} else {
|
||||
return obj
|
||||
}
|
||||
}
|
||||
|
||||
const protoGetters = new Map()
|
||||
function allGetterNames(obj: object): string[] {
|
||||
let proto = Object.getPrototypeOf(obj)
|
||||
return map.setIfUndefined(protoGetters, proto, () => {
|
||||
const props = new Map<string, PropertyDescriptor>()
|
||||
do {
|
||||
for (const [name, prop] of Object.entries(Object.getOwnPropertyDescriptors(proto))) {
|
||||
if (!props.has(name)) props.set(name, prop)
|
||||
}
|
||||
} while ((proto = Object.getPrototypeOf(proto)))
|
||||
const getters = new Set<string>()
|
||||
for (const [name, prop] of props.entries()) {
|
||||
if (prop.get != null && prop.configurable && !debugHideFields.includes(name)) {
|
||||
getters.add(name)
|
||||
}
|
||||
}
|
||||
return [...getters]
|
||||
})
|
||||
}
|
||||
|
||||
const debugHideFields = [
|
||||
'_v',
|
||||
'__proto__',
|
||||
'codeReprBegin',
|
||||
'codeReprLen',
|
||||
'leftOffsetCodeReprBegin',
|
||||
'leftOffsetCodeReprLen',
|
||||
'leftOffsetVisible',
|
||||
'spanLeftOffsetCodeReprBegin',
|
||||
'spanLeftOffsetCodeReprLen',
|
||||
'spanLeftOffsetVisible',
|
||||
]
|
||||
|
@ -34,7 +34,11 @@ class ArgumentFactory {
|
||||
)
|
||||
}
|
||||
|
||||
argument(ast: Ast.Ast, index: number | undefined, info: SuggestionEntryArgument | undefined) {
|
||||
argument(
|
||||
ast: Ast.Expression,
|
||||
index: number | undefined,
|
||||
info: SuggestionEntryArgument | undefined,
|
||||
) {
|
||||
return new ArgumentAst(
|
||||
this.callId,
|
||||
this.kind,
|
||||
@ -51,7 +55,7 @@ class ArgumentFactory {
|
||||
}
|
||||
|
||||
type ArgWidgetConfiguration = WidgetConfiguration & { display?: DisplayMode }
|
||||
type WidgetInputValue = Ast.Ast | Ast.Token | string | undefined
|
||||
type WidgetInputValue = Ast.Expression | Ast.Token | string | undefined
|
||||
abstract class Argument {
|
||||
protected constructor(
|
||||
public callId: string,
|
||||
@ -127,7 +131,7 @@ export class ArgumentAst extends Argument {
|
||||
dynamicConfig: ArgWidgetConfiguration | undefined,
|
||||
index: number | undefined,
|
||||
argInfo: SuggestionEntryArgument | undefined,
|
||||
public ast: Ast.Ast,
|
||||
public ast: Ast.Expression,
|
||||
) {
|
||||
super(callId, kind, dynamicConfig, index, argInfo)
|
||||
}
|
||||
@ -149,24 +153,24 @@ interface InterpretedInfix {
|
||||
kind: 'infix'
|
||||
appTree: Ast.OprApp
|
||||
operator: Ast.Token | undefined
|
||||
lhs: Ast.Ast | undefined
|
||||
rhs: Ast.Ast | undefined
|
||||
lhs: Ast.Expression | undefined
|
||||
rhs: Ast.Expression | undefined
|
||||
}
|
||||
|
||||
interface InterpretedPrefix {
|
||||
kind: 'prefix'
|
||||
func: Ast.Ast
|
||||
func: Ast.Expression
|
||||
args: FoundApplication[]
|
||||
}
|
||||
|
||||
interface FoundApplication {
|
||||
appTree: Ast.App
|
||||
argument: Ast.Ast
|
||||
argument: Ast.Expression
|
||||
argName: string | undefined
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function interpretCall(callRoot: Ast.Ast): InterpretedCall {
|
||||
export function interpretCall(callRoot: Ast.Expression): InterpretedCall {
|
||||
if (callRoot instanceof Ast.OprApp) {
|
||||
// Infix chains are handled one level at a time. Each application may have at most 2 arguments.
|
||||
return {
|
||||
@ -209,8 +213,8 @@ interface CallInfo {
|
||||
/** TODO: Add docs */
|
||||
export class ArgumentApplication {
|
||||
private constructor(
|
||||
public appTree: Ast.Ast,
|
||||
public target: ArgumentApplication | Ast.Ast | ArgumentPlaceholder | ArgumentAst,
|
||||
public appTree: Ast.Expression,
|
||||
public target: ArgumentApplication | Ast.Expression | ArgumentPlaceholder | ArgumentAst,
|
||||
public infixOperator: Ast.Token | undefined,
|
||||
public argument: ArgumentAst | ArgumentPlaceholder,
|
||||
public calledFunction: SuggestionEntry | undefined,
|
||||
@ -260,7 +264,7 @@ export class ArgumentApplication {
|
||||
)
|
||||
|
||||
const resolvedArgs: Array<{
|
||||
appTree: Ast.Ast
|
||||
appTree: Ast.Expression
|
||||
argument: ArgumentAst | ArgumentPlaceholder
|
||||
}> = []
|
||||
|
||||
@ -378,7 +382,7 @@ export class ArgumentApplication {
|
||||
}
|
||||
|
||||
return resolvedArgs.reduce(
|
||||
(target: ArgumentApplication | Ast.Ast, toDisplay) =>
|
||||
(target: ArgumentApplication | Ast.Expression, toDisplay) =>
|
||||
new ArgumentApplication(
|
||||
toDisplay.appTree,
|
||||
target,
|
||||
@ -395,7 +399,7 @@ export class ArgumentApplication {
|
||||
static FromInterpretedWithInfo(
|
||||
interpreted: InterpretedCall,
|
||||
callInfo: CallInfo = {},
|
||||
): ArgumentApplication | Ast.Ast {
|
||||
): ArgumentApplication | Ast.Expression {
|
||||
if (interpreted.kind === 'infix') {
|
||||
return ArgumentApplication.FromInterpretedInfix(interpreted, callInfo)
|
||||
} else {
|
||||
@ -487,7 +491,7 @@ const unknownArgInfoNamed = (name: string) => ({
|
||||
})
|
||||
|
||||
/** TODO: Add docs */
|
||||
export function getAccessOprSubject(app: Ast.Ast): Ast.Ast | undefined {
|
||||
export function getAccessOprSubject(app: Ast.Expression): Ast.Expression | undefined {
|
||||
if (app instanceof Ast.PropertyAccess) return app.lhs
|
||||
}
|
||||
|
||||
@ -500,7 +504,7 @@ export function getAccessOprSubject(app: Ast.Ast): Ast.Ast | undefined {
|
||||
* We also don’t consider infix applications here, as using them inside a prefix chain would require additional syntax (like parentheses).
|
||||
*/
|
||||
export function getMethodCallInfoRecursively(
|
||||
ast: Ast.Ast,
|
||||
ast: Ast.Expression,
|
||||
graphDb: { getMethodCallInfo(id: AstId): MethodCallInfo | undefined },
|
||||
): MethodCallInfo | undefined {
|
||||
let appliedArgs = 0
|
||||
|
@ -77,7 +77,7 @@ export function applyDocumentUpdates(
|
||||
// Update the metadata object.
|
||||
// Depth-first key order keeps diffs small.
|
||||
newMetadata = { node: {}, widget: {} }
|
||||
root.visitRecursiveAst(ast => {
|
||||
root.visitRecursive(ast => {
|
||||
let pos = ast.nodeMetadata.get('position')
|
||||
const vis = ast.nodeMetadata.get('visualization')
|
||||
const colorOverride = ast.nodeMetadata.get('colorOverride')
|
||||
|
@ -606,7 +606,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> {
|
||||
// the code was externally edited. In this case we try to fix the spans by running
|
||||
// the `syncToCode` on the saved code snapshot.
|
||||
const { root, spans } = Ast.parseModuleWithSpans(snapshotCode, syncModule)
|
||||
syncModule.syncRoot(root)
|
||||
syncModule.setRoot(root)
|
||||
parsedIdMap = deserializeIdMap(idMapJson)
|
||||
|
||||
const edit = syncModule.edit()
|
||||
@ -615,7 +615,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> {
|
||||
syncModule.applyEdit(edit)
|
||||
} else {
|
||||
const { root, spans } = Ast.parseModuleWithSpans(code, syncModule)
|
||||
syncModule.syncRoot(root)
|
||||
syncModule.setRoot(root)
|
||||
parsedSpans = spans
|
||||
}
|
||||
}
|
||||
@ -651,7 +651,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> {
|
||||
(nodeMeta.length !== 0 || widgetMeta.length !== 0)
|
||||
) {
|
||||
const externalIdToAst = new Map<ExternalId, Ast.Ast>()
|
||||
astRoot.visitRecursiveAst(ast => {
|
||||
astRoot.visitRecursive(ast => {
|
||||
if (!externalIdToAst.has(ast.externalId)) externalIdToAst.set(ast.externalId, ast)
|
||||
})
|
||||
const missing = new Set<string>()
|
||||
|
@ -1,11 +1,11 @@
|
||||
import { Ast } from './tree'
|
||||
|
||||
/** Returns a GraphViz graph illustrating parent/child relationships in the given subtree. */
|
||||
export function graphParentPointers(ast: Ast) {
|
||||
export function graphParentPointers(ast: Ast, bidirectional?: true): string {
|
||||
const sanitize = (id: string) => id.replace('ast:', '').replace(/[^A-Za-z0-9]/g, '')
|
||||
const parentToChild = new Array<{ parent: string; child: string }>()
|
||||
const childToParent = new Array<{ child: string; parent: string }>()
|
||||
ast.visitRecursiveAst(ast => {
|
||||
ast.visitRecursive(ast => {
|
||||
for (const child of ast.children()) {
|
||||
if (child instanceof Ast)
|
||||
parentToChild.push({ child: sanitize(child.id), parent: sanitize(ast.id) })
|
||||
@ -15,8 +15,10 @@ export function graphParentPointers(ast: Ast) {
|
||||
})
|
||||
let result = 'digraph parentPointers {\n'
|
||||
for (const { parent, child } of parentToChild) result += `${parent} -> ${child};\n`
|
||||
for (const { child, parent } of childToParent)
|
||||
result += `${child} -> ${parent} [weight=0; color=red; style=dotted];\n`
|
||||
if (bidirectional) {
|
||||
for (const { child, parent } of childToParent)
|
||||
result += `${child} -> ${parent} [weight=0; color=red; style=dotted];\n`
|
||||
}
|
||||
result += '}\n'
|
||||
return result
|
||||
}
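// Usage sketch: render parent pointers of any subtree as GraphViz source (viewing it with `dot`
// is just one option).
declare const exampleRoot: Ast
const exampleDot = graphParentPointers(exampleRoot, true) // includes dotted child-to-parent edges
console.log(exampleDot) // e.g. pipe into `dot -Tsvg` to inspect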
|
||||
|
@ -3,8 +3,8 @@ import { reachable } from '../util/data/graph'
|
||||
import type { ExternalId } from '../yjsModel'
|
||||
import type { Module } from './mutableModule'
|
||||
import type { SyncTokenId } from './token'
|
||||
import type { AstId } from './tree'
|
||||
import { App, Ast, Group, MutableAst, OprApp, Wildcard } from './tree'
|
||||
import type { AstId, MutableAst } from './tree'
|
||||
import { App, Ast, Group, OprApp, Wildcard } from './tree'
|
||||
|
||||
export * from './mutableModule'
|
||||
export * from './parse'
|
||||
|
@ -1,7 +1,19 @@
|
||||
import * as random from 'lib0/random'
|
||||
import * as Y from 'yjs'
|
||||
import type { AstId, NodeChild, Owned, RawNodeChild, SyncTokenId } from '.'
|
||||
import { Token, asOwned, isTokenId, newExternalId, subtreeRoots } from '.'
|
||||
import {
|
||||
AstId,
|
||||
MutableBodyBlock,
|
||||
NodeChild,
|
||||
Owned,
|
||||
RawNodeChild,
|
||||
SyncTokenId,
|
||||
Token,
|
||||
asOwned,
|
||||
isTokenId,
|
||||
newExternalId,
|
||||
parseModule,
|
||||
subtreeRoots,
|
||||
} from '.'
|
||||
import { assert, assertDefined } from '../util/assert'
|
||||
import type { SourceRangeEdit } from '../util/data/text'
|
||||
import { defaultLocalOrigin, tryAsOrigin, type ExternalId, type Origin } from '../yjsModel'
|
||||
@ -85,46 +97,36 @@ export class MutableModule implements Module {
|
||||
return this.ydoc.transact(f, origin)
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
/** Return the top-level block of the module. */
|
||||
root(): MutableAst | undefined {
|
||||
return this.rootPointer()?.expression
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
replaceRoot(newRoot: Owned | undefined): Owned | undefined {
|
||||
/** Set the given block to be the top-level block of the module. */
|
||||
setRoot(newRoot: Owned<MutableBodyBlock> | undefined) {
|
||||
if (newRoot) {
|
||||
const rootPointer = this.rootPointer()
|
||||
if (rootPointer) {
|
||||
return rootPointer.expression.replace(newRoot)
|
||||
rootPointer.expression.replace(newRoot)
|
||||
} else {
|
||||
invalidFields(this, this.baseObject('Invalid', undefined, ROOT_ID), {
|
||||
whitespace: '',
|
||||
node: newRoot,
|
||||
})
|
||||
return undefined
|
||||
}
|
||||
} else {
|
||||
const oldRoot = this.root()
|
||||
if (!oldRoot) return
|
||||
this.nodes.delete(ROOT_ID)
|
||||
oldRoot.fields.set('parent', undefined)
|
||||
return asOwned(oldRoot)
|
||||
if (oldRoot) oldRoot.fields.set('parent', undefined)
|
||||
}
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
syncRoot(root: Owned) {
|
||||
this.replaceRoot(root)
|
||||
this.gc()
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
syncToCode(code: string) {
|
||||
const root = this.root()
|
||||
if (root) {
|
||||
root.syncToCode(code)
|
||||
} else {
|
||||
this.replaceRoot(Ast.parse(code, this))
|
||||
this.setRoot(parseModule(code, this))
|
||||
}
|
||||
}
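// Usage sketch:
//   const exampleModule = MutableModule.Transient()
//   exampleModule.syncToCode('main = 1') // no root yet: parses a module and sets it as root
//   exampleModule.syncToCode('main = 2') // root present: delegates to the root block's `syncToCode`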
|
||||
|
||||
@ -169,7 +171,7 @@ export class MutableModule implements Module {
|
||||
/** @internal */
|
||||
importCopy<T extends Ast>(ast: T): Owned<Mutable<T>> {
|
||||
assert(ast.module !== this)
|
||||
ast.visitRecursiveAst(ast => this.nodes.set(ast.id, ast.fields.clone() as any))
|
||||
ast.visitRecursive(ast => this.nodes.set(ast.id, ast.fields.clone() as any))
|
||||
const fields = this.nodes.get(ast.id)
|
||||
assertDefined(fields)
|
||||
fields.set('parent', undefined)
|
||||
@ -333,11 +335,6 @@ export class MutableModule implements Module {
|
||||
return this.replace(id, Wildcard.new(this)) || asOwned(this.get(id))
|
||||
}
|
||||
|
||||
/** TODO: Add docs */
|
||||
updateValue<T extends MutableAst>(id: AstId, f: (x: Owned) => Owned<T>): T | undefined {
|
||||
return this.tryGet(id)?.updateValue(f)
|
||||
}
|
||||
|
||||
/////////////////////////////////////////////
|
||||
|
||||
/** TODO: Add docs */
|
||||
|
@ -1,15 +1,14 @@
|
||||
import * as map from 'lib0/map'
|
||||
import type {
|
||||
import {
|
||||
AstId,
|
||||
FunctionFields,
|
||||
Module,
|
||||
MutableInvalid,
|
||||
NodeChild,
|
||||
Owned,
|
||||
OwnedRefs,
|
||||
TextElement,
|
||||
TextToken,
|
||||
} from '.'
|
||||
import {
|
||||
Token,
|
||||
asOwned,
|
||||
isTokenId,
|
||||
@ -39,7 +38,6 @@ import {
|
||||
type SourceRange,
|
||||
type SourceRangeKey,
|
||||
} from '../yjsModel'
|
||||
import { graphParentPointers } from './debug'
|
||||
import { parse_block, parse_module, xxHash128 } from './ffi'
|
||||
import * as RawAst from './generated/ast'
|
||||
import { MutableModule } from './mutableModule'
|
||||
@ -50,7 +48,7 @@ import {
|
||||
Ast,
|
||||
AutoscopedIdentifier,
|
||||
BodyBlock,
|
||||
Documented,
|
||||
ExpressionStatement,
|
||||
Function,
|
||||
Generic,
|
||||
Group,
|
||||
@ -60,7 +58,10 @@ import {
|
||||
MutableAssignment,
|
||||
MutableAst,
|
||||
MutableBodyBlock,
|
||||
MutableExpression,
|
||||
MutableExpressionStatement,
|
||||
MutableIdent,
|
||||
MutableStatement,
|
||||
NegationApp,
|
||||
NumericLiteral,
|
||||
OprApp,
|
||||
@ -95,7 +96,7 @@ export function normalize(rootIn: Ast): Ast {
|
||||
const module = MutableModule.Transient()
|
||||
const tree = rawParseModule(printed.code)
|
||||
const { root: parsed, spans } = abstract(module, tree, printed.code)
|
||||
module.replaceRoot(parsed)
|
||||
module.setRoot(parsed)
|
||||
setExternalIds(module, spans, idMap)
|
||||
return parsed
|
||||
}
|
||||
@ -154,6 +155,20 @@ class Abstractor {
|
||||
this.toRaw = new Map()
|
||||
}
|
||||
|
||||
abstractStatement(tree: RawAst.Tree): {
|
||||
whitespace: string | undefined
|
||||
node: Owned<MutableStatement>
|
||||
} {
|
||||
return this.abstractTree(tree) as any
|
||||
}
|
||||
|
||||
abstractExpression(tree: RawAst.Tree): {
|
||||
whitespace: string | undefined
|
||||
node: Owned<MutableExpression>
|
||||
} {
|
||||
return this.abstractTree(tree) as any
|
||||
}
|
||||
|
||||
abstractTree(tree: RawAst.Tree): { whitespace: string | undefined; node: Owned } {
|
||||
const whitespaceStart = tree.whitespaceStartInCodeParsed
|
||||
const whitespaceEnd = whitespaceStart + tree.whitespaceLengthInCodeParsed
|
||||
@ -168,54 +183,14 @@ class Abstractor {
|
||||
case RawAst.Tree.Type.BodyBlock: {
|
||||
const lines = Array.from(tree.statements, line => {
|
||||
const newline = this.abstractToken(line.newline)
|
||||
const expression = line.expression ? this.abstractTree(line.expression) : undefined
|
||||
return { newline, expression }
|
||||
const statement = line.expression ? this.abstractStatement(line.expression) : undefined
|
||||
return { newline, statement }
|
||||
})
|
||||
node = BodyBlock.concrete(this.module, lines)
|
||||
break
|
||||
}
|
||||
case RawAst.Tree.Type.Function: {
|
||||
const annotationLines = Array.from(tree.annotationLines, anno => ({
|
||||
annotation: {
|
||||
operator: this.abstractToken(anno.annotation.operator),
|
||||
annotation: this.abstractToken(anno.annotation.annotation),
|
||||
argument: anno.annotation.argument && this.abstractTree(anno.annotation.argument),
|
||||
},
|
||||
newlines: Array.from(anno.newlines, this.abstractToken.bind(this)),
|
||||
}))
|
||||
const signatureLine = tree.signatureLine && {
|
||||
signature: this.abstractTypeSignature(tree.signatureLine.signature),
|
||||
newlines: Array.from(tree.signatureLine.newlines, this.abstractToken.bind(this)),
|
||||
}
|
||||
const private_ = tree.private && this.abstractToken(tree.private)
|
||||
const name = this.abstractTree(tree.name)
|
||||
const argumentDefinitions = Array.from(tree.args, arg => ({
|
||||
open: arg.open && this.abstractToken(arg.open),
|
||||
open2: arg.open2 && this.abstractToken(arg.open2),
|
||||
suspension: arg.suspension && this.abstractToken(arg.suspension),
|
||||
pattern: this.abstractTree(arg.pattern),
|
||||
type: arg.typeNode && {
|
||||
operator: this.abstractToken(arg.typeNode.operator),
|
||||
type: this.abstractTree(arg.typeNode.typeNode),
|
||||
},
|
||||
close2: arg.close2 && this.abstractToken(arg.close2),
|
||||
defaultValue: arg.default && {
|
||||
equals: this.abstractToken(arg.default.equals),
|
||||
expression: this.abstractTree(arg.default.expression),
|
||||
},
|
||||
close: arg.close && this.abstractToken(arg.close),
|
||||
}))
|
||||
const equals = this.abstractToken(tree.equals)
|
||||
const body = tree.body !== undefined ? this.abstractTree(tree.body) : undefined
|
||||
node = Function.concrete(this.module, {
|
||||
annotationLines,
|
||||
signatureLine,
|
||||
private_,
|
||||
name,
|
||||
argumentDefinitions,
|
||||
equals,
|
||||
body,
|
||||
} satisfies FunctionFields<OwnedRefs>)
|
||||
node = this.abstractFunction(tree)
|
||||
break
|
||||
}
|
||||
case RawAst.Tree.Type.Ident: {
|
||||
@ -224,24 +199,25 @@ class Abstractor {
|
||||
break
|
||||
}
|
||||
case RawAst.Tree.Type.Assignment: {
|
||||
const pattern = this.abstractTree(tree.pattern)
|
||||
const docLine = tree.docLine && this.abstractDocLine(tree.docLine)
|
||||
const pattern = this.abstractExpression(tree.pattern)
|
||||
const equals = this.abstractToken(tree.equals)
|
||||
const value = this.abstractTree(tree.expr)
|
||||
node = Assignment.concrete(this.module, pattern, equals, value)
|
||||
const value = this.abstractExpression(tree.expr)
|
||||
node = Assignment.concrete(this.module, docLine, pattern, equals, value)
|
||||
break
|
||||
}
|
||||
case RawAst.Tree.Type.App: {
|
||||
const func = this.abstractTree(tree.func)
|
||||
const arg = this.abstractTree(tree.arg)
|
||||
const func = this.abstractExpression(tree.func)
|
||||
const arg = this.abstractExpression(tree.arg)
|
||||
node = App.concrete(this.module, func, undefined, undefined, arg)
|
||||
break
|
||||
}
|
||||
case RawAst.Tree.Type.NamedApp: {
|
||||
const func = this.abstractTree(tree.func)
|
||||
const func = this.abstractExpression(tree.func)
|
||||
const open = tree.open ? this.abstractToken(tree.open) : undefined
|
||||
const name = this.abstractToken(tree.name)
|
||||
const equals = this.abstractToken(tree.equals)
|
||||
const arg = this.abstractTree(tree.arg)
|
||||
const arg = this.abstractExpression(tree.arg)
|
||||
const close = tree.close ? this.abstractToken(tree.close) : undefined
|
||||
const parens = open && close ? { open, close } : undefined
|
||||
const nameSpecification = { name, equals }
|
||||
@ -250,7 +226,7 @@ class Abstractor {
|
||||
}
|
||||
case RawAst.Tree.Type.UnaryOprApp: {
|
||||
const opr = this.abstractToken(tree.opr)
|
||||
const arg = tree.rhs ? this.abstractTree(tree.rhs) : undefined
|
||||
const arg = tree.rhs ? this.abstractExpression(tree.rhs) : undefined
|
||||
if (arg && opr.node.code() === '-') {
|
||||
node = NegationApp.concrete(this.module, opr, arg)
|
||||
} else {
|
||||
@ -265,12 +241,12 @@ class Abstractor {
|
||||
break
|
||||
}
|
||||
case RawAst.Tree.Type.OprApp: {
|
||||
const lhs = tree.lhs ? this.abstractTree(tree.lhs) : undefined
|
||||
const lhs = tree.lhs ? this.abstractExpression(tree.lhs) : undefined
|
||||
const opr =
|
||||
tree.opr.ok ?
|
||||
[this.abstractToken(tree.opr.value)]
|
||||
: Array.from(tree.opr.error.payload.operators, this.abstractToken.bind(this))
|
||||
const rhs = tree.rhs ? this.abstractTree(tree.rhs) : undefined
|
||||
const rhs = tree.rhs ? this.abstractExpression(tree.rhs) : undefined
|
||||
const soleOpr = tryGetSoleValue(opr)
|
||||
if (soleOpr?.node.code() === '.' && rhs?.node instanceof MutableIdent) {
|
||||
// Propagate type.
|
||||
@ -302,7 +278,7 @@ class Abstractor {
|
||||
// (which makes it impossible to give them unique IDs in the current IdMap format).
|
||||
case RawAst.Tree.Type.OprSectionBoundary:
|
||||
case RawAst.Tree.Type.TemplateFunction:
|
||||
return { whitespace, node: this.abstractTree(tree.ast).node }
|
||||
return { whitespace, node: this.abstractExpression(tree.ast).node }
|
||||
case RawAst.Tree.Type.Invalid: {
|
||||
const expression = this.abstractTree(tree.ast)
|
||||
node = Invalid.concrete(this.module, expression)
|
||||
@ -310,7 +286,7 @@ class Abstractor {
|
||||
}
|
||||
case RawAst.Tree.Type.Group: {
|
||||
const open = tree.open ? this.abstractToken(tree.open) : undefined
|
||||
const expression = tree.body ? this.abstractTree(tree.body) : undefined
|
||||
const expression = tree.body ? this.abstractExpression(tree.body) : undefined
|
||||
const close = tree.close ? this.abstractToken(tree.close) : undefined
|
||||
node = Group.concrete(this.module, open, expression, close)
|
||||
break
|
||||
@ -323,18 +299,16 @@ class Abstractor {
|
||||
node = TextLiteral.concrete(this.module, open, newline, elements, close)
|
||||
break
|
||||
}
|
||||
case RawAst.Tree.Type.Documented: {
|
||||
const open = this.abstractToken(tree.documentation.open)
|
||||
const elements = Array.from(tree.documentation.elements, this.abstractTextToken.bind(this))
|
||||
const newlines = Array.from(tree.documentation.newlines, this.abstractToken.bind(this))
|
||||
const expression = tree.expression ? this.abstractTree(tree.expression) : undefined
|
||||
node = Documented.concrete(this.module, open, elements, newlines, expression)
|
||||
case RawAst.Tree.Type.ExpressionStatement: {
|
||||
const docLine = tree.docLine && this.abstractDocLine(tree.docLine)
|
||||
const expression = this.abstractExpression(tree.expression)
|
||||
node = ExpressionStatement.concrete(this.module, docLine, expression)
|
||||
break
|
||||
}
|
||||
case RawAst.Tree.Type.Import: {
|
||||
const recurseBody = (tree: RawAst.Tree) => {
|
||||
const body = this.abstractTree(tree)
|
||||
if (body.node instanceof Invalid && body.node.code() === '') return undefined
|
||||
const body = this.abstractExpression(tree)
|
||||
if (body.node instanceof MutableInvalid && body.node.code() === '') return undefined
|
||||
return body
|
||||
}
|
||||
const recurseSegment = (segment: RawAst.MultiSegmentAppSegment) => ({
|
||||
@ -353,12 +327,12 @@ class Abstractor {
|
||||
case RawAst.Tree.Type.Array: {
|
||||
const left = this.abstractToken(tree.left)
|
||||
const elements = []
|
||||
if (tree.first) elements.push({ value: this.abstractTree(tree.first) })
|
||||
if (tree.first) elements.push({ value: this.abstractExpression(tree.first) })
|
||||
else if (!tree.rest.next().done) elements.push({ value: undefined })
|
||||
for (const rawElement of tree.rest) {
|
||||
elements.push({
|
||||
delimiter: this.abstractToken(rawElement.operator),
|
||||
value: rawElement.body && this.abstractTree(rawElement.body),
|
||||
value: rawElement.body && this.abstractExpression(rawElement.body),
|
||||
})
|
||||
}
|
||||
const right = this.abstractToken(tree.right)
|
||||
@ -374,6 +348,52 @@ class Abstractor {
|
||||
return { node, whitespace }
|
||||
}
|
||||
|
||||
private abstractFunction(tree: RawAst.Tree.Function) {
|
||||
const docLine = tree.docLine && this.abstractDocLine(tree.docLine)
|
||||
const annotationLines = Array.from(tree.annotationLines, anno => ({
|
||||
annotation: {
|
||||
operator: this.abstractToken(anno.annotation.operator),
|
||||
annotation: this.abstractToken(anno.annotation.annotation),
|
||||
argument: anno.annotation.argument && this.abstractExpression(anno.annotation.argument),
|
||||
},
|
||||
newlines: Array.from(anno.newlines, this.abstractToken.bind(this)),
|
||||
}))
|
||||
const signatureLine = tree.signatureLine && {
|
||||
signature: this.abstractTypeSignature(tree.signatureLine.signature),
|
||||
newlines: Array.from(tree.signatureLine.newlines, this.abstractToken.bind(this)),
|
||||
}
|
||||
const private_ = tree.private && this.abstractToken(tree.private)
|
||||
const name = this.abstractExpression(tree.name)
|
||||
const argumentDefinitions = Array.from(tree.args, arg => ({
|
||||
open: arg.open && this.abstractToken(arg.open),
|
||||
open2: arg.open2 && this.abstractToken(arg.open2),
|
||||
suspension: arg.suspension && this.abstractToken(arg.suspension),
|
||||
pattern: this.abstractExpression(arg.pattern),
|
||||
type: arg.typeNode && {
|
||||
operator: this.abstractToken(arg.typeNode.operator),
|
||||
type: this.abstractExpression(arg.typeNode.typeNode),
|
||||
},
|
||||
close2: arg.close2 && this.abstractToken(arg.close2),
|
||||
defaultValue: arg.default && {
|
||||
equals: this.abstractToken(arg.default.equals),
|
||||
expression: this.abstractExpression(arg.default.expression),
|
||||
},
|
||||
close: arg.close && this.abstractToken(arg.close),
|
||||
}))
|
||||
const equals = this.abstractToken(tree.equals)
|
||||
const body = tree.body !== undefined ? this.abstractExpression(tree.body) : undefined
|
||||
return Function.concrete(this.module, {
|
||||
docLine,
|
||||
annotationLines,
|
||||
signatureLine,
|
||||
private_,
|
||||
name,
|
||||
argumentDefinitions,
|
||||
equals,
|
||||
body,
|
||||
} satisfies FunctionFields<OwnedRefs>)
|
||||
}
|
||||
|
||||
private abstractToken(token: RawAst.Token): { whitespace: string; node: Token } {
|
||||
const whitespaceStart = token.whitespaceStartInCodeBuffer
|
||||
const whitespaceEnd = whitespaceStart + token.whitespaceLengthInCodeBuffer
|
||||
@ -412,7 +432,7 @@ class Abstractor {
|
||||
return {
|
||||
type: 'splice',
|
||||
open: this.abstractToken(raw.open),
|
||||
expression: raw.expression && this.abstractTree(raw.expression),
|
||||
expression: raw.expression && this.abstractExpression(raw.expression),
|
||||
close: this.abstractToken(raw.close),
|
||||
}
|
||||
}
|
||||
@ -440,9 +460,19 @@ class Abstractor {
|
||||
|
||||
private abstractTypeSignature(signature: RawAst.TypeSignature) {
|
||||
return {
|
||||
name: this.abstractTree(signature.name),
|
||||
name: this.abstractExpression(signature.name),
|
||||
operator: this.abstractToken(signature.operator),
|
||||
type: this.abstractTree(signature.typeNode),
|
||||
type: this.abstractExpression(signature.typeNode),
|
||||
}
|
||||
}
|
||||
|
||||
private abstractDocLine(docLine: RawAst.DocLine) {
|
||||
return {
|
||||
docs: {
|
||||
open: this.abstractToken(docLine.docs.open),
|
||||
elements: Array.from(docLine.docs.elements, this.abstractTextToken.bind(this)),
|
||||
},
|
||||
newlines: Array.from(docLine.newlines, this.abstractToken.bind(this)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -512,144 +542,81 @@ export function print(ast: Ast): PrintedSource {
|
||||
nodes: new Map(),
|
||||
tokens: new Map(),
|
||||
}
|
||||
const code = ast.printSubtree(info, 0, undefined)
|
||||
const code = ast.printSubtree(info, 0, null)
|
||||
return { info, code }
|
||||
}
|
||||
|
||||
/**
|
||||
* Used by `Ast.printSubtree`. Note that some AST types have overrides.
|
||||
* Used by `Ast.printSubtree`.
|
||||
* @internal
|
||||
*/
|
||||
export function printAst(
|
||||
ast: Ast,
|
||||
info: SpanMap,
|
||||
offset: number,
|
||||
parentIndent: string | undefined,
|
||||
verbatim?: boolean,
|
||||
parentIndent: string | null,
|
||||
verbatim: boolean = false,
|
||||
): string {
|
||||
let code = ''
|
||||
for (const child of ast.concreteChildren(verbatim)) {
|
||||
let currentLineIndent = parentIndent
|
||||
let prevIsNewline = false
|
||||
let isFirstToken = offset === 0
|
||||
for (const child of ast.concreteChildren({ verbatim, indent: parentIndent })) {
|
||||
if (!isTokenId(child.node) && ast.module.get(child.node) === undefined) continue
|
||||
if (child.whitespace != null) {
|
||||
code += child.whitespace
|
||||
} else if (code.length != 0) {
|
||||
code += ' '
|
||||
if (prevIsNewline) currentLineIndent = child.whitespace
|
||||
const token = isTokenId(child.node) ? ast.module.getToken(child.node) : undefined
|
||||
// Every line in a block starts with a newline token. In an AST produced by the parser, the newline token at the
|
||||
// first line of a module is zero-length. In order to handle whitespace correctly if the lines of a module are
|
||||
// rearranged, if a zero-length newline is encountered within a block, it is printed as an ordinary newline
|
||||
// character, and if an ordinary newline is found at the beginning of the output, it is not printed; however if the
|
||||
// output begins with a newline including a (plain) comment, we print the line as we would in any other block.
|
||||
if (
|
||||
token?.tokenType_ == RawAst.Token.Type.Newline &&
|
||||
isFirstToken &&
|
||||
(!token.code_ || token.code_ === '\n')
|
||||
) {
|
||||
prevIsNewline = true
|
||||
isFirstToken = false
|
||||
continue
|
||||
}
|
||||
if (isTokenId(child.node)) {
|
||||
code += child.whitespace
|
||||
if (token) {
|
||||
const tokenStart = offset + code.length
|
||||
const token = ast.module.getToken(child.node)
|
||||
const span = tokenKey(tokenStart, token.code().length)
|
||||
prevIsNewline = token.tokenType_ == RawAst.Token.Type.Newline
|
||||
let tokenCode = token.code_
|
||||
if (token.tokenType_ == RawAst.Token.Type.Newline) {
|
||||
tokenCode = tokenCode || '\n'
|
||||
}
|
||||
const span = tokenKey(tokenStart, tokenCode.length)
|
||||
info.tokens.set(span, token)
|
||||
code += token.code()
|
||||
code += tokenCode
|
||||
} else {
|
||||
assert(!isTokenId(child.node))
|
||||
prevIsNewline = false
|
||||
const childNode = ast.module.get(child.node)
|
||||
code += childNode.printSubtree(info, offset + code.length, parentIndent, verbatim)
|
||||
code += childNode.printSubtree(info, offset + code.length, currentLineIndent, verbatim)
|
||||
// Extra structural validation.
|
||||
assertEqual(childNode.id, child.node)
|
||||
if (parentId(childNode) !== ast.id) {
|
||||
console.error(
|
||||
`Inconsistent parent pointer (expected ${ast.id})`,
|
||||
childNode,
|
||||
graphParentPointers(ast.module.root()!),
|
||||
)
|
||||
console.error(`Inconsistent parent pointer (expected ${ast.id})`, childNode)
|
||||
}
|
||||
assertEqual(parentId(childNode), ast.id)
|
||||
}
|
||||
isFirstToken = false
|
||||
}
|
||||
const span = nodeKey(offset, code.length)
|
||||
// Adjustment to handle an edge case: A module starts with a zero-length newline token. If its first line is indented,
|
||||
// the initial whitespace belongs to the first line because it isn't hoisted past the (zero-length) newline to be the
|
||||
// leading whitespace for the block. In that case, our representation of the block contains leading whitespace at the
|
||||
// beginning, which must be excluded when calculating spans.
|
||||
const leadingWhitespace = code.match(/ */)?.[0].length ?? 0
|
||||
const span = nodeKey(offset + leadingWhitespace, code.length - leadingWhitespace)
|
||||
map.setIfUndefined(info.nodes, span, (): Ast[] => []).unshift(ast)
|
||||
return code
|
||||
}
|
||||
|
||||
/**
|
||||
* Use `Ast.code()' to stringify.
|
||||
* @internal
|
||||
*/
|
||||
export function printBlock(
|
||||
block: BodyBlock,
|
||||
info: SpanMap,
|
||||
offset: number,
|
||||
parentIndent: string | undefined,
|
||||
verbatim?: boolean,
|
||||
): string {
|
||||
let blockIndent: string | undefined
|
||||
let code = ''
|
||||
block.fields.get('lines').forEach((line, index) => {
|
||||
code += line.newline.whitespace ?? ''
|
||||
const newlineCode = block.module.getToken(line.newline.node).code()
|
||||
// Only print a newline if this isn't the first line in the output, or it's a comment.
|
||||
if (offset || index || newlineCode.startsWith('#')) {
|
||||
// If this isn't the first line in the output, but there is a concrete newline token:
|
||||
// if it's a zero-length newline, ignore it and print a normal newline.
|
||||
code += newlineCode || '\n'
|
||||
}
|
||||
if (line.expression) {
|
||||
if (blockIndent === undefined) {
|
||||
if ((line.expression.whitespace?.length ?? 0) > (parentIndent?.length ?? 0)) {
|
||||
blockIndent = line.expression.whitespace!
|
||||
} else if (parentIndent !== undefined) {
|
||||
blockIndent = parentIndent + ' '
|
||||
} else {
|
||||
blockIndent = ''
|
||||
}
|
||||
}
|
||||
const validIndent = (line.expression.whitespace?.length ?? 0) > (parentIndent?.length ?? 0)
|
||||
code += validIndent ? line.expression.whitespace : blockIndent
|
||||
const lineNode = block.module.get(line.expression.node)
|
||||
assertEqual(lineNode.id, line.expression.node)
|
||||
assertEqual(parentId(lineNode), block.id)
|
||||
code += lineNode.printSubtree(info, offset + code.length, blockIndent, verbatim)
|
||||
}
|
||||
})
|
||||
const span = nodeKey(offset, code.length)
|
||||
map.setIfUndefined(info.nodes, span, (): Ast[] => []).unshift(block)
|
||||
return code
|
||||
}
|
||||
|
||||
/**
|
||||
* Use `Ast.code()' to stringify.
|
||||
* @internal
|
||||
*/
|
||||
export function printDocumented(
|
||||
documented: Documented,
|
||||
info: SpanMap,
|
||||
offset: number,
|
||||
parentIndent: string | undefined,
|
||||
verbatim?: boolean,
|
||||
): string {
|
||||
const open = documented.fields.get('open')
|
||||
const topIndent = parentIndent ?? open.whitespace ?? ''
|
||||
let code = ''
|
||||
code += open.node.code_
|
||||
const minWhitespaceLength = topIndent.length + 1
|
||||
let preferredWhitespace = topIndent + ' '
|
||||
documented.fields.get('elements').forEach(({ token }, i) => {
|
||||
if (i === 0) {
|
||||
const whitespace = token.whitespace ?? ' '
|
||||
code += whitespace
|
||||
code += token.node.code_
|
||||
preferredWhitespace += whitespace
|
||||
} else if (token.node.tokenType_ === RawAst.Token.Type.TextSection) {
|
||||
if (token.whitespace && (verbatim || token.whitespace.length >= minWhitespaceLength))
|
||||
code += token.whitespace
|
||||
else code += preferredWhitespace
|
||||
code += token.node.code_
|
||||
} else {
|
||||
code += token.whitespace ?? ''
|
||||
code += token.node.code_
|
||||
}
|
||||
})
|
||||
code += documented.fields
|
||||
.get('newlines')
|
||||
.map(({ whitespace, node }) => (whitespace ?? '') + node.code_)
|
||||
.join('')
|
||||
if (documented.expression) {
|
||||
code += documented.fields.get('expression')?.whitespace ?? topIndent
|
||||
code += documented.expression.printSubtree(info, offset + code.length, topIndent, verbatim)
|
||||
}
|
||||
const span = nodeKey(offset, code.length)
|
||||
map.setIfUndefined(info.nodes, span, (): Ast[] => []).unshift(documented)
|
||||
return code
|
||||
/** Parse the input as a complete module. */
export function parseModule(code: string, module?: MutableModule): Owned<MutableBodyBlock> {
  return parseModuleWithSpans(code, module).root
}

/** Parse the input as a body block, not the top level of a module. */
@ -659,19 +626,43 @@ export function parseBlock(code: string, module?: MutableModule): Owned<MutableB
}

/**
 * Parse the input. If it contains a single expression at the top level, return it; otherwise, parse it as a body block.
 * Parse the input as a statement. If it cannot be parsed as a statement (e.g. it is invalid or a block), returns
 * `undefined`.
 */
export function parse(code: string, module?: MutableModule): Owned {
export function parseStatement(
  code: string,
  module?: MutableModule,
): Owned<MutableStatement> | undefined {
  const module_ = module ?? MutableModule.Transient()
  const ast = parseBlock(code, module)
  const soleStatement = tryGetSoleValue(ast.statements())
  if (!soleStatement) return ast
  if (!soleStatement) return
  const parent = parentId(soleStatement)
  if (parent) module_.delete(parent)
  soleStatement.fields.set('parent', undefined)
  return asOwned(soleStatement)
}

/**
 * Parse the input as an expression. If it cannot be parsed as an expression (e.g. it is a statement or block), returns
 * `undefined`.
 */
export function parseExpression(
  code: string,
  module?: MutableModule,
): Owned<MutableExpression> | undefined {
  const module_ = module ?? MutableModule.Transient()
  const ast = parseBlock(code, module)
  const soleStatement = tryGetSoleValue(ast.statements())
  if (!(soleStatement instanceof MutableExpressionStatement)) return undefined
  const expression = soleStatement.expression
  module_.delete(soleStatement.id)
  const parent = parentId(expression)
  if (parent) module_.delete(parent)
  expression.fields.set('parent', undefined)
  return asOwned(expression)
}

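As a hedged illustration of the new entry points defined above (not part of the diff; the example inputs are assumptions about typical Enso snippets):

```ts
// Sketch only: the context-specific parsers return `undefined` rather than guessing
// when the input does not fit the requested context.
import { parseBlock, parseExpression, parseStatement } from './parse' // placeholder path

const expr = parseExpression('1 + 2') // Owned<MutableExpression> | undefined
const stmt = parseStatement('x = 1 + 2') // Owned<MutableStatement> | undefined
const block = parseBlock('x = 1\ny = 2') // always yields a body block

// Inputs that do not match the requested context:
parseExpression('x = 1') // an assignment statement, not an expression -> undefined
parseStatement('x = 1\ny = 2') // two statements, not one -> undefined
```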
/** Parse a module, and return it along with a mapping from source locations to parsed objects. */
|
||||
export function parseModuleWithSpans(
|
||||
code: string,
|
||||
@ -690,7 +681,7 @@ export function parseExtended(code: string, idMap?: IdMap | undefined, inModule?
|
||||
const module = inModule ?? MutableModule.Transient()
|
||||
const { root, spans, toRaw } = module.transact(() => {
|
||||
const { root, spans, toRaw } = abstract(module, rawRoot, code)
|
||||
root.module.replaceRoot(root)
|
||||
root.module.setRoot(root)
|
||||
if (idMap) setExternalIds(root.module, spans, idMap)
|
||||
return { root, spans, toRaw }
|
||||
})
|
||||
@ -702,7 +693,7 @@ export function parseExtended(code: string, idMap?: IdMap | undefined, inModule?
|
||||
/** Return the number of `Ast`s in the tree, including the provided root. */
|
||||
export function astCount(ast: Ast): number {
|
||||
let count = 0
|
||||
ast.visitRecursiveAst(_subtree => {
|
||||
ast.visitRecursive(_subtree => {
|
||||
count += 1
|
||||
})
|
||||
return count
|
||||
@ -784,11 +775,11 @@ export function repair(
|
||||
const fixes = module ?? root.module.edit()
|
||||
for (const ast of lostInline) {
|
||||
if (ast instanceof Group) continue
|
||||
fixes.getVersion(ast).update(ast => Group.new(fixes, ast))
|
||||
fixes.getVersion(ast).update(ast => Group.new(fixes, ast as any))
|
||||
}
|
||||
|
||||
// Verify that it's fixed.
|
||||
const printed2 = print(fixes.getVersion(root))
|
||||
const printed2 = print(fixes.root()!)
|
||||
const reparsed2 = parseModuleWithSpans(printed2.code)
|
||||
const { lostInline: lostInline2, lostBlock: lostBlock2 } = checkSpans(
|
||||
printed2.info.nodes,
|
||||
@ -851,7 +842,7 @@ function resync(
|
||||
function hashSubtreeSyntax(ast: Ast, hashesOut: Map<SyntaxHash, Ast[]>): SyntaxHash {
|
||||
let content = ''
|
||||
content += ast.typeName + ':'
|
||||
for (const child of ast.concreteChildren()) {
|
||||
for (const child of ast.concreteChildren({ verbatim: false, indent: '' })) {
|
||||
content += child.whitespace ?? '?'
|
||||
if (isTokenId(child.node)) {
|
||||
content += 'Token:' + hashString(ast.module.getToken(child.node).code())
|
||||
@ -885,12 +876,6 @@ function syntaxHash(root: Ast) {
|
||||
return { root: rootHash, hashes }
|
||||
}
|
||||
|
||||
/** If the input is a block containing a single expression, return the expression; otherwise return the input. */
|
||||
function rawBlockToInline(tree: RawAst.Tree.Tree) {
|
||||
if (tree.type !== RawAst.Tree.Type.BodyBlock) return tree
|
||||
return tryGetSoleValue(tree.statements)?.expression ?? tree
|
||||
}
|
||||
|
||||
/** Update `ast` to match the given source code, while modifying it as little as possible. */
|
||||
export function syncToCode(ast: MutableAst, code: string, metadataSource?: Module) {
|
||||
const codeBefore = ast.code()
|
||||
@ -984,9 +969,22 @@ export function applyTextEditsToAst(
|
||||
) {
|
||||
const printed = print(ast)
|
||||
const code = applyTextEdits(printed.code, textEdits)
|
||||
const rawParsedBlock = rawParseModule(code)
|
||||
const rawParsed =
|
||||
ast instanceof MutableBodyBlock ? rawParsedBlock : rawBlockToInline(rawParsedBlock)
|
||||
const astModuleRoot = ast.module.root()
|
||||
const rawParsedBlock =
|
||||
ast instanceof MutableBodyBlock && astModuleRoot && ast.is(astModuleRoot) ?
|
||||
rawParseModule(code)
|
||||
: rawParseBlock(code)
|
||||
const rawParsedStatement =
|
||||
ast instanceof MutableBodyBlock ? undefined : (
|
||||
tryGetSoleValue(rawParsedBlock.statements)?.expression
|
||||
)
|
||||
const rawParsedExpression =
|
||||
ast.isExpression() ?
|
||||
rawParsedStatement?.type === RawAst.Tree.Type.ExpressionStatement ?
|
||||
rawParsedStatement.expression
|
||||
: undefined
|
||||
: undefined
|
||||
const rawParsed = rawParsedExpression ?? rawParsedStatement ?? rawParsedBlock
|
||||
const parsed = abstract(ast.module, rawParsed, code)
|
||||
const toSync = calculateCorrespondence(
|
||||
ast,
|
||||
@ -1031,7 +1029,7 @@ function syncTree(
|
||||
target.fields.get('metadata').set('externalId', newExternalId())
|
||||
}
|
||||
const newRoot = syncRoot ? target : newContent
|
||||
newRoot.visitRecursiveAst(ast => {
|
||||
newRoot.visitRecursive(ast => {
|
||||
const syncFieldsFrom = toSync.get(ast.id)
|
||||
const editAst = edit.getVersion(ast)
|
||||
if (syncFieldsFrom) {
|
||||
|
@ -157,7 +157,7 @@ export function isOperator(code: string): code is Operator {
|
||||
|
||||
/** @internal */
|
||||
export function isTokenId(
|
||||
t: DeepReadonly<SyncTokenId | AstId | Ast | Owned<Ast> | Owned>,
|
||||
t: DeepReadonly<SyncTokenId | AstId | Ast | Owned>,
|
||||
): t is DeepReadonly<SyncTokenId> {
|
||||
return typeof t === 'object' && !(t instanceof Ast)
|
||||
}
|
||||
|
File diff suppressed because it is too large
@ -462,7 +462,7 @@ public class ErrorCompilerTest extends CompilerTests {
|
||||
public void illegalForeignBody4() throws Exception {
|
||||
var ir = parse("foreign js foo = 4");
|
||||
assertSingleSyntaxError(
|
||||
ir, Syntax.UnexpectedExpression$.MODULE$, "Unexpected expression", 0, 18);
|
||||
ir, new Syntax.InvalidForeignDefinition("Expected text literal as body"), null, 0, 18);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -2275,7 +2275,7 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers {
|
||||
|
|
||||
|## My sweet type
|
||||
|type Mtp
|
||||
| ## My sweet type
|
||||
| ## My sweet value
|
||||
| MyType a b""".stripMargin
|
||||
val module = code.preprocessModule()
|
||||
|
||||
@ -2306,7 +2306,7 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers {
|
||||
.Argument("b", SuggestionBuilder.Any, false, false, None)
|
||||
),
|
||||
returnType = "Test.Mtp",
|
||||
documentation = Some(" My sweet type"),
|
||||
documentation = Some(" My sweet value"),
|
||||
annotations = Seq()
|
||||
),
|
||||
Vector()
|
||||
|
@ -37,6 +37,7 @@ import org.enso.compiler.core.ir.module.scope.imports.Polyglot;
|
||||
import org.enso.syntax2.ArgumentDefinition;
|
||||
import org.enso.syntax2.Base;
|
||||
import org.enso.syntax2.DocComment;
|
||||
import org.enso.syntax2.DocLine;
|
||||
import org.enso.syntax2.FunctionAnnotation;
|
||||
import org.enso.syntax2.Line;
|
||||
import org.enso.syntax2.Parser;
|
||||
@ -170,21 +171,18 @@ final class TreeToIr {
|
||||
List<Diagnostic> diag = nil();
|
||||
for (Line line : module.getStatements()) {
|
||||
var expr = line.getExpression();
|
||||
// Documentation found among imports/exports or at the top of the module (if it starts with imports) is
|
||||
// placed in `bindings` because that's what the Scala parser used to do.
|
||||
while (expr instanceof Tree.Documented doc) {
|
||||
Definition c;
|
||||
try {
|
||||
c = translateComment(doc, doc.getDocumentation());
|
||||
} catch (SyntaxException ex) {
|
||||
c = ex.toError();
|
||||
}
|
||||
bindings = join(c, bindings);
|
||||
expr = doc.getExpression();
|
||||
}
|
||||
switch (expr) {
|
||||
case Tree.Import imp -> imports = join(translateImport(imp), imports);
|
||||
case Tree.Export exp -> exports = join(translateExport(exp), exports);
|
||||
case Tree.Documentation docs -> {
|
||||
Definition docsIr;
|
||||
try {
|
||||
docsIr = translateComment(docs, docs.getDocs());
|
||||
} catch (SyntaxException ex) {
|
||||
docsIr = ex.toError();
|
||||
}
|
||||
bindings = join(docsIr, bindings);
|
||||
}
|
||||
case Tree.Private priv -> {
|
||||
if (isPrivate) {
|
||||
var error = translateSyntaxError(priv, Syntax.UnexpectedExpression$.MODULE$);
|
||||
@ -245,22 +243,12 @@ final class TreeToIr {
|
||||
|
||||
case Tree.Function fn -> translateMethodBinding(fn, appendTo);
|
||||
|
||||
case Tree.ForeignFunction fn when fn.getBody() instanceof Tree.TextLiteral body -> {
|
||||
case Tree.ForeignFunction fn -> {
|
||||
var name = fn.getName();
|
||||
var nameLoc = getIdentifiedLocation(name);
|
||||
var methodRef = new Name.MethodReference(Option.empty(), buildName(name), nameLoc, meta());
|
||||
var args = translateArgumentsDefinition(fn.getArgs());
|
||||
var languageName = fn.getLanguage().codeRepr();
|
||||
var language = languageName;
|
||||
if (language == null) {
|
||||
var message = "Language '" + languageName + "' is not a supported polyglot language.";
|
||||
var error = translateSyntaxError(inputAst, new Syntax.InvalidForeignDefinition(message));
|
||||
yield join(error, appendTo);
|
||||
}
|
||||
var text = buildTextConstant(body, body.getElements());
|
||||
var def =
|
||||
new Foreign.Definition(language, text, getIdentifiedLocation(fn.getBody()), meta());
|
||||
// Foreign functions are always considered private
|
||||
var def = translateForeignFunction(fn);
|
||||
var binding = new Method.Binding(
|
||||
methodRef, args, true, def, getIdentifiedLocation(inputAst), meta());
|
||||
yield join(binding, appendTo);
|
||||
@ -272,11 +260,14 @@ final class TreeToIr {
|
||||
yield translateModuleSymbol(anno.getExpression(), join(annotation, appendTo));
|
||||
}
|
||||
|
||||
case Tree.Documented doc -> {
|
||||
var comment = translateComment(doc, doc.getDocumentation());
|
||||
yield translateModuleSymbol(doc.getExpression(), join(comment, appendTo));
|
||||
case Tree.ExpressionStatement statement -> {
|
||||
if (statement.getDocLine() instanceof DocLine docLine)
|
||||
appendTo = join(translateComment(statement, docLine.getDocs()), appendTo);
|
||||
yield translateModuleSymbol(statement.getExpression(), appendTo);
|
||||
}
|
||||
|
||||
case Tree.Documentation doc -> join(translateComment(doc, doc.getDocs()), appendTo);
|
||||
|
||||
case Tree.TypeSignatureDeclaration sig -> {
|
||||
var ascription = translateMethodTypeSignature(sig.getSignature());
|
||||
yield join(ascription, appendTo);
|
||||
@ -289,6 +280,22 @@ final class TreeToIr {
|
||||
};
|
||||
}
|
||||
|
||||
private Expression translateForeignFunction(Tree.ForeignFunction fn) throws SyntaxException {
|
||||
var languageName = fn.getLanguage().codeRepr();
|
||||
var language = languageName;
|
||||
if (language == null) {
|
||||
var message = "Language '" + languageName + "' is not a supported polyglot language.";
|
||||
return translateSyntaxError(fn, new Syntax.InvalidForeignDefinition(message));
|
||||
}
|
||||
String text;
|
||||
if (fn.getBody() instanceof Tree.TextLiteral body) {
|
||||
text = buildTextConstant(body, body.getElements());
|
||||
} else {
|
||||
return translateSyntaxError(fn, new Syntax.InvalidForeignDefinition("Expected text literal as body"));
|
||||
}
|
||||
return new Foreign.Definition(language, text, getIdentifiedLocation(fn.getBody()), meta());
|
||||
}
|
||||
|
||||
private List<DefinitionArgument> translateArgumentsDefinition(
|
||||
java.util.List<ArgumentDefinition> args) throws SyntaxException {
|
||||
List<DefinitionArgument> res = nil();
|
||||
@ -300,6 +307,15 @@ final class TreeToIr {
|
||||
}
|
||||
|
||||
List<IR> translateConstructorDefinition(Tree.ConstructorDefinition cons, List<IR> appendTo) {
|
||||
if (cons.getDocLine() instanceof DocLine docLine) {
|
||||
Definition docsIr;
|
||||
try {
|
||||
docsIr = translateComment(cons, docLine.getDocs());
|
||||
} catch (SyntaxException ex) {
|
||||
docsIr = ex.toError();
|
||||
}
|
||||
appendTo = join(docsIr, appendTo);
|
||||
}
|
||||
for (var annoLine : cons.getAnnotationLines()) {
|
||||
appendTo = join(translateAnnotation(annoLine.getAnnotation()), appendTo);
|
||||
}
|
||||
@ -360,26 +376,21 @@ final class TreeToIr {
|
||||
|
||||
case Tree.Function fun -> translateTypeMethodBinding(fun, appendTo);
|
||||
|
||||
case Tree.ForeignFunction fn when fn.getBody() instanceof Tree.TextLiteral body -> {
|
||||
case Tree.ForeignFunction fn -> {
|
||||
var name = buildName(fn.getName());
|
||||
var args = translateArgumentsDefinition(fn.getArgs());
|
||||
var languageName = fn.getLanguage().codeRepr();
|
||||
var language = languageName;
|
||||
if (language == null) {
|
||||
var message = "Language '" + languageName + "' is not a supported polyglot language.";
|
||||
var error = translateSyntaxError(inputAst, new Syntax.InvalidForeignDefinition(message));
|
||||
yield join(error, appendTo);
|
||||
}
|
||||
var text = buildTextConstant(body, body.getElements());
|
||||
var def =
|
||||
new Foreign.Definition(language, text, getIdentifiedLocation(fn.getBody()), meta());
|
||||
var def = translateForeignFunction(fn);
|
||||
var binding =
|
||||
new Function.Binding(name, args, def, false, getIdentifiedLocation(fn), true, meta());
|
||||
yield join(binding, appendTo);
|
||||
}
|
||||
case Tree.Documented doc -> {
|
||||
var irDoc = translateComment(doc, doc.getDocumentation());
|
||||
yield translateTypeBodyExpression(doc.getExpression(), join(irDoc, appendTo));
|
||||
|
||||
case Tree.Documentation docs -> join(translateComment(docs, docs.getDocs()), appendTo);
|
||||
|
||||
case Tree.ExpressionStatement statement -> {
|
||||
if (statement.getDocLine() instanceof DocLine docLine)
|
||||
appendTo = join(translateComment(statement, docLine.getDocs()), appendTo);
|
||||
yield translateTypeBodyExpression(statement.getExpression(), appendTo);
|
||||
}
|
||||
|
||||
case Tree.AnnotatedBuiltin anno -> {
|
||||
@ -459,6 +470,9 @@ final class TreeToIr {
|
||||
|
||||
private List<Definition> translateMethodBinding(Tree.Function fn, List<Definition> appendTo)
|
||||
throws SyntaxException {
|
||||
if (fn.getDocLine() instanceof DocLine docLine) {
|
||||
appendTo = join(translateComment(fn, docLine.getDocs()), appendTo);
|
||||
}
|
||||
for (var annoLine : fn.getAnnotationLines()) {
|
||||
appendTo = join(translateAnnotation(annoLine.getAnnotation()), appendTo);
|
||||
}
|
||||
@ -494,6 +508,15 @@ final class TreeToIr {
|
||||
}
|
||||
|
||||
private List<IR> translateTypeMethodBinding(Tree.Function fun, List<IR> appendTo) {
|
||||
if (fun.getDocLine() instanceof DocLine docLine) {
|
||||
IR comment;
|
||||
try {
|
||||
comment = translateComment(fun, docLine.getDocs());
|
||||
} catch (SyntaxException ex) {
|
||||
comment = ex.toError();
|
||||
}
|
||||
appendTo = join(comment, appendTo);
|
||||
}
|
||||
for (var annoLine : fun.getAnnotationLines()) {
|
||||
appendTo = join(translateAnnotation(annoLine.getAnnotation()), appendTo);
|
||||
}
|
||||
@ -972,8 +995,8 @@ final class TreeToIr {
|
||||
continue;
|
||||
}
|
||||
var branch = line.getCase();
|
||||
if (branch.getDocumentation() != null) {
|
||||
var comment = translateComment(cas, branch.getDocumentation());
|
||||
if (branch.getDocLine() instanceof DocLine docLine) {
|
||||
var comment = translateComment(cas, docLine.getDocs());
|
||||
var loc = getIdentifiedLocation(cas);
|
||||
var doc = new Pattern.Documentation(comment.doc(), loc, meta());
|
||||
var br = new Case.Branch(
|
||||
@ -1023,9 +1046,7 @@ final class TreeToIr {
|
||||
getIdentifiedLocation(anno), meta());
|
||||
yield translateBuiltinAnnotation(ir, anno.getExpression(), nil());
|
||||
}
|
||||
// Documentation can be attached to an expression in a few cases, like if someone documents a line of an
|
||||
// `ArgumentBlockApplication`. The documentation is ignored.
|
||||
case Tree.Documented docu -> translateExpression(docu.getExpression());
|
||||
|
||||
case Tree.App app -> {
|
||||
var fn = translateExpression(app.getFunc(), isMethod);
|
||||
var loc = getIdentifiedLocation(app);
|
||||
@ -1099,10 +1120,28 @@ final class TreeToIr {
|
||||
private void translateBlockStatement(Tree tree, Collection<Expression> appendTo) {
|
||||
switch (tree) {
|
||||
case null -> {}
|
||||
case Tree.Assignment assign -> {
|
||||
appendTo.add(translateAssignment(assign));
|
||||
case Tree.Assignment assignment -> {
|
||||
if (assignment.getDocLine() instanceof DocLine docLine) {
|
||||
Expression docsIr;
|
||||
try {
|
||||
docsIr = translateComment(assignment, docLine.getDocs());
|
||||
} catch (SyntaxException ex) {
|
||||
docsIr = ex.toError();
|
||||
}
|
||||
appendTo.add(docsIr);
|
||||
}
|
||||
appendTo.add(translateAssignment(assignment));
|
||||
}
|
||||
case Tree.Function fun -> {
|
||||
if (fun.getDocLine() instanceof DocLine docLine) {
|
||||
Expression comment;
|
||||
try {
|
||||
comment = translateComment(fun, docLine.getDocs());
|
||||
} catch (SyntaxException ex) {
|
||||
comment = ex.toError();
|
||||
}
|
||||
appendTo.add(comment);
|
||||
}
|
||||
for (var annoLine : fun.getAnnotationLines()) {
|
||||
appendTo.add((Expression)translateAnnotation(annoLine.getAnnotation()));
|
||||
}
|
||||
@ -1114,15 +1153,26 @@ final class TreeToIr {
|
||||
case Tree.TypeSignatureDeclaration sig -> {
|
||||
appendTo.add(translateTypeSignatureToOprApp(sig.getSignature()));
|
||||
}
|
||||
case Tree.Documented doc -> {
|
||||
case Tree.Documentation docs -> {
|
||||
Expression ir;
|
||||
try {
|
||||
ir = translateComment(doc, doc.getDocumentation());
|
||||
ir = translateComment(docs, docs.getDocs());
|
||||
} catch (SyntaxException ex) {
|
||||
ir = ex.toError();
|
||||
}
|
||||
appendTo.add(ir);
|
||||
translateBlockStatement(doc.getExpression(), appendTo);
|
||||
}
|
||||
case Tree.ExpressionStatement statement -> {
|
||||
if (statement.getDocLine() instanceof DocLine docLine) {
|
||||
Expression ir;
|
||||
try {
|
||||
ir = translateComment(statement, docLine.getDocs());
|
||||
} catch (SyntaxException ex) {
|
||||
ir = ex.toError();
|
||||
}
|
||||
appendTo.add(ir);
|
||||
}
|
||||
translateBlockStatement(statement.getExpression(), appendTo);
|
||||
}
|
||||
default -> {
|
||||
var expressionStatement = translateExpression(tree);
|
||||
@ -1203,7 +1253,7 @@ final class TreeToIr {
|
||||
case Tree.OprSectionBoundary section -> section.getAst();
|
||||
case Tree.TemplateFunction function -> function.getAst();
|
||||
case Tree.AnnotatedBuiltin annotated -> annotated.getExpression();
|
||||
case Tree.Documented documented -> documented.getExpression();
|
||||
case Tree.ExpressionStatement statement -> statement.getExpression();
|
||||
case Tree.Assignment assignment -> assignment.getExpr();
|
||||
case Tree.TypeAnnotated annotated -> annotated.getExpression();
|
||||
case Tree.App app when isApplication(app.getFunc()) -> app.getFunc();
|
||||
@ -1217,6 +1267,7 @@ final class TreeToIr {
|
||||
case Tree.CaseOf ignored -> null;
|
||||
case Tree.Array ignored -> null;
|
||||
case Tree.Tuple ignored -> null;
|
||||
case Tree.Documentation ignored -> null;
|
||||
default -> null;
|
||||
};
|
||||
}
|
||||
|
@ -233,7 +233,9 @@ export default [
},
],
'@typescript-eslint/no-namespace': 'off',
'@typescript-eslint/no-empty-object-type': ['error'],
// Empty interfaces have valid uses; e.g. although an empty interface extending a class is semantically equivalent
// to a type alias, it is not resolved by IDEs to the base type (which may be internal).
'@typescript-eslint/no-empty-object-type': ['error', { allowInterfaces: 'always' }],
'no-unused-labels': 'off',
// Taken care of by prettier
'vue/max-attributes-per-line': 'off',

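For context, the kind of declaration the relaxed lint rule is meant to allow looks roughly like the sketch below; the names are invented for illustration.

```ts
// Illustration only; `AstImpl` and `Expression` are hypothetical names.
class AstImpl {
  // ...internal representation that should not leak into the public API
}

// Semantically this is equivalent to `type Expression = AstImpl`, but IDEs resolve
// references to `Expression` to this interface instead of the internal base class.
export interface Expression extends AstImpl {}
```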
@ -95,15 +95,29 @@ where T: serde::Serialize + Reflect {
|
||||
let (car, _) = cons.into_pair();
|
||||
Value::cons(car, Value::Null)
|
||||
};
|
||||
let simplify_tree = |list: Value| {
|
||||
let list = strip_hidden_fields(list);
|
||||
let vec = list.to_vec().unwrap();
|
||||
if vec[0].as_symbol().unwrap() == "ExpressionStatement" {
|
||||
match &vec[1..] {
|
||||
[Value::Cons(doc_line), Value::Cons(expr)] if doc_line.cdr().is_null() =>
|
||||
return expr.cdr().to_owned(),
|
||||
_ => {}
|
||||
}
|
||||
};
|
||||
list
|
||||
};
|
||||
let line = rust_to_meta[&tree::block::Line::reflect().id];
|
||||
let operator_line = rust_to_meta[&tree::block::OperatorLine::reflect().id];
|
||||
let type_signature_line = rust_to_meta[&tree::TypeSignatureLine::reflect().id];
|
||||
let invalid = rust_to_meta[&tree::Invalid::reflect().id];
|
||||
let tree = rust_to_meta[&tree::Tree::reflect().id];
|
||||
to_s_expr.mapper(line, into_car);
|
||||
to_s_expr.mapper(operator_line, into_car);
|
||||
to_s_expr.mapper(type_signature_line, into_car);
|
||||
to_s_expr.mapper(invalid, strip_invalid);
|
||||
to_s_expr.mapper(text_escape_token, simplify_escape);
|
||||
to_s_expr.mapper(tree, simplify_tree);
|
||||
tuplify(to_s_expr.value(ast_ty, &value))
|
||||
}
|
||||
|
||||
|
@ -156,31 +156,44 @@ fn plain_comments() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doc_comments() {
|
||||
let lines = [
|
||||
"## The Identity Function",
|
||||
"",
|
||||
" Arguments:",
|
||||
" - x: value to do nothing to",
|
||||
"id x = x",
|
||||
];
|
||||
test!(lines.join("\n"),
|
||||
(Documented
|
||||
(#((Section " The Identity Function") (Newline)
|
||||
(Newline)
|
||||
(Section "Arguments:") (Newline)
|
||||
(Section "- x: value to do nothing to"))
|
||||
#(()))
|
||||
,(Function::new("id", sexp![(Ident x)]).with_arg("x"))));
|
||||
let lines = ["type Foo", " ## Test indent handling", " ", " foo bar = foo"];
|
||||
test!(&lines.join("\n"),
|
||||
fn function_documentation() {
|
||||
test!([
|
||||
"## The Identity Function",
|
||||
"",
|
||||
" Arguments:",
|
||||
" - x: value to do nothing to",
|
||||
"id x = x",
|
||||
].join("\n"),
|
||||
,(Function::new("id", sexp![(Ident x)])
|
||||
.with_docs(sexp![
|
||||
((#((Section " The Identity Function") (Newline) (Newline)
|
||||
(Section "Arguments:") (Newline)
|
||||
(Section "- x: value to do nothing to")))
|
||||
#(()))])
|
||||
.with_arg("x")));
|
||||
test!(&["type Foo", " ## Test indent handling", " ", " foo bar = foo"].join("\n"),
|
||||
(TypeDef Foo #() #(
|
||||
(Documented
|
||||
(#((Section " Test indent handling")) #(() ()))
|
||||
,(Function::new("foo", sexp![(Ident foo)]).with_arg("bar"))))));
|
||||
,(Function::new("foo", sexp![(Ident foo)])
|
||||
.with_docs(sexp![((#((Section " Test indent handling"))) #(() ()))])
|
||||
.with_arg("bar")))));
|
||||
expect_invalid_node("expression ## unexpected doc comment on same line");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_documentation() {
|
||||
test_block!("## The value of x\nx",
|
||||
(ExpressionStatement ((#((Section " The value of x"))) #(())) (Ident x)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unused_documentation() {
|
||||
test!("## First docs\n## More docs\n\n## More docs after a gap",
|
||||
(Documentation (#((Section " First docs"))))
|
||||
(Documentation (#((Section " More docs"))))
|
||||
()
|
||||
(Documentation (#((Section " More docs after a gap")))));
|
||||
}
|
||||
|
||||
|
||||
// === Type Definitions ===
|
||||
|
||||
@ -215,14 +228,17 @@ fn type_constructors() {
|
||||
,(Constructor::new("Bar")
|
||||
.with_arg(sexp![(() (Ident a) (":" (Ident B)) ((OprApp (Ident C) (Ok ".") (Ident D))))]))
|
||||
)));
|
||||
test!("type Foo\n ## Bar\n Baz", (TypeDef Foo #() #(
|
||||
(Documented (#((Section " Bar")) #(()))
|
||||
,(Constructor::new("Baz"))))));
|
||||
test!(["type A", " Foo (a : Integer, b : Integer)"].join("\n"),
|
||||
(TypeDef A #()
|
||||
#(,(Constructor::new("Foo").with_arg(sexp![(() (Ident a) (":" (Invalid)) ())])))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn type_constructor_documentation() {
|
||||
test!("type Foo\n ## Bar\n Baz", (TypeDef Foo #() #(
|
||||
,(Constructor::new("Baz").with_docs(sexp![((#((Section " Bar"))) #(()))])))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn type_constructor_private() {
|
||||
test!(["type Foo", " private Bar"].join("\n"),
|
||||
@ -257,20 +273,18 @@ fn type_methods() {
|
||||
#(,(Function::new("number", block![(Ident x)]))
|
||||
,(Function::new("area", sexp![(OprApp (Ident x) (Ok "+") (Ident x))])
|
||||
.with_arg("self")))));
|
||||
let code = [
|
||||
"type Problem_Builder",
|
||||
" ## Returns a vector containing all reported problems, aggregated.",
|
||||
" build_problemset : Vector",
|
||||
" build_problemset self =",
|
||||
" self",
|
||||
];
|
||||
test!(code.join("\n"),
|
||||
test!([
|
||||
"type Problem_Builder",
|
||||
" ## Returns a vector containing all reported problems, aggregated.",
|
||||
" build_problemset : Vector",
|
||||
" build_problemset self =",
|
||||
" self",
|
||||
].join("\n"),
|
||||
(TypeDef Problem_Builder #() #(
|
||||
(Documented
|
||||
(#((Section " Returns a vector containing all reported problems, aggregated.")) #(()))
|
||||
,(Function::new("build_problemset", block![(Ident self)])
|
||||
.with_sig(sexp![(Ident Vector)])
|
||||
.with_arg("self"))))));
|
||||
,(Function::new("build_problemset", block![(Ident self)])
|
||||
.with_docs(sexp![((#((Section " Returns a vector containing all reported problems, aggregated."))) #(()))])
|
||||
.with_sig(sexp![(Ident Vector)])
|
||||
.with_arg("self")))));
|
||||
test!("[foo., bar.]",
|
||||
(Array (OprSectionBoundary 1 (OprApp (Ident foo) (Ok ".") ()))
|
||||
#(("," (OprSectionBoundary 1 (OprApp (Ident bar) (Ok ".") ()))))));
|
||||
@ -358,12 +372,19 @@ fn assignment_simple() {
|
||||
test!("foo = x", ,(Function::new("foo", sexp![(Ident x)])));
|
||||
// In a body block, this is a variable binding.
|
||||
test_block!("main =\n foo = x",
|
||||
,(Function::new("main", block![(Assignment (Ident foo) (Ident x))])));
|
||||
test_block!("foo=x", (Assignment (Ident foo) (Ident x)));
|
||||
test_block!("foo= x", (Assignment (Ident foo) (Ident x)));
|
||||
,(Function::new("main", block![,(Assignment::new("foo", sexp![(Ident x)]))])));
|
||||
test_block!("foo=x", ,(Assignment::new("foo", sexp![(Ident x)])));
|
||||
test_block!("foo= x", ,(Assignment::new("foo", sexp![(Ident x)])));
|
||||
expect_invalid_node("foo =x");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn assignment_documentation() {
|
||||
test_block!("## The Foo\nfoo = x",
|
||||
,(Assignment::new("foo", sexp![(Ident x)])
|
||||
.with_docs(sexp![((#((Section " The Foo"))) #(()))])));
|
||||
}
|
||||
|
||||
|
||||
// === Functions ===
|
||||
|
||||
@ -563,13 +584,13 @@ fn code_block_body() {
|
||||
|
||||
#[test]
|
||||
fn code_block_operator() {
|
||||
let code = ["value = nums", " * each random", " + constant"];
|
||||
test_block!(code.join("\n"),
|
||||
(Assignment (Ident value)
|
||||
(OperatorBlockApplication (Ident nums)
|
||||
#(((Ok "*") (App (Ident each) (Ident random)))
|
||||
((Ok "+") (Ident constant)))
|
||||
#())));
|
||||
test_block!(["value = nums", " * each random", " + constant"].join("\n"),
|
||||
,(Assignment::new("value", sexp![
|
||||
(OperatorBlockApplication (Ident nums)
|
||||
#(((Ok "*") (App (Ident each) (Ident random)))
|
||||
((Ok "+") (Ident constant)))
|
||||
#())
|
||||
])));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -588,14 +609,14 @@ fn code_block_argument_list() {
|
||||
test!("foo\n bar", (ArgumentBlockApplication (Ident foo) #((Ident bar))));
|
||||
|
||||
test_block!("value = foo\n bar",
|
||||
(Assignment (Ident value) (ArgumentBlockApplication (Ident foo) #((Ident bar)))));
|
||||
,(Assignment::new("value", sexp![(ArgumentBlockApplication (Ident foo) #((Ident bar)))])));
|
||||
|
||||
let code = ["value = foo", " +x", " bar"];
|
||||
test_block!(code.join("\n"),
|
||||
(Assignment (Ident value)
|
||||
(ArgumentBlockApplication (Ident foo) #(
|
||||
(OprSectionBoundary 1 (OprApp () (Ok "+") (Ident x)))
|
||||
(Ident bar)))));
|
||||
,(Assignment::new("value", sexp![
|
||||
(ArgumentBlockApplication (Ident foo) #(
|
||||
(OprSectionBoundary 1 (OprApp () (Ok "+") (Ident x)))
|
||||
(Ident bar)))])));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -733,8 +754,8 @@ fn operator_sections() {
|
||||
(OprSectionBoundary 1 (OprApp (OprApp () (Ok "+") (Number () "1" ()))
|
||||
(Ok "+") (Ident x))));
|
||||
test_block!("increment = 1 +",
|
||||
(Assignment (Ident increment)
|
||||
(OprSectionBoundary 1 (OprApp (Number () "1" ()) (Ok "+") ()))));
|
||||
,(Assignment::new("increment", sexp![
|
||||
(OprSectionBoundary 1 (OprApp (Number () "1" ()) (Ok "+") ()))])));
|
||||
test!("1+ << 2*",
|
||||
(OprSectionBoundary 1
|
||||
(OprApp (OprApp (Number () "1" ()) (Ok "+") ())
|
||||
@ -805,17 +826,18 @@ fn unary_operator_at_end_of_expression() {
|
||||
fn unspaced_operator_sequence() {
|
||||
// Add a negated value.
|
||||
test_block!("x = y+-z",
|
||||
(Assignment (Ident x) (OprApp (Ident y) (Ok "+") (UnaryOprApp "-" (Ident z)))));
|
||||
,(Assignment::new("x", sexp![(OprApp (Ident y) (Ok "+") (UnaryOprApp "-" (Ident z)))])));
|
||||
// Create an operator section that adds a negated value to its input.
|
||||
test_block!("x = +-z",
|
||||
(Assignment (Ident x) (OprSectionBoundary 1
|
||||
(OprApp () (Ok "+") (UnaryOprApp "-" (Ident z))))));
|
||||
,(Assignment::new("x", sexp![
|
||||
(OprSectionBoundary 1
|
||||
(OprApp () (Ok "+") (UnaryOprApp "-" (Ident z))))])));
|
||||
// The `-` can only be lexed as a unary operator, and unary operators cannot form sections.
|
||||
expect_invalid_node("main =\n x = y+-");
|
||||
// Assign a negative number to x.
|
||||
test_block!("x=-1", (Assignment (Ident x) (UnaryOprApp "-" (Number () "1" ()))));
|
||||
test_block!("x=-1", ,(Assignment::new("x", sexp![(UnaryOprApp "-" (Number () "1" ()))])));
|
||||
// Assign a negated value to x.
|
||||
test_block!("x=-y", (Assignment (Ident x) (UnaryOprApp "-" (Ident y))));
|
||||
test_block!("x=-y", ,(Assignment::new("x", sexp![(UnaryOprApp "-" (Ident y))])));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -844,7 +866,7 @@ fn minus_unary() {
|
||||
test!("-x", (UnaryOprApp "-" (Ident x)));
|
||||
test!("(-x)", (Group (UnaryOprApp "-" (Ident x))));
|
||||
test!("-(x * x)", (UnaryOprApp "-" (Group (OprApp (Ident x) (Ok "*") (Ident x)))));
|
||||
test_block!("x=-x", (Assignment (Ident x) (UnaryOprApp "-" (Ident x))));
|
||||
test_block!("x=-x", ,(Assignment::new("x", sexp![(UnaryOprApp "-" (Ident x))])));
|
||||
test!("-x+x", (OprApp (UnaryOprApp "-" (Ident x)) (Ok "+") (Ident x)));
|
||||
test!("-x*x", (OprApp (UnaryOprApp "-" (Ident x)) (Ok "*") (Ident x)));
|
||||
}
|
||||
@ -871,9 +893,9 @@ fn method_app_in_minus_unary() {
|
||||
#[test]
|
||||
fn autoscope_operator() {
|
||||
test!("x : ..True", (TypeSignatureDeclaration ((Ident x) ":" (AutoscopedIdentifier ".." True))));
|
||||
test_block!("x = ..True", (Assignment (Ident x) (AutoscopedIdentifier ".." True)));
|
||||
test_block!("x = ..True", ,(Assignment::new("x", sexp![(AutoscopedIdentifier ".." True)])));
|
||||
test_block!("x = f ..True",
|
||||
(Assignment (Ident x) (App (Ident f) (AutoscopedIdentifier ".." True))));
|
||||
,(Assignment::new("x", sexp![(App (Ident f) (AutoscopedIdentifier ".." True))])));
|
||||
expect_invalid_node("x = ..not_a_constructor");
|
||||
expect_invalid_node("x = case a of ..True -> True");
|
||||
expect_invalid_node("x = ..4");
|
||||
@ -1002,6 +1024,12 @@ fn type_signatures() {
|
||||
test!("val : Bool\nval = True",
|
||||
,(Function::new("val", sexp![(Ident True)])
|
||||
.with_sig(sexp![(Ident Bool)])));
|
||||
test!("val : Bool\n\nval = True",
|
||||
,(Function::new("val", sexp![(Ident True)])
|
||||
.with_sig(sexp![(Ident Bool)])));
|
||||
test!("val : Bool\n\n\nval = True",
|
||||
,(Function::new("val", sexp![(Ident True)])
|
||||
.with_sig(sexp![(Ident Bool)])));
|
||||
test!("val : Bool\ndifferent_name = True",
|
||||
(TypeSignatureDeclaration ((Ident val) ":" (Ident Bool)))
|
||||
,(Function::new("different_name", sexp![(Ident True)])));
|
||||
@ -1024,11 +1052,11 @@ fn type_signatures() {
|
||||
#[test]
|
||||
fn type_annotations() {
|
||||
test_block!("val = x : Int",
|
||||
(Assignment (Ident val) (TypeAnnotated (Ident x) ":" (Ident Int))));
|
||||
,(Assignment::new("val", sexp![(TypeAnnotated (Ident x) ":" (Ident Int))])));
|
||||
test_block!("val = foo (x : Int)",
|
||||
(Assignment (Ident val)
|
||||
(App (Ident foo)
|
||||
(Group (TypeAnnotated (Ident x) ":" (Ident Int))))));
|
||||
,(Assignment::new("val", sexp![
|
||||
(App (Ident foo)
|
||||
(Group (TypeAnnotated (Ident x) ":" (Ident Int))))])));
|
||||
test!("(x : My_Type _)",
|
||||
(Group
|
||||
(TypeAnnotated (Ident x)
|
||||
@ -1048,10 +1076,11 @@ fn type_annotations() {
|
||||
#[test]
|
||||
fn inline_text_literals() {
|
||||
test!(r#""I'm an inline raw text!""#, (TextLiteral #((Section "I'm an inline raw text!"))));
|
||||
test_block!(r#"zero_length = """#, (Assignment (Ident zero_length) (TextLiteral #())));
|
||||
test_block!(r#"zero_length = """#, ,(Assignment::new("zero_length", sexp![(TextLiteral #())])));
|
||||
test!(r#""type""#, (TextLiteral #((Section "type"))));
|
||||
test_block!(r#"unclosed = ""#, (Assignment (Ident unclosed) (TextLiteral #())));
|
||||
test_block!(r#"unclosed = "a"#, (Assignment (Ident unclosed) (TextLiteral #((Section "a")))));
|
||||
test_block!(r#"unclosed = ""#, ,(Assignment::new("unclosed", sexp![(TextLiteral #())])));
|
||||
test_block!(r#"unclosed = "a"#,
|
||||
,(Assignment::new("unclosed", sexp![(TextLiteral #((Section "a")))])));
|
||||
test!(r#"'Other quote type'"#, (TextLiteral #((Section "Other quote type"))));
|
||||
test!(r#""Non-escape: \n""#, (TextLiteral #((Section "Non-escape: \\n"))));
|
||||
test!(r#""Non-escape: \""#, (TextLiteral #((Section "Non-escape: \\"))));
|
||||
@ -1093,15 +1122,16 @@ x"#,
|
||||
(TextLiteral #((Section "multiline string that doesn't end in a newline")))
|
||||
(Ident x));
|
||||
test_block!("x = \"\"\"\n Indented multiline\nx",
|
||||
(Assignment (Ident x) (TextLiteral #((Section "Indented multiline"))))
|
||||
,(Assignment::new("x", sexp![(TextLiteral #((Section "Indented multiline")))]))
|
||||
(Ident x));
|
||||
test!("'''\n \\nEscape at start\n",
|
||||
(TextLiteral #((Escape 0x0A) (Section "Escape at start"))) ());
|
||||
test!("x =\n x = '''\n x\nx",
|
||||
,(Function::new("x", block![(Assignment (Ident x) (TextLiteral #((Section "x"))))]))
|
||||
,(Function::new("x", block![
|
||||
,(Assignment::new("x", sexp![(TextLiteral #((Section "x")))]))]))
|
||||
(Ident x));
|
||||
test_block!("foo = bar '''\n baz",
|
||||
(Assignment (Ident foo) (App (Ident bar) (TextLiteral #((Section "baz"))))));
|
||||
,(Assignment::new("foo", sexp![(App (Ident bar) (TextLiteral #((Section "baz"))))])));
|
||||
test!("'''\n \\t'", (TextLiteral #((Escape 0x09) (Section "'"))));
|
||||
test!("'''\n x\n \\t'", (TextLiteral #((Section "x") (Newline) (Escape 0x09) (Section "'"))));
|
||||
}
|
||||
@ -1188,10 +1218,10 @@ fn old_lambdas() {
|
||||
(App (Ident f) (OprApp (Ident x) (Ok "->") (BodyBlock #((Ident y))))));
|
||||
test!("x->y-> z", (OprApp (Ident x) (Ok "->") (OprApp (Ident y) (Ok "->") (Ident z))));
|
||||
test_block!("foo = x -> (y = bar x) -> x + y",
|
||||
(Assignment (Ident foo)
|
||||
(OprApp (Ident x) (Ok "->")
|
||||
(OprApp (Group (OprApp (Ident y) (Ok "=") (App (Ident bar) (Ident x)))) (Ok "->")
|
||||
(OprApp (Ident x) (Ok "+") (Ident y))))));
|
||||
,(Assignment::new("foo", sexp![
|
||||
(OprApp (Ident x) (Ok "->")
|
||||
(OprApp (Group (OprApp (Ident y) (Ok "=") (App (Ident bar) (Ident x)))) (Ok "->")
|
||||
(OprApp (Ident x) (Ok "+") (Ident y))))])));
|
||||
}
|
||||
|
||||
|
||||
@ -1200,8 +1230,10 @@ fn old_lambdas() {
|
||||
#[test]
|
||||
fn pattern_irrefutable() {
|
||||
test_block!("Point x_val = my_point",
|
||||
(Assignment (App (Ident Point) (Ident x_val)) (Ident my_point)));
|
||||
test_block!("Vector _ = x", (Assignment (App (Ident Vector) (Wildcard -1)) (Ident x)));
|
||||
,(Assignment::pattern(sexp![(App (Ident Point) (Ident x_val))],
|
||||
sexp![(Ident my_point)])));
|
||||
test_block!("Vector _ = x",
|
||||
,(Assignment::pattern(sexp![(App (Ident Vector) (Wildcard -1))], sexp![(Ident x)])));
|
||||
test_block!("X.y = z",
|
||||
,(Function::named(sexp![(OprApp (Ident X) (Ok ".") (Ident y))], sexp![(Ident z)])));
|
||||
}
|
||||
@ -1261,23 +1293,18 @@ fn case_expression() {
|
||||
|
||||
#[test]
|
||||
fn case_documentation() {
|
||||
#[rustfmt::skip]
|
||||
let code = [
|
||||
"case a of",
|
||||
" ## The Some case",
|
||||
" Some -> x",
|
||||
" ## The Int case",
|
||||
" Int -> x",
|
||||
];
|
||||
#[rustfmt::skip]
|
||||
let expected = block![
|
||||
test!([
|
||||
"case a of",
|
||||
" ## The Some case",
|
||||
" Some -> x",
|
||||
" ## The Int case",
|
||||
" Int -> x",
|
||||
].join("\n"),
|
||||
(CaseOf (Ident a) #(
|
||||
(((#((Section " The Some case")) #()) () () ()))
|
||||
((((#((Section " The Some case"))) #()) () () ()))
|
||||
((() (Ident Some) "->" (Ident x)))
|
||||
(((#((Section " The Int case")) #()) () () ()))
|
||||
((() (Ident Int) "->" (Ident x)))))
|
||||
];
|
||||
test(code.join("\n"), expected);
|
||||
((((#((Section " The Int case"))) #()) () () ()))
|
||||
((() (Ident Int) "->" (Ident x))))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -1313,11 +1340,11 @@ fn suspended_default_arguments_in_pattern() {
|
||||
#[test]
|
||||
fn suspended_default_arguments_in_expression() {
|
||||
test_block!("c = self.value ...",
|
||||
(Assignment (Ident c)
|
||||
(App (OprApp (Ident self) (Ok ".") (Ident value)) (SuspendedDefaultArguments))));
|
||||
,(Assignment::new("c", sexp![
|
||||
(App (OprApp (Ident self) (Ok ".") (Ident value)) (SuspendedDefaultArguments))])));
|
||||
test_block!("c = self.value...",
|
||||
(Assignment (Ident c)
|
||||
(App (OprApp (Ident self) (Ok ".") (Ident value)) (SuspendedDefaultArguments))));
|
||||
,(Assignment::new("c", sexp![
|
||||
(App (OprApp (Ident self) (Ok ".") (Ident value)) (SuspendedDefaultArguments))])));
|
||||
}
|
||||
|
||||
// === Private (project-private) keyword ===
|
||||
@ -1403,7 +1430,7 @@ mod numbers {
|
||||
|
||||
#[test]
|
||||
fn with_decimal() {
|
||||
test_block!("pi = 3.14", (Assignment (Ident pi) (Number () "3" ("." "14"))));
|
||||
test_block!("pi = 3.14", ,(Assignment::new("pi", sexp![(Number () "3" ("." "14"))])));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -1557,11 +1584,11 @@ fn skip() {
|
||||
|
||||
#[test]
|
||||
fn statement_in_expression_context() {
|
||||
test_block!("x = y = z", (Assignment (Ident x) (Invalid)));
|
||||
test_block!("x = y = z", ,(Assignment::new("x", sexp![(Invalid)])));
|
||||
test!("(y = z)", (Group(Invalid)));
|
||||
test!("(y = z) x", (App (Group (Invalid)) (Ident x)));
|
||||
test_block!("(f x = x)", (Group(Invalid)));
|
||||
test_block!("y = f x = x", (Assignment (Ident y) (Invalid)));
|
||||
test_block!("y = f x = x", ,(Assignment::new("y", sexp![(Invalid)])));
|
||||
}
|
||||
|
||||
|
||||
@ -1857,6 +1884,7 @@ fn expect_valid(code: &str) {
|
||||
|
||||
/// Builder for function definitions.
|
||||
struct Function {
|
||||
docs: lexpr::Value,
|
||||
annotations: Vec<lexpr::Value>,
|
||||
signature: lexpr::Value,
|
||||
private: lexpr::Value,
|
||||
@ -1874,6 +1902,7 @@ impl Function {
|
||||
|
||||
fn named(name: lexpr::Value, body: lexpr::Value) -> Self {
|
||||
Self {
|
||||
docs: sexp![()],
|
||||
annotations: vec![],
|
||||
signature: sexp![()],
|
||||
private: sexp![()],
|
||||
@ -1884,6 +1913,11 @@ impl Function {
|
||||
}
|
||||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
fn with_docs(self, docs: lexpr::Value) -> Self {
|
||||
Self { docs, ..self }
|
||||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
fn with_annotation(mut self, annotation: &str, arg: lexpr::Value) -> Self {
|
||||
let annotation = lexpr::Value::symbol(annotation);
|
||||
@ -1913,8 +1947,8 @@ impl Function {
|
||||
|
||||
impl From<Function> for lexpr::Value {
|
||||
#[rustfmt::skip]
|
||||
fn from(Function { annotations, signature, private, name, args, ret, body }: Function) -> Self {
|
||||
sexp![(Function ,annotations ,signature ,private ,name ,args ,ret ,body)]
|
||||
fn from(Function { docs, annotations, signature, private, name, args, ret, body }: Function) -> Self {
|
||||
sexp![(Function ,docs ,annotations ,signature ,private ,name ,args ,ret ,body)]
|
||||
}
|
||||
}
|
||||
|
||||
@ -1943,6 +1977,7 @@ impl From<Arg> for lexpr::Value {
|
||||
|
||||
/// Builder for type constructor definitions.
|
||||
struct Constructor {
|
||||
docs: lexpr::Value,
|
||||
annotations: Vec<lexpr::Value>,
|
||||
private: lexpr::Value,
|
||||
name: lexpr::Value,
|
||||
@ -1953,6 +1988,7 @@ struct Constructor {
|
||||
impl Constructor {
|
||||
fn new(name: &str) -> Self {
|
||||
Self {
|
||||
docs: sexp![()],
|
||||
annotations: vec![],
|
||||
private: sexp![()],
|
||||
name: lexpr::Value::symbol(name),
|
||||
@ -1961,6 +1997,11 @@ impl Constructor {
|
||||
}
|
||||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
fn with_docs(self, docs: lexpr::Value) -> Self {
|
||||
Self { docs, ..self }
|
||||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
fn with_annotation(mut self, annotation: &str, arg: lexpr::Value) -> Self {
|
||||
let annotation = lexpr::Value::symbol(annotation);
|
||||
@ -1986,7 +2027,39 @@ impl Constructor {
|
||||
|
||||
impl From<Constructor> for lexpr::Value {
|
||||
#[rustfmt::skip]
|
||||
fn from(Constructor { annotations, private, name, args, arg_lines }: Constructor) -> Self {
|
||||
sexp![(ConstructorDefinition ,annotations ,private ,name ,args, arg_lines)]
|
||||
fn from(Constructor { docs, annotations, private, name, args, arg_lines }: Constructor) -> Self {
|
||||
sexp![(ConstructorDefinition ,docs ,annotations ,private ,name ,args, arg_lines)]
|
||||
}
|
||||
}
|
||||
|
||||
// === Assignments ===
|
||||
|
||||
/// Builder for variable assignments.
|
||||
struct Assignment {
|
||||
docs: lexpr::Value,
|
||||
pattern: lexpr::Value,
|
||||
value: lexpr::Value,
|
||||
}
|
||||
|
||||
impl Assignment {
|
||||
fn new(name: &str, body: lexpr::Value) -> Self {
|
||||
let name = lexpr::Value::symbol(name);
|
||||
Self::pattern(sexp![(Ident, name)], body)
|
||||
}
|
||||
|
||||
fn pattern(pattern: lexpr::Value, value: lexpr::Value) -> Self {
|
||||
Self { docs: sexp![()], pattern, value }
|
||||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
fn with_docs(self, docs: lexpr::Value) -> Self {
|
||||
Self { docs, ..self }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Assignment> for lexpr::Value {
|
||||
#[rustfmt::skip]
|
||||
fn from(Assignment { docs, pattern, value }: Assignment) -> Self {
|
||||
sexp![(Assignment ,docs ,pattern ,value)]
|
||||
}
|
||||
}
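The `Function`, `Constructor`, and `Assignment` test builders above all follow the same shape: start from a minimal value, layer optional prefix lines such as docs or annotations, then convert to an s-expression through a `From` impl so the result can be unquoted inside `sexp![...]` in `test!`/`test_block!`. Below is a minimal standalone sketch of that builder-to-value shape, using plain strings instead of the harness's `lexpr`-based types; all names in it are illustrative, not the repository's API.

```rust
// Illustrative stand-in for the test builders: defaults first, optional
// prefix lines layered on, then a `From` conversion to the expected value.
struct Assignment {
    docs: Option<String>,
    pattern: String,
    value: String,
}

impl Assignment {
    fn new(name: &str, value: &str) -> Self {
        Self { docs: None, pattern: format!("(Ident {name})"), value: value.to_owned() }
    }

    fn with_docs(self, docs: &str) -> Self {
        Self { docs: Some(docs.to_owned()), ..self }
    }
}

impl From<Assignment> for String {
    fn from(Assignment { docs, pattern, value }: Assignment) -> Self {
        // A missing doc line prints as `()`, mirroring how the real builders
        // default optional fields to the empty s-expression.
        let docs = docs.unwrap_or_else(|| "()".to_owned());
        format!("(Assignment {docs} {pattern} {value})")
    }
}

fn main() {
    let expected: String = Assignment::new("x", "(Number 1)").with_docs("(doc)").into();
    assert_eq!(expected, "(Assignment (doc) (Ident x) (Number 1))");
}
```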
|
||||
|
@ -1,201 +0,0 @@
|
||||
//! Prints a debug representation of Enso documentation found in the given Enso source file(s).
|
||||
|
||||
// === Non-Standard Linter Configuration ===
|
||||
#![allow(clippy::option_map_unit_fn)]
|
||||
#![allow(clippy::precedence)]
|
||||
#![allow(dead_code)]
|
||||
#![deny(unconditional_recursion)]
|
||||
#![warn(missing_docs)]
|
||||
#![warn(trivial_casts)]
|
||||
#![warn(unused_qualifications)]
|
||||
|
||||
use enso_doc_parser::*;
|
||||
use enso_parser::prelude::*;
|
||||
|
||||
use enso_parser::syntax::tree::DocComment;
|
||||
use enso_parser::syntax::tree::TextElement;
|
||||
|
||||
|
||||
|
||||
// ====================================
|
||||
// === Debug Representation Printer ===
|
||||
// ====================================
|
||||
|
||||
fn main() {
|
||||
let args = std::env::args().skip(1);
|
||||
if args.len() == 0 {
|
||||
use std::io::Read;
|
||||
let mut input = String::new();
|
||||
std::io::stdin().read_to_string(&mut input).unwrap();
|
||||
check_doc_parse("<stdin>", input.as_str());
|
||||
} else {
|
||||
args.for_each(|path| check_doc_parse(&path, &std::fs::read_to_string(&path).unwrap()));
|
||||
}
|
||||
}
|
||||
|
||||
/// Print the token for the input file.
|
||||
fn check_doc_parse(filename: &str, code: &str) {
|
||||
println!("File: {filename}");
|
||||
let docs = extract_docs(filename, code);
|
||||
for doc in &docs {
|
||||
for token in parse(doc) {
|
||||
println!("{token:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract docs from the input file.
|
||||
fn extract_docs(_filename: &str, mut code: &str) -> Vec<String> {
|
||||
if let Some((_meta, code_)) = enso_parser::metadata::parse(code) {
|
||||
code = code_;
|
||||
}
|
||||
let ast = enso_parser::Parser::new().parse_module(code);
|
||||
let docs = RefCell::new(vec![]);
|
||||
ast.visit_trees(|tree| match &tree.variant {
|
||||
enso_parser::syntax::tree::Variant::Documented(doc) => {
|
||||
docs.borrow_mut().push(doc.documentation.clone());
|
||||
}
|
||||
enso_parser::syntax::tree::Variant::CaseOf(case_of) => {
|
||||
for case in case_of.cases.iter().filter_map(|c| c.case.as_ref()) {
|
||||
docs.borrow_mut().extend(case.documentation.clone());
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
});
|
||||
docs.take().iter().map(content).collect()
|
||||
}
|
||||
|
||||
/// Return the contents of the comment, with leading whitespace, the `##` token, and following
|
||||
/// empty lines removed; newlines will be normalized.
|
||||
pub fn content(node: &DocComment) -> String {
|
||||
let mut buf = String::new();
|
||||
for element in &node.elements {
|
||||
match element {
|
||||
TextElement::Section { text } => buf.push_str(&text.code.repr),
|
||||
TextElement::Newline { .. } => buf.push('\n'),
|
||||
TextElement::Escape {
|
||||
token:
|
||||
token @ enso_parser::syntax::token::TextEscape {
|
||||
variant: enso_parser::syntax::token::variant::TextEscape { value },
|
||||
..
|
||||
},
|
||||
} => {
|
||||
if let Some(c) = value.to_char() {
|
||||
buf.push(c);
|
||||
} else {
|
||||
// Invalid escape character, or unpaired surrogate that can't be represented in
|
||||
// a Rust string.
|
||||
buf.push_str(**token.code)
|
||||
}
|
||||
}
|
||||
// Unreachable.
|
||||
TextElement::Splice { .. } => continue,
|
||||
}
|
||||
}
|
||||
buf
|
||||
}
|
||||
|
||||
/// Lex the given documentation, and return the sequence of tokens.
|
||||
fn parse(input: &str) -> Vec<Token> {
|
||||
let mut docs = TokenCollector::<IgnoredLocation>::default();
|
||||
let mut lexer = Lexer::default();
|
||||
for (line_number, line) in input.trim_start().lines().enumerate() {
|
||||
let location = Location::start_of_line(line_number);
|
||||
let line = Span { location, text: line };
|
||||
lexer.line::<IgnoredLocation>(line, &mut docs);
|
||||
}
|
||||
lexer.finish(&mut docs);
|
||||
docs.tokens
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =======================
|
||||
// === Token Collector ===
|
||||
// =======================
|
||||
|
||||
/// Token consumer that reifies the sequence of tokens for debugging and tests.
|
||||
#[derive(Default, Debug)]
|
||||
struct TokenCollector<L> {
|
||||
tokens: Vec<Token>,
|
||||
location_type: ZST<L>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum Token {
|
||||
Tag { tag: Tag, description: String },
|
||||
EnterMarkedSection { mark: Mark, header: String },
|
||||
EnterKeyedSection { header: String },
|
||||
Start(ScopeType),
|
||||
End(ScopeType),
|
||||
StartQuote,
|
||||
EndQuote,
|
||||
Text(String),
|
||||
RawLine(String),
|
||||
}
|
||||
|
||||
impl<L> TokenConsumer<L> for TokenCollector<L> {
|
||||
fn tag(&mut self, tag: Tag, description: Option<Span<L>>) {
|
||||
self.tokens.push(Token::Tag {
|
||||
tag,
|
||||
description: description.map(String::from).unwrap_or_default(),
|
||||
})
|
||||
}
|
||||
|
||||
fn enter_marked_section(&mut self, mark: Mark, header: Option<Span<L>>) {
|
||||
self.tokens.push(Token::EnterMarkedSection {
|
||||
mark,
|
||||
header: header.map(String::from).unwrap_or_default(),
|
||||
})
|
||||
}
|
||||
|
||||
fn enter_keyed_section(&mut self, header: Span<L>) {
|
||||
self.tokens.push(Token::EnterKeyedSection { header: header.into() })
|
||||
}
|
||||
|
||||
fn text(&mut self, text: Span<L>) {
|
||||
match self.tokens.last_mut() {
|
||||
Some(Token::Text(current)) => {
|
||||
current.push(' ');
|
||||
current.push_str(text.text.as_ref())
|
||||
}
|
||||
_ => self.tokens.push(Token::Text(text.text.into())),
|
||||
}
|
||||
}
|
||||
|
||||
fn start_list(&mut self) {
|
||||
self.tokens.push(Token::Start(ScopeType::List));
|
||||
}
|
||||
|
||||
fn start_list_item(&mut self) {
|
||||
self.tokens.push(Token::Start(ScopeType::ListItem));
|
||||
}
|
||||
|
||||
fn start_paragraph(&mut self) {
|
||||
self.tokens.push(Token::Start(ScopeType::Paragraph));
|
||||
}
|
||||
|
||||
fn start_raw(&mut self) {
|
||||
self.tokens.push(Token::Start(ScopeType::Raw));
|
||||
}
|
||||
|
||||
fn start_quote(&mut self) {
|
||||
self.tokens.push(Token::StartQuote);
|
||||
}
|
||||
|
||||
fn end_quote(&mut self) {
|
||||
self.tokens.push(Token::EndQuote);
|
||||
}
|
||||
|
||||
fn whitespace(&mut self) {
|
||||
self.tokens.push(Token::Text(" ".to_owned()));
|
||||
}
|
||||
|
||||
fn raw_line(&mut self, text: Span<L>) {
|
||||
self.tokens.push(Token::RawLine(text.text.into()));
|
||||
}
|
||||
|
||||
fn end(&mut self, scope: ScopeType) {
|
||||
self.tokens.push(Token::End(scope));
|
||||
}
|
||||
}
|
@ -9,6 +9,7 @@ use crate::expression_to_pattern;
|
||||
use crate::source::Code;
|
||||
use crate::syntax::maybe_with_error;
|
||||
use crate::syntax::operator;
|
||||
use crate::syntax::statement::try_parse_doc_comment;
|
||||
use crate::syntax::token;
|
||||
use crate::syntax::tree::SyntaxError;
|
||||
use crate::syntax::Item;
|
||||
@ -333,11 +334,12 @@ fn parse_case_line<'s>(
|
||||
precedence: &mut operator::Precedence<'s>,
|
||||
) -> (syntax::tree::CaseLine<'s>, Option<SyntaxError>) {
|
||||
let syntax::item::Line { newline, mut items } = line;
|
||||
if let documentation @ Some(_) = try_parse_doc_comment(&mut items, precedence) {
|
||||
if let Some(docs) = try_parse_doc_comment(&mut items) {
|
||||
let doc_line = Some(syntax::tree::DocLine { docs, newlines: vec![] });
|
||||
return (
|
||||
syntax::tree::CaseLine {
|
||||
newline: newline.into(),
|
||||
case: Some(syntax::tree::Case { documentation, ..default() }),
|
||||
case: Some(syntax::tree::Case { doc_line, ..default() }),
|
||||
},
|
||||
default(),
|
||||
);
|
||||
@ -398,30 +400,6 @@ fn find_top_level_arrow(items: &[Item]) -> Option<usize> {
|
||||
}
|
||||
}
|
||||
|
||||
fn try_parse_doc_comment<'s>(
|
||||
items: &mut Vec<Item<'s>>,
|
||||
precedence: &mut operator::Precedence<'s>,
|
||||
) -> Option<syntax::tree::DocComment<'s>> {
|
||||
if matches!(
|
||||
items.first(),
|
||||
Some(Item::Token(token @ Token { variant: token::Variant::TextStart(_), .. })) if token.code == "##"
|
||||
) {
|
||||
let Some(syntax::Tree {
|
||||
variant: syntax::tree::Variant::Documented(mut documented),
|
||||
span,
|
||||
..
|
||||
}) = precedence.resolve(items)
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
debug_assert_eq!(documented.expression, None);
|
||||
documented.documentation.open.left_offset += span.left_offset;
|
||||
Some(documented.documentation)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Array literal.
|
||||
pub fn array<'s>() -> Definition<'s> {
|
||||
crate::macro_definition! {("[", everything(), "]", nothing()) array_body}
|
||||
|
@ -18,10 +18,14 @@ use crate::syntax::statement::type_def::try_parse_type_def;
|
||||
use crate::syntax::token;
|
||||
use crate::syntax::tree;
|
||||
use crate::syntax::tree::block;
|
||||
use crate::syntax::tree::AnnotationLine;
|
||||
use crate::syntax::tree::ArgumentDefinition;
|
||||
use crate::syntax::tree::DocComment;
|
||||
use crate::syntax::tree::DocLine;
|
||||
use crate::syntax::tree::FunctionAnnotation;
|
||||
use crate::syntax::tree::SyntaxError;
|
||||
use crate::syntax::tree::TypeSignature;
|
||||
use crate::syntax::tree::TypeSignatureLine;
|
||||
use crate::syntax::treebuilding::Spacing;
|
||||
use crate::syntax::Item;
|
||||
use crate::syntax::Token;
|
||||
@ -44,7 +48,7 @@ impl<'s> BodyBlockParser<'s> {
|
||||
) -> Tree<'s> {
|
||||
let lines = compound_lines_with_tail_expression(lines, |prefixes, line, is_tail| {
|
||||
if is_tail {
|
||||
self.statement_parser.parse_tail_expression(line, precedence)
|
||||
self.statement_parser.parse_tail_expression(prefixes, line, precedence)
|
||||
} else {
|
||||
self.statement_parser.parse_statement(prefixes, line, precedence)
|
||||
}
|
||||
@ -68,7 +72,7 @@ impl<'s> BodyBlockParser<'s> {
|
||||
fn compound_lines<'s>(
|
||||
lines: &mut Vec<item::Line<'s>>,
|
||||
mut parse_line: impl FnMut(
|
||||
&mut Vec<Line<'s, StatementPrefix<'s>>>,
|
||||
&mut StatementPrefixes<'s>,
|
||||
item::Line<'s>,
|
||||
) -> Line<'s, StatementOrPrefix<'s>>,
|
||||
) -> Vec<block::Line<'s>> {
|
||||
@ -82,7 +86,7 @@ fn compound_lines<'s>(
|
||||
fn compound_lines_with_tail_expression<'s>(
|
||||
lines: &mut Vec<item::Line<'s>>,
|
||||
parse_line: impl FnMut(
|
||||
&mut Vec<Line<'s, StatementPrefix<'s>>>,
|
||||
&mut StatementPrefixes<'s>,
|
||||
item::Line<'s>,
|
||||
bool,
|
||||
) -> Line<'s, StatementOrPrefix<'s>>,
|
||||
@ -97,36 +101,32 @@ fn compound_lines_with_tail_expression<'s>(
|
||||
fn compound_lines_maybe_with_tail_expression<'s>(
|
||||
lines: &mut Vec<item::Line<'s>>,
|
||||
mut parse_line: impl FnMut(
|
||||
&mut Vec<Line<'s, StatementPrefix<'s>>>,
|
||||
&mut StatementPrefixes<'s>,
|
||||
item::Line<'s>,
|
||||
bool,
|
||||
) -> Line<'s, StatementOrPrefix<'s>>,
|
||||
tail_index: Option<usize>,
|
||||
) -> Vec<block::Line<'s>> {
|
||||
let mut block_lines = Vec::new();
|
||||
let mut line_prefixes = Vec::new();
|
||||
let mut line_prefixes = StatementPrefixes::default();
|
||||
for (i, line) in lines.drain(..).enumerate() {
|
||||
let is_tail = tail_index == Some(i);
|
||||
match parse_line(&mut line_prefixes, line, is_tail) {
|
||||
Line { newline, content: Some(StatementOrPrefix::Statement(statement)) } => {
|
||||
for Line { newline, content } in line_prefixes.drain(..) {
|
||||
block_lines.push(block::Line { newline, expression: content.map(Tree::from) })
|
||||
}
|
||||
line_prefixes.drain_unused_into(&mut block_lines);
|
||||
block_lines.push(block::Line { newline, expression: Some(statement) })
|
||||
}
|
||||
Line { newline, content: Some(StatementOrPrefix::Prefix(prefix)) } =>
|
||||
line_prefixes.push(Line { newline, content: Some(prefix) }),
|
||||
line_prefixes.push(newline, prefix),
|
||||
Line { newline, content: None } =>
|
||||
if line_prefixes.is_empty() {
|
||||
if line_prefixes.prefixes.is_empty() {
|
||||
block_lines.push(newline.into());
|
||||
} else {
|
||||
line_prefixes.push(newline.into());
|
||||
line_prefixes.push_newline(newline);
|
||||
},
|
||||
}
|
||||
}
|
||||
for Line { newline, content } in line_prefixes {
|
||||
block_lines.push(block::Line { newline, expression: content.map(Tree::from) })
|
||||
}
|
||||
line_prefixes.drain_unused_into(&mut block_lines);
|
||||
block_lines
|
||||
}
|
||||
|
||||
@ -157,7 +157,7 @@ struct StatementParser<'s> {
|
||||
impl<'s> StatementParser<'s> {
|
||||
fn parse_statement(
|
||||
&mut self,
|
||||
prefixes: &mut Vec<Line<'s, StatementPrefix<'s>>>,
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
line: item::Line<'s>,
|
||||
precedence: &mut Precedence<'s>,
|
||||
) -> Line<'s, StatementOrPrefix<'s>> {
|
||||
@ -170,10 +170,11 @@ impl<'s> StatementParser<'s> {
|
||||
|
||||
fn parse_tail_expression(
|
||||
&mut self,
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
line: item::Line<'s>,
|
||||
precedence: &mut Precedence<'s>,
|
||||
) -> Line<'s, StatementOrPrefix<'s>> {
|
||||
parse_statement(&mut vec![], line, precedence, &mut self.args_buffer, StatementContext {
|
||||
parse_statement(prefixes, line, precedence, &mut self.args_buffer, StatementContext {
|
||||
evaluation_context: EvaluationContext::Eager,
|
||||
visibility_context: VisibilityContext::Private,
|
||||
tail_expression: true,
|
||||
@ -182,7 +183,7 @@ impl<'s> StatementParser<'s> {
|
||||
|
||||
fn parse_module_statement(
|
||||
&mut self,
|
||||
prefixes: &mut Vec<Line<'s, StatementPrefix<'s>>>,
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
line: item::Line<'s>,
|
||||
precedence: &mut Precedence<'s>,
|
||||
) -> Line<'s, StatementOrPrefix<'s>> {
|
||||
@ -219,6 +220,7 @@ fn scan_private_keywords<'s>(items: impl IntoIterator<Item = impl AsRef<Item<'s>
|
||||
enum StatementPrefix<'s> {
|
||||
TypeSignature(TypeSignature<'s>),
|
||||
Annotation(FunctionAnnotation<'s>),
|
||||
Documentation(DocComment<'s>),
|
||||
}
|
||||
|
||||
impl<'s> From<StatementPrefix<'s>> for Tree<'s> {
|
||||
@ -228,6 +230,7 @@ impl<'s> From<StatementPrefix<'s>> for Tree<'s> {
|
||||
Tree::type_signature_declaration(signature),
|
||||
StatementPrefix::Annotation(annotation) =>
|
||||
Tree::annotation(annotation).with_error(SyntaxError::AnnotationExpectedDefinition),
|
||||
StatementPrefix::Documentation(docs) => Tree::documentation(docs),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -261,8 +264,27 @@ impl<'s> From<Tree<'s>> for StatementOrPrefix<'s> {
|
||||
}
|
||||
}
|
||||
|
||||
enum StatementPrefixLine<'s> {
|
||||
TypeSignature(TypeSignatureLine<'s>),
|
||||
Annotation(AnnotationLine<'s>),
|
||||
Documentation(DocLine<'s>),
|
||||
}
|
||||
|
||||
impl<'s> StatementPrefixLine<'s> {
|
||||
fn new(prefix: StatementPrefix<'s>, newlines: NonEmptyVec<token::Newline<'s>>) -> Self {
|
||||
match prefix {
|
||||
StatementPrefix::TypeSignature(signature) =>
|
||||
Self::TypeSignature(TypeSignatureLine { signature, newlines }),
|
||||
StatementPrefix::Annotation(annotation) =>
|
||||
Self::Annotation(AnnotationLine { annotation, newlines }),
|
||||
StatementPrefix::Documentation(docs) =>
|
||||
Self::Documentation(DocLine { docs, newlines: newlines.into() }),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_statement<'s>(
|
||||
prefixes: &mut Vec<Line<'s, StatementPrefix<'s>>>,
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
mut line: item::Line<'s>,
|
||||
precedence: &mut Precedence<'s>,
|
||||
args_buffer: &mut Vec<ArgumentDefinition<'s>>,
|
||||
@ -273,29 +295,32 @@ fn parse_statement<'s>(
|
||||
let private_keywords = scan_private_keywords(&line.items);
|
||||
let start = private_keywords;
|
||||
let items = &mut line.items;
|
||||
if let Some(annotation) = try_parse_annotation(items, start, precedence) {
|
||||
let parsed = None
|
||||
.or_else(|| {
|
||||
try_parse_annotation(items, start, precedence)
|
||||
.map(StatementPrefix::Annotation)
|
||||
.map(StatementOrPrefix::Prefix)
|
||||
})
|
||||
.or_else(|| {
|
||||
try_parse_type_def(items, start, precedence, args_buffer)
|
||||
.map(StatementOrPrefix::Statement)
|
||||
})
|
||||
.or_else(|| {
|
||||
try_parse_doc_comment(items)
|
||||
.map(StatementPrefix::Documentation)
|
||||
.map(StatementOrPrefix::Prefix)
|
||||
});
|
||||
if let Some(parsed) = parsed {
|
||||
debug_assert_eq!(items.len(), start);
|
||||
return Line {
|
||||
newline,
|
||||
content: apply_private_keywords(
|
||||
Some(StatementOrPrefix::Prefix(StatementPrefix::Annotation(annotation))),
|
||||
Some(parsed),
|
||||
items.drain(..),
|
||||
statement_context.visibility_context,
|
||||
),
|
||||
};
|
||||
}
|
||||
if let Some(type_def) = try_parse_type_def(items, start, precedence, args_buffer) {
|
||||
debug_assert_eq!(items.len(), start);
|
||||
return Line {
|
||||
newline,
|
||||
content: apply_private_keywords(
|
||||
Some(type_def),
|
||||
items.drain(..),
|
||||
statement_context.visibility_context,
|
||||
)
|
||||
.map(StatementOrPrefix::Statement),
|
||||
};
|
||||
}
|
||||
let top_level_operator = match find_top_level_operator(&items[start..]) {
|
||||
Ok(top_level_operator) => top_level_operator.map(|(i, t)| (i + start, t)),
|
||||
Err(e) =>
|
||||
@ -334,19 +359,190 @@ fn parse_statement<'s>(
|
||||
}
|
||||
}
|
||||
Some(_) => unreachable!(),
|
||||
None => {
|
||||
let statement = precedence.resolve_offset(start, items);
|
||||
debug_assert!(items.len() <= start);
|
||||
Line {
|
||||
newline,
|
||||
content: apply_private_keywords(
|
||||
statement,
|
||||
items.drain(..),
|
||||
statement_context.visibility_context,
|
||||
)
|
||||
.map(StatementOrPrefix::Statement),
|
||||
}
|
||||
None => parse_expression_statement(
|
||||
prefixes,
|
||||
start,
|
||||
item::Line { newline, items: mem::take(items) },
|
||||
precedence,
|
||||
statement_context.visibility_context,
|
||||
)
|
||||
.map_content(StatementOrPrefix::Statement),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct StatementPrefixes<'s> {
|
||||
prefixes: Vec<(token::Newline<'s>, StatementPrefix<'s>, usize)>,
|
||||
newlines: Vec<token::Newline<'s>>,
|
||||
}
|
||||
|
||||
impl<'s> StatementPrefixes<'s> {
|
||||
fn push(&mut self, newline: token::Newline<'s>, prefix: StatementPrefix<'s>) {
|
||||
let newlines_start = self.newlines.len();
|
||||
self.prefixes.push((newline, prefix, newlines_start))
|
||||
}
|
||||
|
||||
fn push_newline(&mut self, newline: token::Newline<'s>) {
|
||||
self.newlines.push(newline)
|
||||
}
|
||||
|
||||
fn last(&self) -> Option<&StatementPrefix<'s>> {
|
||||
self.prefixes.last().map(|(_, prefix, _)| prefix)
|
||||
}
|
||||
|
||||
/// `first_newline`:
|
||||
/// - Before the call, must contain the first newline after the prefix.
|
||||
/// - Upon return, will contain the newline before the prefix.
|
||||
fn pop(&mut self, first_newline: &mut token::Newline<'s>) -> StatementPrefixLine<'s> {
|
||||
let (newline_before_prefix, prefix, trailing_newlines_start) = self.prefixes.pop().unwrap();
|
||||
let original_first_newline = mem::replace(first_newline, newline_before_prefix);
|
||||
let trailing_newlines = self.newlines.drain(trailing_newlines_start..);
|
||||
let mut newlines = Vec::with_capacity(trailing_newlines.len() + 1);
|
||||
newlines.extend(trailing_newlines);
|
||||
let newlines = NonEmptyVec::from_vec_and_last(newlines, original_first_newline);
|
||||
StatementPrefixLine::new(prefix, newlines)
|
||||
}
|
||||
|
||||
fn drain_unused_into(&mut self, lines: &mut Vec<block::Line<'s>>) {
|
||||
lines.reserve(self.prefixes.len() + self.newlines.len());
|
||||
let mut empty_lines = self.newlines.drain(..).map(block::Line::from);
|
||||
let mut prev_trailing_newlines_start = 0;
|
||||
for (newline_before_prefix, prefix, trailing_newlines_start) in self.prefixes.drain(..) {
|
||||
let trailing_newlines = trailing_newlines_start - prev_trailing_newlines_start;
|
||||
prev_trailing_newlines_start = trailing_newlines_start;
|
||||
lines.extend((&mut empty_lines).take(trailing_newlines));
|
||||
lines.push(block::Line {
|
||||
newline: newline_before_prefix,
|
||||
expression: Some(prefix.into()),
|
||||
});
|
||||
}
|
||||
lines.extend(empty_lines);
|
||||
}
|
||||
}
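The `pop` contract above is easy to misread: the caller passes in the newline that followed the prefix and receives back the newline that preceded it, while any blank-line newlines recorded since the prefix was pushed travel with the popped prefix line. A small self-contained sketch of that bookkeeping, with plain `String`s standing in for the parser's newline tokens (types and names here are illustrative only):

```rust
use std::mem;

/// Illustrative stand-in for `StatementPrefixes`: each pushed prefix records the
/// index where its trailing blank-line newlines begin in `newlines`.
#[derive(Default)]
struct Prefixes {
    prefixes: Vec<(String, String, usize)>, // (newline before prefix, prefix, newlines start)
    newlines: Vec<String>,
}

impl Prefixes {
    fn push(&mut self, newline: String, prefix: String) {
        let newlines_start = self.newlines.len();
        self.prefixes.push((newline, prefix, newlines_start));
    }

    fn push_newline(&mut self, newline: String) {
        self.newlines.push(newline);
    }

    /// Mirrors the swap contract of `pop` above: on entry `first_newline` is the
    /// newline after the prefix; on exit it is the newline before the prefix.
    fn pop(&mut self, first_newline: &mut String) -> (String, Vec<String>) {
        let (newline_before, prefix, trailing_start) = self.prefixes.pop().unwrap();
        let original_first = mem::replace(first_newline, newline_before);
        let mut newlines: Vec<String> = self.newlines.drain(trailing_start..).collect();
        newlines.push(original_first);
        (prefix, newlines)
    }
}

fn main() {
    let mut prefixes = Prefixes::default();
    prefixes.push("\n#1".to_owned(), "## docs".to_owned()); // newline before the doc comment
    prefixes.push_newline("\n#2".to_owned()); // a blank line after the doc comment
    let mut first_newline = "\n#3".to_owned(); // newline before the documented statement
    let (prefix, newlines) = prefixes.pop(&mut first_newline);
    assert_eq!(prefix, "## docs");
    assert_eq!(first_newline, "\n#1"); // caller now owns the newline before the prefix
    assert_eq!(newlines, vec!["\n#2".to_owned(), "\n#3".to_owned()]);
}
```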
|
||||
|
||||
fn take_doc_line<'s>(
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
first_newline: &mut token::Newline<'s>,
|
||||
) -> Option<DocLine<'s>> {
|
||||
if let Some(StatementPrefix::Documentation(_)) = prefixes.last() {
|
||||
let StatementPrefixLine::Documentation(doc_line) = prefixes.pop(first_newline) else {
|
||||
unreachable!()
|
||||
};
|
||||
Some(doc_line)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_expression_statement<'s>(
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
start: usize,
|
||||
mut line: item::Line<'s>,
|
||||
precedence: &mut Precedence<'s>,
|
||||
visibility_context: VisibilityContext,
|
||||
) -> Line<'s, Tree<'s>> {
|
||||
let expression = precedence.resolve_offset(start, &mut line.items);
|
||||
debug_assert!(line.items.len() <= start);
|
||||
let expression = apply_private_keywords(expression, line.items.drain(..), visibility_context);
|
||||
let mut first_newline = line.newline;
|
||||
let expression =
|
||||
expression.map(|expression| to_statement(prefixes, &mut first_newline, expression));
|
||||
Line { newline: first_newline, content: expression }
|
||||
}
|
||||
|
||||
/// `first_newline`:
|
||||
/// - Before the call, must contain the first newline before `expression_or_statement`.
|
||||
/// - Upon return, will contain the newline before the returned `Tree` (which will be different from
|
||||
/// the passed value if any prefixes were consumed).
|
||||
fn to_statement<'s>(
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
first_newline: &mut token::Newline<'s>,
|
||||
expression_or_statement: Tree<'s>,
|
||||
) -> Tree<'s> {
|
||||
use tree::Variant::*;
|
||||
let is_expression = match &expression_or_statement.variant {
|
||||
// Currently could be expression or statement--treating as expression.
|
||||
Invalid(_) => true,
|
||||
// Currently could be expression or statement--treating as statement so prefix-line
|
||||
// annotations don't affect how documentation is attached to a type.
|
||||
AnnotatedBuiltin(_) => false,
|
||||
// Expression
|
||||
ArgumentBlockApplication(_)
|
||||
| OperatorBlockApplication(_)
|
||||
| Ident(_)
|
||||
| Number(_)
|
||||
| Wildcard(_)
|
||||
| SuspendedDefaultArguments(_)
|
||||
| TextLiteral(_)
|
||||
| App(_)
|
||||
| NamedApp(_)
|
||||
| OprApp(_)
|
||||
| UnaryOprApp(_)
|
||||
| AutoscopedIdentifier(_)
|
||||
| OprSectionBoundary(_)
|
||||
| TemplateFunction(_)
|
||||
| MultiSegmentApp(_)
|
||||
| Group(_)
|
||||
| TypeAnnotated(_)
|
||||
| CaseOf(_)
|
||||
| Lambda(_)
|
||||
| Array(_)
|
||||
| Tuple(_) => true,
|
||||
// Statement
|
||||
Private(_)
|
||||
| TypeDef(_)
|
||||
| Assignment(_)
|
||||
| Function(_)
|
||||
| ForeignFunction(_)
|
||||
| Import(_)
|
||||
| Export(_)
|
||||
| TypeSignatureDeclaration(_)
|
||||
| Annotation(_)
|
||||
| Documentation(_)
|
||||
| ConstructorDefinition(_) => false,
|
||||
// Unexpected here
|
||||
BodyBlock(_) | ExpressionStatement(_) => false,
|
||||
};
|
||||
if is_expression {
|
||||
let doc_line = take_doc_line(prefixes, first_newline);
|
||||
Tree::expression_statement(doc_line, expression_or_statement)
|
||||
} else {
|
||||
expression_or_statement
|
||||
}
|
||||
}
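The classification above is what lets documentation land on plain expressions: anything judged to be an expression at block level is wrapped so it can carry a doc line, while statement variants are returned untouched. A reduced sketch of that rule, with a toy enum standing in for the parser's `Tree` variants (purely illustrative, not the real types):

```rust
// Sketch of the wrapping rule: at block level an expression absorbs any pending
// doc line and becomes an "expression statement"; statements pass through.
#[derive(Debug, PartialEq)]
enum Node {
    Expression(&'static str),
    Statement(&'static str),
    ExpressionStatement { doc: Option<&'static str>, expression: &'static str },
}

fn to_statement(pending_doc: &mut Option<&'static str>, node: Node) -> Node {
    match node {
        // Expressions take ownership of the pending doc line.
        Node::Expression(expression) =>
            Node::ExpressionStatement { doc: pending_doc.take(), expression },
        // Assignments, functions, type definitions, ... are already statements.
        other => other,
    }
}

fn main() {
    let mut doc = Some("## Adds one");
    assert_eq!(
        to_statement(&mut doc, Node::Expression("x + 1")),
        Node::ExpressionStatement { doc: Some("## Adds one"), expression: "x + 1" },
    );
    assert_eq!(to_statement(&mut doc, Node::Statement("y = 2")), Node::Statement("y = 2"));
}
```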
|
||||
|
||||
/// Parse the input as a documentation comment, if it matches the syntax.
|
||||
pub fn try_parse_doc_comment<'s>(items: &mut Vec<Item<'s>>) -> Option<DocComment<'s>> {
|
||||
match items.first() {
|
||||
Some(Item::Token(token @ Token { variant: token::Variant::TextStart(_), .. }))
|
||||
if token.code.repr.0 == "##" =>
|
||||
{
|
||||
let mut items = items.drain(..);
|
||||
let Some(Item::Token(open)) = items.next() else { unreachable!() };
|
||||
let elements = items
|
||||
.filter_map(|item| {
|
||||
let Item::Token(token) = item else { unreachable!() };
|
||||
match token.variant {
|
||||
token::Variant::TextSection(variant) => {
|
||||
let token = token.with_variant(variant);
|
||||
Some(tree::TextElement::Section { text: token })
|
||||
}
|
||||
token::Variant::TextEscape(variant) => {
|
||||
let token = token.with_variant(variant);
|
||||
Some(tree::TextElement::Escape { token })
|
||||
}
|
||||
token::Variant::TextNewline(_) => {
|
||||
let token = token::newline(token.left_offset, token.code);
|
||||
Some(tree::TextElement::Newline { newline: token })
|
||||
}
|
||||
token::Variant::TextEnd(_) => None,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
Some(DocComment { open: open.with_variant(token::variant::TextStart()), elements })
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@ -473,7 +669,7 @@ enum VisibilityContext {
|
||||
}
|
||||
|
||||
fn parse_assignment_like_statement<'s>(
|
||||
prefixes: &mut Vec<Line<'s, StatementPrefix<'s>>>,
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
mut line: item::Line<'s>,
|
||||
start: usize,
|
||||
operator: usize,
|
||||
@ -537,14 +733,14 @@ fn parse_assignment_like_statement<'s>(
|
||||
(expression, Some(qn_len)) => Type::Function { expression, qn_len },
|
||||
(None, None) => Type::InvalidNoExpressionNoQn,
|
||||
} {
|
||||
Type::Assignment { expression } => Line {
|
||||
newline,
|
||||
content: apply_private_keywords(
|
||||
Some(parse_assignment(start, items, operator, expression, precedence)),
|
||||
items.drain(..),
|
||||
visibility_context,
|
||||
),
|
||||
},
|
||||
Type::Assignment { expression } => AssignmentBuilder::new(
|
||||
start,
|
||||
item::Line { newline, items: mem::take(items) },
|
||||
operator,
|
||||
expression,
|
||||
precedence,
|
||||
)
|
||||
.build(prefixes, visibility_context),
|
||||
Type::Function { expression, qn_len } => FunctionBuilder::new(
|
||||
item::Line { newline, items: mem::take(items) },
|
||||
start,
|
||||
@ -567,16 +763,45 @@ fn parse_assignment_like_statement<'s>(
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_assignment<'s>(
|
||||
start: usize,
|
||||
items: &mut Vec<Item<'s>>,
|
||||
operator: token::AssignmentOperator<'s>,
|
||||
expression: Tree<'s>,
|
||||
precedence: &mut Precedence<'s>,
|
||||
) -> Tree<'s> {
|
||||
let pattern =
|
||||
expression_to_pattern(precedence.resolve_non_section_offset(start, items).unwrap());
|
||||
Tree::assignment(pattern, operator, expression)
|
||||
struct AssignmentBuilder<'s> {
|
||||
newline: token::Newline<'s>,
|
||||
pattern: Tree<'s>,
|
||||
operator: token::AssignmentOperator<'s>,
|
||||
expression: Tree<'s>,
|
||||
excess_items: Vec<Item<'s>>,
|
||||
}
|
||||
|
||||
impl<'s> AssignmentBuilder<'s> {
|
||||
fn new(
|
||||
start: usize,
|
||||
mut line: item::Line<'s>,
|
||||
operator: token::AssignmentOperator<'s>,
|
||||
expression: Tree<'s>,
|
||||
precedence: &mut Precedence<'s>,
|
||||
) -> Self {
|
||||
let pattern = expression_to_pattern(
|
||||
precedence.resolve_non_section_offset(start, &mut line.items).unwrap(),
|
||||
);
|
||||
Self { newline: line.newline, pattern, operator, expression, excess_items: line.items }
|
||||
}
|
||||
|
||||
fn build(
|
||||
self,
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
visibility_context: VisibilityContext,
|
||||
) -> Line<'s, Tree<'s>> {
|
||||
let Self { newline, pattern, operator, expression, excess_items } = self;
|
||||
let mut first_newline = newline;
|
||||
let doc_line = take_doc_line(prefixes, &mut first_newline);
|
||||
Line {
|
||||
newline: first_newline,
|
||||
content: apply_private_keywords(
|
||||
Some(Tree::assignment(doc_line, pattern, operator, expression)),
|
||||
excess_items.into_iter(),
|
||||
visibility_context,
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_pattern<'s>(
|
||||
|
@ -10,6 +10,8 @@ use crate::syntax::statement::find_top_level_operator;
|
||||
use crate::syntax::statement::parse_pattern;
|
||||
use crate::syntax::statement::Line;
|
||||
use crate::syntax::statement::StatementPrefix;
|
||||
use crate::syntax::statement::StatementPrefixLine;
|
||||
use crate::syntax::statement::StatementPrefixes;
|
||||
use crate::syntax::statement::VisibilityContext;
|
||||
use crate::syntax::token;
|
||||
use crate::syntax::tree;
|
||||
@ -18,6 +20,7 @@ use crate::syntax::tree::ArgumentDefault;
|
||||
use crate::syntax::tree::ArgumentDefinition;
|
||||
use crate::syntax::tree::ArgumentDefinitionLine;
|
||||
use crate::syntax::tree::ArgumentType;
|
||||
use crate::syntax::tree::DocLine;
|
||||
use crate::syntax::tree::ReturnSpecification;
|
||||
use crate::syntax::tree::SyntaxError;
|
||||
use crate::syntax::tree::TypeSignatureLine;
|
||||
@ -70,7 +73,7 @@ impl<'s> FunctionBuilder<'s> {
|
||||
|
||||
pub fn build(
|
||||
mut self,
|
||||
prefixes: &mut Vec<Line<'s, StatementPrefix<'s>>>,
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
operator: token::AssignmentOperator<'s>,
|
||||
expression: Option<Tree<'s>>,
|
||||
visibility_context: VisibilityContext,
|
||||
@ -86,6 +89,7 @@ impl<'s> FunctionBuilder<'s> {
|
||||
|
||||
#[derive(Default)]
|
||||
struct PrefixesAccumulator<'s> {
|
||||
docs: Option<DocLine<'s>>,
|
||||
annotations: Option<Vec<AnnotationLine<'s>>>,
|
||||
signature: Option<TypeSignatureLine<'s>>,
|
||||
}
|
||||
@ -93,41 +97,36 @@ impl<'s> FunctionBuilder<'s> {
|
||||
let mut acc = PrefixesAccumulator::default();
|
||||
|
||||
while let Some(prefix) = prefixes.last() {
|
||||
let Some(content) = prefix.content.as_ref() else { break };
|
||||
match (&acc, &content) {
|
||||
match (&acc, &prefix) {
|
||||
(
|
||||
PrefixesAccumulator { annotations: None, signature: None },
|
||||
PrefixesAccumulator { docs: None, annotations: None, signature: None },
|
||||
StatementPrefix::TypeSignature(signature),
|
||||
) if qn_equivalent(&self.name, &signature.name) => {
|
||||
let Some(Line {
|
||||
newline: outer_newline,
|
||||
content: Some(StatementPrefix::TypeSignature(signature)),
|
||||
}) = prefixes.pop()
|
||||
let StatementPrefixLine::TypeSignature(signature_line) =
|
||||
prefixes.pop(&mut first_newline)
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
let newline = mem::replace(&mut first_newline, outer_newline);
|
||||
acc.signature = Some(TypeSignatureLine {
|
||||
signature,
|
||||
newlines: NonEmptyVec::singleton(newline),
|
||||
});
|
||||
acc.signature = Some(signature_line);
|
||||
}
|
||||
(PrefixesAccumulator { .. }, StatementPrefix::Annotation(_)) => {
|
||||
let Some(Line {
|
||||
newline: outer_newline,
|
||||
content: Some(StatementPrefix::Annotation(annotation)),
|
||||
}) = prefixes.pop()
|
||||
(PrefixesAccumulator { docs: None, .. }, StatementPrefix::Annotation(_)) => {
|
||||
let StatementPrefixLine::Annotation(annotation_line) =
|
||||
prefixes.pop(&mut first_newline)
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
let newline = mem::replace(&mut first_newline, outer_newline);
|
||||
let mut annotations = acc.annotations.take().unwrap_or_default();
|
||||
annotations.push(AnnotationLine {
|
||||
annotation,
|
||||
newlines: NonEmptyVec::singleton(newline),
|
||||
});
|
||||
annotations.push(annotation_line);
|
||||
acc.annotations = Some(annotations);
|
||||
}
|
||||
(PrefixesAccumulator { docs: None, .. }, StatementPrefix::Documentation(_)) => {
|
||||
let StatementPrefixLine::Documentation(doc_line) =
|
||||
prefixes.pop(&mut first_newline)
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
acc.docs = Some(doc_line);
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
@ -137,11 +136,13 @@ impl<'s> FunctionBuilder<'s> {
|
||||
annotations.reverse();
|
||||
annotations
|
||||
};
|
||||
let docs = acc.docs;
|
||||
|
||||
Line {
|
||||
newline: first_newline,
|
||||
content: apply_private_keywords(
|
||||
Some(Tree::function(
|
||||
docs,
|
||||
annotations,
|
||||
signature,
|
||||
private,
|
||||
@ -198,7 +199,7 @@ pub fn parse_args<'s>(
|
||||
}
|
||||
|
||||
pub fn parse_constructor_definition<'s>(
|
||||
prefixes: &mut Vec<Line<'s, StatementPrefix<'s>>>,
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
mut line: item::Line<'s>,
|
||||
private_keywords_start: usize,
|
||||
start: usize,
|
||||
@ -221,21 +222,28 @@ pub fn parse_constructor_definition<'s>(
|
||||
|
||||
let mut first_newline = newline;
|
||||
let mut annotations_reversed = vec![];
|
||||
let mut doc_line = None;
|
||||
while let Some(prefix) = prefixes.last() {
|
||||
let Some(content) = prefix.content.as_ref() else { break };
|
||||
if let StatementPrefix::Annotation(_) = &content {
|
||||
let Some(Line {
|
||||
newline: outer_newline,
|
||||
content: Some(StatementPrefix::Annotation(annotation)),
|
||||
}) = prefixes.pop()
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
let newline = mem::replace(&mut first_newline, outer_newline);
|
||||
annotations_reversed
|
||||
.push(AnnotationLine { annotation, newlines: NonEmptyVec::singleton(newline) });
|
||||
} else {
|
||||
break;
|
||||
match &prefix {
|
||||
StatementPrefix::Annotation(_) => {
|
||||
let StatementPrefixLine::Annotation(annotation_line) =
|
||||
prefixes.pop(&mut first_newline)
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
annotations_reversed.push(annotation_line);
|
||||
}
|
||||
StatementPrefix::Documentation(_) => {
|
||||
let StatementPrefixLine::Documentation(line) = prefixes.pop(&mut first_newline)
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
doc_line = Some(line);
|
||||
break;
|
||||
}
|
||||
_ => {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
let annotations = {
|
||||
@ -243,7 +251,8 @@ pub fn parse_constructor_definition<'s>(
|
||||
annotations_reversed
|
||||
};
|
||||
|
||||
let def = Tree::constructor_definition(annotations, private, name, inline_args, block_args);
|
||||
let def =
|
||||
Tree::constructor_definition(doc_line, annotations, private, name, inline_args, block_args);
|
||||
|
||||
Line {
|
||||
newline: first_newline,
|
||||
|
@ -13,7 +13,7 @@ use crate::syntax::statement::EvaluationContext;
|
||||
use crate::syntax::statement::Line;
|
||||
use crate::syntax::statement::StatementContext;
|
||||
use crate::syntax::statement::StatementOrPrefix;
|
||||
use crate::syntax::statement::StatementPrefix;
|
||||
use crate::syntax::statement::StatementPrefixes;
|
||||
use crate::syntax::statement::VisibilityContext;
|
||||
use crate::syntax::token;
|
||||
use crate::syntax::tree;
|
||||
@ -79,7 +79,7 @@ pub fn try_parse_type_def<'s>(
|
||||
}
|
||||
|
||||
fn parse_type_body_statement<'s>(
|
||||
prefixes: &mut Vec<Line<'s, StatementPrefix<'s>>>,
|
||||
prefixes: &mut StatementPrefixes<'s>,
|
||||
mut line: item::Line<'s>,
|
||||
precedence: &mut Precedence<'s>,
|
||||
args_buffer: &mut Vec<ArgumentDefinition<'s>>,
|
||||
@ -121,7 +121,8 @@ fn parse_type_body_statement<'s>(
|
||||
tree::Variant::Function(_)
|
||||
| tree::Variant::ForeignFunction(_)
|
||||
| tree::Variant::Assignment(_)
|
||||
| tree::Variant::Documented(_)
|
||||
| tree::Variant::Documentation(_)
|
||||
| tree::Variant::ExpressionStatement(_)
|
||||
| tree::Variant::Annotation(_)
|
||||
| tree::Variant::AnnotatedBuiltin(_) => None,
|
||||
tree::Variant::TypeSignatureDeclaration(_) => None,
|
||||
|
@ -206,6 +206,8 @@ macro_rules! with_ast_definition { ($f:ident ($($args:tt)*)) => { $f! { $($args)
|
||||
},
|
||||
/// A variable assignment, like `foo = bar 23`.
|
||||
Assignment {
|
||||
/// Documentation applied to the variable.
|
||||
pub doc_line: Option<DocLine<'s>>,
|
||||
/// The pattern which should be unified with the expression.
|
||||
pub pattern: Tree<'s>,
|
||||
/// The `=` token.
|
||||
@ -215,6 +217,8 @@ macro_rules! with_ast_definition { ($f:ident ($($args:tt)*)) => { $f! { $($args)
|
||||
},
|
||||
/// A function definition, like `add x y = x + y`.
|
||||
Function {
|
||||
/// Documentation applied to the function.
|
||||
pub doc_line: Option<DocLine<'s>>,
|
||||
/// Annotations applied to the function.
|
||||
pub annotation_lines: Vec<AnnotationLine<'s>>,
|
||||
/// A type signature for the function, on its own line.
|
||||
@ -326,15 +330,21 @@ macro_rules! with_ast_definition { ($f:ident ($($args:tt)*)) => { $f! { $($args)
|
||||
pub newlines: Vec<token::Newline<'s>>,
|
||||
pub expression: Option<Tree<'s>>,
|
||||
},
|
||||
/// An expression preceded by a doc comment.
|
||||
Documented {
|
||||
/// The documentation.
|
||||
pub documentation: DocComment<'s>,
|
||||
/// The item being documented.
|
||||
pub expression: Option<Tree<'s>>,
|
||||
/// A documentation comment that wasn't attached to a following documentable item.
|
||||
Documentation {
|
||||
pub docs: DocComment<'s>,
|
||||
},
|
||||
/// An expression at the top level of a block.
|
||||
ExpressionStatement {
|
||||
/// Documentation applied to the expression.
|
||||
pub doc_line: Option<DocLine<'s>>,
|
||||
/// The expression.
|
||||
pub expression: Tree<'s>,
|
||||
},
|
||||
/// Defines a type constructor.
|
||||
ConstructorDefinition {
|
||||
/// Documentation applied to the constructor.
|
||||
pub doc_line: Option<DocLine<'s>>,
|
||||
/// Annotations applied to the constructor.
|
||||
pub annotation_lines: Vec<AnnotationLine<'s>>,
|
||||
/// The `private` keyword, if present.
|
||||
@ -486,7 +496,23 @@ impl<'s> span::Builder<'s> for TextElement<'s> {
|
||||
|
||||
// === Documentation ===
|
||||
|
||||
/// A documentation comment.
|
||||
/// A documentation comment line.
|
||||
#[cfg_attr(feature = "debug", derive(Visitor))]
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Reflect, Deserialize)]
|
||||
pub struct DocLine<'s> {
|
||||
/// The documentation.
|
||||
pub docs: DocComment<'s>,
|
||||
/// Empty lines between the comment and the item.
|
||||
pub newlines: Vec<token::Newline<'s>>,
|
||||
}
|
||||
|
||||
impl<'s> span::Builder<'s> for DocLine<'s> {
|
||||
fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> {
|
||||
span.add(&mut self.docs).add(&mut self.newlines)
|
||||
}
|
||||
}
|
||||
|
||||
/// Contents of a documentation comment.
|
||||
#[cfg_attr(feature = "debug", derive(Visitor))]
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Reflect, Deserialize)]
|
||||
pub struct DocComment<'s> {
|
||||
@ -494,13 +520,11 @@ pub struct DocComment<'s> {
|
||||
pub open: token::TextStart<'s>,
|
||||
/// The documentation text.
|
||||
pub elements: Vec<TextElement<'s>>,
|
||||
/// Empty lines between the comment and the item.
|
||||
pub newlines: Vec<token::Newline<'s>>,
|
||||
}
|
||||
|
||||
impl<'s> span::Builder<'s> for DocComment<'s> {
|
||||
fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> {
|
||||
span.add(&mut self.open).add(&mut self.elements).add(&mut self.newlines)
|
||||
span.add(&mut self.open).add(&mut self.elements)
|
||||
}
|
||||
}
|
||||
|
||||
@ -728,20 +752,20 @@ impl<'s> span::Builder<'s> for CaseLine<'s> {
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Reflect, Deserialize)]
|
||||
pub struct Case<'s> {
|
||||
/// Documentation, if present.
|
||||
pub documentation: Option<DocComment<'s>>,
|
||||
pub doc_line: Option<DocLine<'s>>,
|
||||
/// The pattern being matched. It is an error for this to be absent.
|
||||
pub pattern: Option<Tree<'s>>,
|
||||
pub pattern: Option<Tree<'s>>,
|
||||
/// Token.
|
||||
pub arrow: Option<token::ArrowOperator<'s>>,
|
||||
pub arrow: Option<token::ArrowOperator<'s>>,
|
||||
/// The expression associated with the pattern. It is an error for this to be empty.
|
||||
pub expression: Option<Tree<'s>>,
|
||||
pub expression: Option<Tree<'s>>,
|
||||
}
|
||||
|
||||
impl<'s> Case<'s> {
|
||||
/// Return a mutable reference to the `left_offset` of this object (which will actually belong
|
||||
/// to one of the object's children, if it has any).
|
||||
pub fn left_offset_mut(&mut self) -> Option<&mut Offset<'s>> {
|
||||
None.or_else(|| self.documentation.as_mut().map(|t| &mut t.open.left_offset))
|
||||
None.or_else(|| self.doc_line.as_mut().map(|t| &mut t.docs.open.left_offset))
|
||||
.or_else(|| self.pattern.as_mut().map(|t| &mut t.span.left_offset))
|
||||
.or_else(|| self.arrow.as_mut().map(|t| &mut t.left_offset))
|
||||
.or_else(|| self.expression.as_mut().map(|e| &mut e.span.left_offset))
|
||||
@ -750,7 +774,7 @@ impl<'s> Case<'s> {
|
||||
|
||||
impl<'s> span::Builder<'s> for Case<'s> {
|
||||
fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> {
|
||||
span.add(&mut self.documentation)
|
||||
span.add(&mut self.doc_line)
|
||||
.add(&mut self.pattern)
|
||||
.add(&mut self.arrow)
|
||||
.add(&mut self.expression)
|
||||
|
@ -141,7 +141,6 @@ where I: Iterator<Item = Line<'s>>
|
||||
#[derive(Debug)]
|
||||
enum Prefix<'s> {
|
||||
BuiltinAnnotation { node: Box<AnnotatedBuiltin<'s>>, span: Span<'s> },
|
||||
Documentation { node: Box<Documented<'s>>, span: Span<'s> },
|
||||
}
|
||||
|
||||
impl<'s> TryFrom<Tree<'s>> for Prefix<'s> {
|
||||
@ -150,7 +149,6 @@ impl<'s> TryFrom<Tree<'s>> for Prefix<'s> {
|
||||
match tree.variant {
|
||||
Variant::AnnotatedBuiltin(node) if node.expression.is_none() =>
|
||||
Ok(Prefix::BuiltinAnnotation { node, span: tree.span }),
|
||||
Variant::Documented(node) => Ok(Prefix::Documentation { node, span: tree.span }),
|
||||
_ => Err(tree),
|
||||
}
|
||||
}
|
||||
@ -160,7 +158,6 @@ impl<'s> Prefix<'s> {
|
||||
fn push_newline(&mut self, newline: token::Newline<'s>) {
|
||||
let (newlines, span) = match self {
|
||||
Prefix::BuiltinAnnotation { node, span } => (&mut node.newlines, span),
|
||||
Prefix::Documentation { node, span } => (&mut node.documentation.newlines, span),
|
||||
};
|
||||
span.code_length += newline.left_offset.code.length() + newline.code.length();
|
||||
newlines.push(newline);
|
||||
@ -169,7 +166,6 @@ impl<'s> Prefix<'s> {
|
||||
fn apply_to(mut self, expression: Tree<'s>) -> Tree<'s> {
|
||||
let (expr, span) = match &mut self {
|
||||
Prefix::BuiltinAnnotation { node, span } => (&mut node.expression, span),
|
||||
Prefix::Documentation { node, span } => (&mut node.expression, span),
|
||||
};
|
||||
span.code_length += expression.span.left_offset.code.length() + expression.span.code_length;
|
||||
*expr = Some(expression);
|
||||
@ -182,8 +178,6 @@ impl<'s> From<Prefix<'s>> for Tree<'s> {
|
||||
match prefix {
|
||||
Prefix::BuiltinAnnotation { node, span } =>
|
||||
Tree { variant: Variant::AnnotatedBuiltin(node), span, warnings: default() },
|
||||
Prefix::Documentation { node, span } =>
|
||||
Tree { variant: Variant::Documented(node), span, warnings: default() },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -287,8 +287,7 @@ impl<'s> TextLiteralBuilder<'s> {
|
||||
let Self { open, newline, elements, has_preceding_item } = self;
|
||||
if open.code.starts_with('#') {
|
||||
assert_eq!(newline, None);
|
||||
let doc = syntax::tree::DocComment { open, elements, newlines: default() };
|
||||
let tree = Tree::documented(doc, default());
|
||||
let tree = Tree::documentation(syntax::tree::DocComment { open, elements });
|
||||
let error =
|
||||
has_preceding_item.then_some(SyntaxError::DocumentationUnexpectedNonInitial);
|
||||
maybe_with_error(tree, error)
|
||||
|
@ -57,6 +57,12 @@ impl<T> NonEmptyVec<T> {
|
||||
Self { elems }
|
||||
}
|
||||
|
||||
/// Construct a new `NonEmptyVec<T>` from a `Vec<T>` and an element `last`.
|
||||
pub fn from_vec_and_last(mut vec: Vec<T>, last: T) -> NonEmptyVec<T> {
|
||||
vec.push(last);
|
||||
Self { elems: vec }
|
||||
}
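`from_vec_and_last` keeps the non-emptiness invariant by appending `last` after whatever is already in `vec`, so it is safe even for an empty input vector. A standalone miniature of the same constructor against a plain wrapper type, rather than the crate's `NonEmptyVec` (the `last` accessor here is an assumption of the sketch, not the real API):

```rust
/// Standalone miniature of `from_vec_and_last`: pushing `last` after the
/// existing elements guarantees at least one element, even for an empty `vec`.
struct NonEmpty<T> {
    elems: Vec<T>,
}

impl<T> NonEmpty<T> {
    fn from_vec_and_last(mut vec: Vec<T>, last: T) -> Self {
        vec.push(last);
        Self { elems: vec }
    }

    /// Assumed accessor for the sketch; always succeeds by construction.
    fn last(&self) -> &T {
        self.elems.last().unwrap()
    }
}

fn main() {
    let newlines = NonEmpty::from_vec_and_last(Vec::new(), "\n");
    assert_eq!(*newlines.last(), "\n");
}
```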
|
||||
|
||||
/// Construct a new, `NonEmptyVec<T>` containing the provided element and with the
|
||||
/// provided `capacity`.
|
||||
///