Merge remote-tracking branch 'origin/develop' into wip/mwu/enso-installer

This commit is contained in:
Michał W. Urbańczyk 2024-05-07 13:14:04 +02:00
commit 71fd56460d
107 changed files with 2337 additions and 1317 deletions

View File

@ -71,7 +71,6 @@
"rimraf": "^5.0.5",
"semver": "^7.5.4",
"sucrase": "^3.34.0",
"verte-vue3": "^1.1.1",
"vue": "^3.4.19",
"ws": "^8.13.0",
"y-codemirror.next": "^0.3.2",

View File

@ -79,6 +79,14 @@ export class Resumable<T> {
this.current = this.iterator.next()
}
peek() {
return this.current.done ? undefined : this.current.value
}
advance() {
this.current = this.iterator.next()
}
/** The given function peeks at the current value. If the function returns `true`, the current value will be advanced
* and the function called again; if it returns `false`, the peeked value remains current and `advanceWhile` returns.
*/

View File

@ -1,15 +1,19 @@
<script setup lang="ts">
import ColorRing from '@/components/ColorRing.vue'
import type { NodeCreationOptions } from '@/components/GraphEditor/nodeCreation'
import SmallPlusButton from '@/components/SmallPlusButton.vue'
import SvgIcon from '@/components/SvgIcon.vue'
import ToggleIcon from '@/components/ToggleIcon.vue'
import { ref } from 'vue'
const nodeColor = defineModel<string | undefined>('nodeColor')
const props = defineProps<{
isRecordingEnabledGlobally: boolean
isRecordingOverridden: boolean
isDocsVisible: boolean
isVisualizationVisible: boolean
isFullMenuVisible: boolean
visibleNodeColors: Set<string>
}>()
const emit = defineEmits<{
'update:isRecordingOverridden': [isRecordingOverridden: boolean]
@ -20,13 +24,18 @@ const emit = defineEmits<{
openFullMenu: []
delete: []
createNodes: [options: NodeCreationOptions[]]
toggleColorPicker: []
}>()
const showColorPicker = ref(false)
</script>
<template>
<div class="CircularMenu" @pointerdown.stop @pointerup.stop @click.stop>
<div class="circle" :class="`${props.isFullMenuVisible ? 'full' : 'partial'}`">
<div
v-if="!showColorPicker"
class="circle menu"
:class="`${props.isFullMenuVisible ? 'full' : 'partial'}`"
>
<div v-if="!isFullMenuVisible" class="More" @pointerdown.stop="emit('openFullMenu')"></div>
<SvgIcon
v-if="isFullMenuVisible"
@ -40,7 +49,7 @@ const emit = defineEmits<{
name="paint_palette"
class="icon-container button slot3"
:alt="`Choose color`"
@click.stop="emit('toggleColorPicker')"
@click.stop="showColorPicker = true"
/>
<SvgIcon
v-if="isFullMenuVisible"
@ -72,6 +81,13 @@ const emit = defineEmits<{
@update:modelValue="emit('update:isRecordingOverridden', $event)"
/>
</div>
<div v-if="showColorPicker" class="circle">
<ColorRing
v-model="nodeColor"
:matchableColors="visibleNodeColors"
@close="showColorPicker = false"
/>
</div>
<SmallPlusButton
v-if="!isVisualizationVisible"
class="below-slot5"
@ -85,15 +101,24 @@ const emit = defineEmits<{
position: absolute;
user-select: none;
pointer-events: none;
/* This is a variable so that it can be referenced in computations,
but currently it can't be changed due to many hard-coded values below. */
--outer-diameter: 104px;
--full-ring-path: path(
evenodd,
'M0,52 A52,52 0,1,1 104,52 A52,52 0,1,1 0, 52 z m52,20 A20,20 0,1,1 52,32 20,20 0,1,1 52,72 z'
);
}
.circle {
position: relative;
left: -36px;
top: -36px;
width: 114px;
height: 114px;
width: var(--outer-diameter);
height: var(--outer-diameter);
}
.circle.menu {
> * {
pointer-events: all;
}
@ -118,10 +143,7 @@ const emit = defineEmits<{
}
&.full {
&:before {
clip-path: path(
evenodd,
'M0,52 A52,52 0,1,1 104,52 A52,52 0,1,1 0, 52 z m52,20 A20,20 0,1,1 52,32 20,20 0,1,1 52,72 z'
);
clip-path: var(--full-ring-path);
}
}
}
@ -153,6 +175,10 @@ const emit = defineEmits<{
}
}
:deep(.ColorRing .gradient) {
clip-path: var(--full-ring-path);
}
.icon-container {
display: inline-flex;
background: none;
@ -220,7 +246,7 @@ const emit = defineEmits<{
.below-slot5 {
position: absolute;
top: calc(108px - 36px);
top: calc(var(--outer-diameter) - 32px);
pointer-events: all;
}

View File

@ -1,46 +0,0 @@
<script setup lang="ts">
import { convertToRgb } from '@/util/colors'
// @ts-ignore
import Verte from 'verte-vue3'
import 'verte-vue3/dist/verte.css'
import { computed, nextTick, ref, watch } from 'vue'
const props = defineProps<{ show: boolean; color: string }>()
const emit = defineEmits<{ 'update:color': [string] }>()
/** Comparing RGB colors is complicated, because the string representation always has some minor differences.
* In this particular case, we remove spaces to match the format used by `verte-vue3`. */
const normalizedColor = computed(() => {
return convertToRgb(props.color)?.replaceAll(/\s/g, '') ?? ''
})
const updateColor = (c: string) => {
if (props.show && normalizedColor.value !== c) emit('update:color', c)
}
/** Looks weird, but it is a fix for vertes bug: https://github.com/baianat/verte/issues/52. */
const key = ref(0)
watch(
() => props.show,
() => nextTick(() => key.value++),
)
</script>
<template>
<Verte
v-show="props.show"
:key="key"
:modelValue="convertToRgb(props.color)"
picker="square"
model="rgb"
display="widget"
:draggable="false"
:enableAlpha="false"
@update:modelValue="updateColor"
@pointerdown.stop
@pointerup.stop
@click.stop
/>
</template>
<style></style>

View File

@ -0,0 +1,193 @@
<script setup lang="ts">
import {
  cssAngularColorStop,
  gradientPoints,
  rangesForInputs,
} from '@/components/ColorRing/gradient'
import { injectInteractionHandler } from '@/providers/interactionHandler'
import { targetIsOutside } from '@/util/autoBlur'
import { cssSupported, ensoColor, formatCssColor, parseCssColor } from '@/util/colors'
import { Rect } from '@/util/data/rect'
import { Vec2 } from '@/util/data/vec2'
import { computed, onMounted, ref } from 'vue'

/**
 * Hue picker
 *
 * # Angles
 *
 * All angles are measured in turns, starting from the 12-o'clock position, normalized to the range 0-1, unless
 * otherwise specified.
 * - This is the axis used by CSS gradients (adjustment is necessary when working with trigonometric functions, which
 *   start from the positive x-axis).
 * - Turns allow constants to be expressed as simple numbers, and can be easily converted to the units used by external
 *   APIs (radians for math, degrees for culori).
 */

// If the browser doesn't support OKLCH gradient interpolation, the gradient will be specified by computing the number
// of points specified here in OKLCH, converting to sRGB if the browser doesn't support OKLCH colors at all, and
// interpolating in sRGB. This number has been found to be enough to look close to the intended colors, without
// excessive gradient complexity (which may affect performance).
const NONNATIVE_OKLCH_INTERPOLATION_STEPS = 12

const FIXED_RANGE_WIDTH = 1 / 16

const selectedColor = defineModel<string | undefined>()
const props = defineProps<{
  matchableColors: Set<string>
}>()
const emit = defineEmits<{
  close: []
}>()

const browserSupportsOklchInterpolation = cssSupported(
  'background-image: conic-gradient(in oklch increasing hue, red, blue)',
)

const svgElement = ref<HTMLElement>()

const interaction = injectInteractionHandler()
onMounted(() => {
  interaction.setCurrent({
    cancel: () => emit('close'),
    pointerdown: (e: PointerEvent) => {
      // Any click outside the ring dismisses it.
      if (targetIsOutside(e, svgElement.value)) emit('close')
      return false
    },
  })
})

const mouseSelectedAngle = ref<number>()

const triangleAngle = computed(() => {
  // Bug fix: use `!= null` rather than truthiness — an angle of exactly 0 (the 12-o'clock position)
  // is a valid hover position and must not fall through to the selected-color branch.
  if (mouseSelectedAngle.value != null) return mouseSelectedAngle.value
  if (selectedColor.value) {
    const color = parseCssColor(selectedColor.value)
    // Bug fix: a hue of exactly 0 is a valid hue; only skip when the color has no hue component.
    if (color?.h != null) return color.h / 360
  }
  return undefined
})

function cssColor(hue: number) {
  return formatCssColor(ensoColor(hue))
}

// === Events ===

/** Angle (in turns, 12-o'clock-based) of the pointer event relative to the ring's center. */
function eventAngle(event: MouseEvent) {
  if (!svgElement.value) return 0
  const origin = Rect.FromDomRect(svgElement.value.getBoundingClientRect()).center()
  const offset = Vec2.FromXY(event).sub(origin)
  // Convert atan2's positive-x-axis radians to 12-o'clock-based turns.
  return Math.atan2(offset.y, offset.x) / (2 * Math.PI) + 0.25
}

function ringHover(event: MouseEvent) {
  mouseSelectedAngle.value = eventAngle(event)
}

function ringClick(event: MouseEvent) {
  mouseSelectedAngle.value = eventAngle(event)
  if (triangleHue.value != null) selectedColor.value = cssColor(triangleHue.value)
  emit('close')
}

// === Gradient colors ===

const fixedRanges = computed(() => {
  const inputHues = new Set<number>()
  for (const rawColor of props.matchableColors) {
    if (rawColor === selectedColor.value) continue
    const color = parseCssColor(rawColor)
    const hueDeg = color?.h
    if (hueDeg == null) continue
    const hue = hueDeg / 360
    // culori may report small negative hues; wrap them into [0, 1).
    inputHues.add(hue < 0 ? hue + 1 : hue)
  }
  return rangesForInputs(inputHues, FIXED_RANGE_WIDTH / 2)
})

const triangleHue = computed(() => {
  const target = triangleAngle.value
  if (target == null) return undefined
  // Snap to a fixed range when the pointer is inside one; ranges are sorted by start angle.
  for (const range of fixedRanges.value) {
    if (target < range.start) break
    if (target <= range.end) return range.hue
  }
  return target
})

// === CSS ===

const cssGradient = computed(() => {
  const points = gradientPoints(
    fixedRanges.value,
    browserSupportsOklchInterpolation ? 2 : NONNATIVE_OKLCH_INTERPOLATION_STEPS,
  )
  const angularColorStopList = Array.from(points, cssAngularColorStop)
  const colorStops = angularColorStopList.join(',')
  return browserSupportsOklchInterpolation ?
      `conic-gradient(in oklch increasing hue,${colorStops})`
    : `conic-gradient(${colorStops})`
})

const cssTriangleAngle = computed(() =>
  triangleAngle.value != null ? `${triangleAngle.value}turn` : undefined,
)
const cssTriangleColor = computed(() =>
  triangleHue.value != null ? cssColor(triangleHue.value) : undefined,
)
</script>
<template>
<div class="ColorRing">
<svg v-if="cssTriangleAngle != null" class="svg" viewBox="-2 -2 4 4">
<polygon class="triangle" points="0,-1 -0.4,-1.35 0.4,-1.35" />
</svg>
<div
ref="svgElement"
class="gradient"
@pointerleave="mouseSelectedAngle = undefined"
@pointermove="ringHover"
@click.stop="ringClick"
@pointerdown.stop
@pointerup.stop
/>
</div>
</template>
<style scoped>
.ColorRing {
position: relative;
pointer-events: none;
width: 100%;
height: 100%;
}
.svg {
position: absolute;
margin: -50%;
}
.gradient {
position: absolute;
inset: 0;
pointer-events: auto;
margin-top: auto;
background: v-bind('cssGradient');
cursor: crosshair;
border-radius: var(--radius-full);
animation: grow 0.1s forwards;
}
@keyframes grow {
from {
transform: scale(0);
}
to {
transform: scale(1);
}
}
.triangle {
transform: rotate(v-bind('cssTriangleAngle'));
fill: v-bind('cssTriangleColor');
}
</style>

View File

@ -0,0 +1,100 @@
import { fc, test as fcTest } from '@fast-check/vitest'
import { expect } from 'vitest'
import type { FixedRange, GradientPoint } from '../gradient'
import { gradientPoints, normalizeHue, rangesForInputs } from '../gradient'
/** Check value ranges and internal consistency: a range must be a non-empty sub-interval of [0, 1]. */
function validateRange({ start, end }: FixedRange) {
  expect(start).toBeGreaterThanOrEqual(0)
  expect(start).toBeLessThan(1)
  expect(end).toBeGreaterThan(0)
  expect(end).toBeLessThanOrEqual(1)
  // Ranges must not be empty or inverted.
  expect(end).toBeGreaterThan(start)
}
/** Check value ranges and internal consistency: hue and angles must lie in [0, 1]; when `angle2` is
 * present it must not precede `angle`; a single-position point must interpolate (`hue === angle`). */
function validateGradientPoint({ hue, angle, angle2 }: GradientPoint) {
  expect(hue).toBeGreaterThanOrEqual(0)
  expect(hue).toBeLessThanOrEqual(1)
  expect(angle).toBeGreaterThanOrEqual(0)
  expect(angle).toBeLessThanOrEqual(1)
  if (angle2 != null) {
    expect(angle2).toBeGreaterThanOrEqual(0)
    expect(angle2).toBeLessThanOrEqual(1)
    expect(angle).toBeLessThanOrEqual(angle2)
  } else {
    expect(hue).toBe(angle)
  }
}
interface AngularStop {
hue: number
angle: number
}
/** Flattens gradient points into simple (hue, angle) stops, expanding each two-position point into
 * a pair of stops at `angle` and `angle2`. */
function angularStops(points: Iterable<GradientPoint>) {
  const stops: AngularStop[] = []
  for (const point of points) {
    stops.push({ hue: point.hue, angle: point.angle })
    if (point.angle2 != null) stops.push({ hue: point.hue, angle: point.angle2 })
  }
  return stops
}
/** Collapses a stop list into the constant-hue spans it draws: a span is emitted for each pair of
 * consecutive distinct-angle stops that share a hue; between spans, stops must interpolate
 * (`hue === angle`). A trailing span that wraps around to a same-hue leading span is merged into it.
 * NOTE(review): the `radius` parameter is currently unused in this body — confirm whether it can be
 * dropped at the call site. */
function stopSpans(stops: Iterable<AngularStop>, radius: number) {
  const spans = new Array<{ start: number; end: number; hue: number }>()
  let prev: AngularStop | undefined = undefined
  for (const stop of stops) {
    if (prev && stop.angle !== prev.angle) {
      // Stops must be sorted by angle, and hues must be monotonic between spans.
      expect(stop.angle).toBeGreaterThanOrEqual(prev.angle)
      expect(stop.hue).toBeGreaterThanOrEqual(prev.hue)
      if (stop.hue === prev.hue) {
        spans.push({ start: prev.angle, end: stop.angle, hue: stop.hue })
      } else {
        // Between constant spans, both endpoints must be interpolation stops.
        expect(stop.hue).toBe(stop.angle)
        expect(prev.hue).toBe(prev.angle)
      }
    }
    prev = stop
  }
  const first = spans[0]
  const last = spans[spans.length - 1]
  // Merge a span ending at 1 with a same-hue span starting at 0: they are one span on the circle,
  // represented with start > end after the merge.
  if (spans.length >= 2 && first && last && normalizeHue(first.hue) === normalizeHue(last.hue)) {
    expect(first.start).toBe(0)
    expect(last.end).toBe(1)
    spans.pop()
    first.start = last.end
  }
  return spans
}
/**
 * Property-test body: builds fixed ranges and gradient points for the given hues, then validates
 * the structural invariants of the result (coverage of the full circle, span/hue correspondence,
 * and span widths bounded by the snap radius).
 */
function testGradients({ hues, radius }: { hues: number[]; radius: number }) {
  // Quantize hues so equality comparisons are immune to floating-point noise introduced downstream.
  const approximate = (n: number) => normalizeHue(Math.round(n * 2 ** 20) / 2 ** 20)
  const approximateHues = new Set(hues.map(approximate))
  const ranges = rangesForInputs(approximateHues, radius)
  ranges.forEach(validateRange)
  const points = gradientPoints(ranges, 2)
  points.forEach(validateGradientPoint)
  const stops = angularStops(points)
  // The stops must cover the full circle.
  expect(stops[0]?.angle).toBe(0)
  expect(stops[stops.length - 1]?.angle).toBe(1)
  const spans = stopSpans(stops, radius)
  for (const span of spans) {
    // Every constant-hue span must correspond to one of the input hues.
    expect(approximateHues).toContain(approximate(span.hue))
    if (span.start < span.end) {
      expect(span.hue === 0 ? 1 : span.hue).toBeGreaterThan(span.start)
      expect(span.hue).toBeLessThanOrEqual(span.end)
      expect(span.end - span.start).toBeLessThan(radius * 2 + 0.0000001)
    } else {
      // Wrapped span (crosses the 0/1 boundary): the hue must lie in (start, 1] or [0, end].
      // Bug fix: `expect(expr)` without a matcher asserts nothing; the condition was never checked.
      // The end bound mirrors the `toBeLessThanOrEqual` used in the non-wrapped branch.
      expect(span.hue > span.start || span.hue <= span.end).toBe(true)
      expect(1 - span.start + span.end).toBeLessThan(radius * 2 + 0.0000001)
    }
  }
  // One span per distinct input hue.
  expect(spans.length).toEqual(approximateHues.size)
}
// Property-based test: random sets of hues in [0, 1) combined with a realistic snap radius.
fcTest.prop({
  hues: fc.array(fc.float({ min: 0, max: 1, noNaN: true, maxExcluded: true })),
  /* This parameter comes from configuration, so we don't need to test unrealistically small or large values that may
  have their own edge cases. */
  radius: fc.float({ min: Math.fround(0.01), max: 0.25, noNaN: true }),
})('CSS gradients', testGradients)

View File

@ -0,0 +1,115 @@
import { ensoColor, formatCssColor } from '@/util/colors'
import { Resumable } from 'shared/util/data/iterable'
/** An arc of the hue circle that renders as a single constant hue. All values are in turns (0-1). */
export interface FixedRange {
  /** Start angle of the arc. */
  start: number
  /** End angle of the arc. */
  end: number
  /** The hue displayed throughout the arc. */
  hue: number
  /** Whether this range directly abuts the previous range (no interpolated gap before it). */
  meetsPreviousRange: boolean
  /** Whether this range directly abuts the next range (no interpolated gap after it). */
  meetsNextRange: boolean
}
/**
 * Sorts and deduplicates the input hues. Values within `radius` of either end of the circle are
 * additionally duplicated on the far side (as values below 0 / above 1), so that range construction
 * can treat the hue circle as a line.
 */
function normalizeRangeInputs(inputs: Iterable<number>, radius: number) {
  const sorted = Array.from(inputs).sort((a, b) => a - b)
  const normalized = new Set<number>()
  if (sorted.length === 0) return normalized
  const lowest = sorted[0]
  const highest = sorted[sorted.length - 1]
  if (highest != null && highest + radius > 1) normalized.add(highest - 1)
  for (const value of sorted) normalized.add(value)
  if (lowest != null && lowest < radius) normalized.add(lowest + 1)
  return normalized
}
/** Wraps an arbitrary hue value onto the circle, yielding a value in the range [0, 1). */
export function normalizeHue(value: number) {
  // Shift the remainder into positive territory before the final wrap, so negative inputs land in [0, 1).
  const shifted = (value % 1) + 1
  return shifted % 1
}
/** Like `normalizeHue`, but maps an input of exactly 1 to 1 instead of wrapping it to 0. */
export function seminormalizeHue(value: number) {
  if (value === 1) return 1
  // Same double-modulo wrap as `normalizeHue`, inlined.
  return ((value % 1) + 1) % 1
}
/**
 * Computes the fixed-hue ranges for the given input hues: each input claims an arc of ±`radius`
 * around itself, clipped to [0, 1]; overlapping claims are split at the midpoint between the two
 * raw (unclamped) hues. Returns ranges in ascending order of start angle.
 */
export function rangesForInputs(inputs: Iterable<number>, radius: number): FixedRange[] {
  if (radius === 0) return []
  // `rawHue` keeps the possibly-out-of-[0,1] hue for midpoint computations; `hue` is the display value.
  const ranges = new Array<FixedRange & { rawHue: number }>()
  // Inputs come back sorted (with wrapped duplicates), so ranges are built left to right.
  const normalizedInputs = normalizeRangeInputs(inputs, radius)
  for (const hue of normalizedInputs) {
    const preferredStart = Math.max(hue - radius, 0)
    const preferredEnd = Math.min(hue + radius, 1)
    const prev = ranges[ranges.length - 1]
    if (prev && preferredStart < prev.end) {
      // Overlap with the previous range: split at the midpoint of the two raw hues.
      let midpoint = (prev.rawHue + hue) / 2
      // The new range would start past the end of the circle; it only exists as a wrapped duplicate.
      if (midpoint >= 1) continue
      let meetsPreviousRange = true
      if (midpoint <= 0) {
        // The previous range lies entirely below 0 (a wrapped duplicate); replace it.
        ranges.pop()
        midpoint = 0
        meetsPreviousRange = false
      } else {
        prev.end = midpoint
        prev.meetsNextRange = true
      }
      ranges.push({
        start: midpoint,
        end: preferredEnd,
        rawHue: hue,
        hue: seminormalizeHue(hue),
        meetsPreviousRange,
        meetsNextRange: false,
      })
    } else {
      // NOTE(review): this branch is only reached when there is no `prev` or
      // `preferredStart >= prev.end`, so `preferredStart < prev.end` is always false here; ranges
      // that exactly touch (`preferredStart === prev.end`) are not marked as meeting — confirm
      // whether `<=` was intended.
      const meetsPreviousRange = prev !== undefined && preferredStart < prev.end
      if (meetsPreviousRange) prev.meetsNextRange = true
      ranges.push({
        start: preferredStart,
        end: preferredEnd,
        rawHue: hue,
        hue: seminormalizeHue(hue),
        meetsPreviousRange,
        meetsNextRange: false,
      })
    }
  }
  const first = ranges[0]
  const last = ranges[ranges.length - 1]
  // If the ranges reach both ends of the circle, they meet each other across the wrap point.
  if (ranges.length >= 2 && first?.start === 0 && last?.end === 1) {
    first.meetsPreviousRange = true
    last.meetsNextRange = true
  }
  return ranges
}
/** A color stop of the gradient. `hue` and the angles are in turns (0-1). */
export interface GradientPoint {
  hue: number
  /** Position of the stop. */
  angle: number
  /** If present, the stop spans from `angle` to `angle2` at a constant hue. */
  angle2?: number
}
/**
 * Renders a gradient point as a CSS `<angular-color-stop>`, e.g. `rgb(…) 0.25turn 0.5turn`.
 * When `angle2` is present, the color holds constant from `angle` to `angle2`.
 */
export function cssAngularColorStop({ hue, angle, angle2 }: GradientPoint) {
  return [
    formatCssColor(ensoColor(hue)),
    `${angle}turn`,
    // Bug fix: the second stop position must use `angle2`; previously `angle` was emitted twice,
    // collapsing every fixed range to a zero-length stop.
    ...(angle2 != null ? [`${angle2}turn`] : []),
  ].join(' ')
}
/**
 * Produces the color stops for a conic gradient: interpolation points evenly spaced around the
 * circle (at least `minStops` of them, each with `hue === angle`, omitted where they fall inside a
 * fixed range), plus one constant-hue stop spanning each fixed range.
 *
 * NOTE(review): `inputRanges` is iterated twice (once via `Resumable`, once directly), so callers
 * must pass a re-iterable collection such as an array, not a one-shot iterator — confirm.
 * NOTE(review): assumes `minStops >= 2`; `minStops === 1` would divide by zero below — confirm.
 */
export function gradientPoints(
  inputRanges: Iterable<FixedRange>,
  minStops: number,
): GradientPoint[] {
  const points = new Array<GradientPoint>()
  // An interpolation point's hue is determined by its position on the circle.
  const interpolationPoint = (angle: number) => ({ hue: angle, angle })
  const fixedRangeIter = new Resumable(inputRanges)
  for (let i = 0; i < minStops; i++) {
    const angle = i / (minStops - 1)
    // Skip ranges that end before this stop; emit the stop only if it is not inside a fixed range.
    fixedRangeIter.advanceWhile((range) => range.end < angle)
    const nextFixedRange = fixedRangeIter.peek()
    if (!nextFixedRange || nextFixedRange.start > angle) points.push(interpolationPoint(angle))
  }
  for (const { start, end, hue, meetsPreviousRange, meetsNextRange } of inputRanges) {
    // Anchor the gradient at a range's edges unless an adjacent range already provides that stop.
    if (!meetsPreviousRange) points.push(interpolationPoint(start))
    points.push({ hue, angle: start, angle2: end })
    if (!meetsNextRange) points.push(interpolationPoint(end))
  }
  points.sort((a, b) => a.angle - b.angle)
  return points
}

View File

@ -1,7 +1,6 @@
<script setup lang="ts">
import { codeEditorBindings, graphBindings, interactionBindings } from '@/bindings'
import CodeEditor from '@/components/CodeEditor.vue'
import ColorPicker from '@/components/ColorPicker.vue'
import ComponentBrowser from '@/components/ComponentBrowser.vue'
import { type Usage } from '@/components/ComponentBrowser/input'
import { usePlacement } from '@/components/ComponentBrowser/placement'
@ -22,6 +21,7 @@ import { useNavigatorStorage } from '@/composables/navigatorStorage'
import type { PlacementStrategy } from '@/composables/nodeCreation'
import { useStackNavigator } from '@/composables/stackNavigator'
import { provideGraphNavigator } from '@/providers/graphNavigator'
import { provideNodeColors } from '@/providers/graphNodeColors'
import { provideNodeCreation } from '@/providers/graphNodeCreation'
import { provideGraphSelection } from '@/providers/graphSelection'
import { provideInteractionHandler } from '@/providers/interactionHandler'
@ -240,17 +240,14 @@ const graphBindingsHandler = graphBindings.handler({
stackNavigator.exitNode()
},
changeColorSelectedNodes() {
toggleColorPicker()
showColorPicker.value = true
},
})
const { handleClick } = useDoubleClick(
(e: MouseEvent) => {
graphBindingsHandler(e)
if (document.activeElement instanceof HTMLElement) {
document.activeElement.blur()
}
showColorPicker.value = false
clearFocus()
},
() => {
stackNavigator.exitNode()
@ -499,49 +496,18 @@ async function handleFileDrop(event: DragEvent) {
// === Color Picker ===
/** A small offset to keep the color picker slightly away from the nodes. */
const COLOR_PICKER_X_OFFSET_PX = -300
const showColorPicker = ref(false)
const colorPickerSelectedColor = ref('')
function overrideSelectedNodesColor(color: string) {
;[...nodeSelection.selected].map((id) => graphStore.overrideNodeColor(id, color))
}
/** Toggle displaying of the color picker. It will change colors of selected nodes. */
function toggleColorPicker() {
if (nodeSelection.selected.size === 0) {
showColorPicker.value = false
return
}
showColorPicker.value = !showColorPicker.value
if (showColorPicker.value) {
const oneOfSelected = set.first(nodeSelection.selected)
const color = graphStore.db.getNodeColorStyle(oneOfSelected)
if (color.startsWith('var') && viewportNode.value != null) {
// Some colors are defined in CSS variables, we need to get the actual color.
const variableName = color.slice(4, -1)
colorPickerSelectedColor.value = getComputedStyle(viewportNode.value).getPropertyValue(
variableName,
)
} else {
colorPickerSelectedColor.value = color
}
}
}
const colorPickerPos = computed(() => {
const nodeRects = [...nodeSelection.selected].map(
(id) => graphStore.nodeRects.get(id) ?? Rect.Zero,
)
const boundingRect = Rect.Bounding(...nodeRects)
return new Vec2(boundingRect.left + COLOR_PICKER_X_OFFSET_PX, boundingRect.center().y)
})
const colorPickerStyle = computed(() =>
colorPickerPos.value != null ?
{ transform: `translate(${colorPickerPos.value.x}px, ${colorPickerPos.value.y}px)` }
: {},
provideNodeColors((variable) =>
viewportNode.value ? getComputedStyle(viewportNode.value).getPropertyValue(variable) : '',
)
const showColorPicker = ref(false)
function setSelectedNodesColor(color: string) {
graphStore.transact(() =>
nodeSelection.selected.forEach((id) => graphStore.overrideNodeColor(id, color)),
)
}
const groupColors = computed(() => {
const styles: { [key: string]: string } = {}
for (let group of suggestionDb.groups) {
@ -569,15 +535,7 @@ const groupColors = computed(() => {
@nodeOutputPortDoubleClick="handleNodeOutputPortDoubleClick"
@nodeDoubleClick="(id) => stackNavigator.enterNode(id)"
@createNodes="createNodesFromSource"
@toggleColorPicker="toggleColorPicker"
/>
<ColorPicker
class="colorPicker"
:style="colorPickerStyle"
:show="showColorPicker"
:color="colorPickerSelectedColor"
@update:color="overrideSelectedNodesColor"
@setNodeColor="setSelectedNodesColor"
/>
</div>
<div
@ -598,6 +556,7 @@ const groupColors = computed(() => {
/>
<TopBar
v-model:recordMode="projectStore.recordMode"
v-model:showColorPicker="showColorPicker"
:breadcrumbs="stackNavigator.breadcrumbLabels.value"
:allowNavigationLeft="stackNavigator.allowNavigationLeft.value"
:allowNavigationRight="stackNavigator.allowNavigationRight.value"
@ -612,7 +571,7 @@ const groupColors = computed(() => {
@zoomOut="graphNavigator.stepZoom(-1)"
@toggleCodeEditor="toggleCodeEditor"
@collapseNodes="collapseNodes"
@toggleColorPicker="toggleColorPicker"
@setNodeColor="setSelectedNodesColor"
@removeNodes="deleteSelected"
/>
<PlusButton @pointerdown.stop @click.stop="addNodeAuto()" @pointerup.stop />
@ -646,8 +605,4 @@ const groupColors = computed(() => {
width: 0;
height: 0;
}
.colorPicker {
position: absolute;
}
</style>

View File

@ -571,6 +571,7 @@ const connected = computed(() => isConnected(props.edge))
stroke-width: 14;
stroke: transparent;
pointer-events: stroke;
cursor: pointer;
}
.edge.visible {
stroke-width: 4;

View File

@ -9,16 +9,17 @@ import GraphNodeMessage, {
} from '@/components/GraphEditor/GraphNodeMessage.vue'
import GraphNodeSelection from '@/components/GraphEditor/GraphNodeSelection.vue'
import GraphVisualization from '@/components/GraphEditor/GraphVisualization.vue'
import type { NodeCreationOptions } from '@/components/GraphEditor/nodeCreation'
import NodeWidgetTree, {
GRAB_HANDLE_X_MARGIN,
ICON_WIDTH,
} from '@/components/GraphEditor/NodeWidgetTree.vue'
import type { NodeCreationOptions } from '@/components/GraphEditor/nodeCreation'
import SvgIcon from '@/components/SvgIcon.vue'
import { useApproach } from '@/composables/animation'
import { useDoubleClick } from '@/composables/doubleClick'
import { usePointer, useResizeObserver } from '@/composables/events'
import { injectGraphNavigator } from '@/providers/graphNavigator'
import { injectNodeColors } from '@/providers/graphNodeColors'
import { injectGraphSelection } from '@/providers/graphSelection'
import { useGraphStore, type Node } from '@/stores/graph'
import { asNodeId } from '@/stores/graph/graphDatabase'
@ -58,7 +59,7 @@ const emit = defineEmits<{
outputPortDoubleClick: [event: PointerEvent, portId: AstId]
doubleClick: []
createNodes: [options: NodeCreationOptions[]]
toggleColorPicker: []
setNodeColor: [color: string]
'update:edited': [cursorPosition: number]
'update:rect': [rect: Rect]
'update:visualizationId': [id: Opt<VisualizationIdentifier>]
@ -405,7 +406,14 @@ watchEffect(() => {
const scope = effectScope(true)
const approach = scope.run(() =>
useApproach(
() => (outputHovered.value === port || graph.unconnectedEdge?.target === port ? 1 : 0),
() =>
(
outputHovered.value === port ||
graph.unconnectedEdge?.target === port ||
selectionVisible.value
) ?
1
: 0,
50,
0.01,
),
@ -446,6 +454,8 @@ const documentation = computed<string | undefined>({
})
},
})
const { getNodeColor, visibleNodeColors } = injectNodeColors()
</script>
<template>
@ -501,6 +511,8 @@ const documentation = computed<string | undefined>({
:isRecordingEnabledGlobally="projectStore.isRecordingEnabled"
:isVisualizationVisible="isVisualizationVisible"
:isFullMenuVisible="menuVisible && menuFull"
:nodeColor="getNodeColor(nodeId)"
:visibleNodeColors="visibleNodeColors"
@update:isVisualizationVisible="emit('update:visualizationVisible', $event)"
@startEditing="startEditingNode"
@startEditingComment="editingComment = true"
@ -509,7 +521,7 @@ const documentation = computed<string | undefined>({
@createNodes="emit('createNodes', $event)"
@pointerenter="menuHovered = true"
@pointerleave="menuHovered = false"
@toggleColorPicker="emit('toggleColorPicker')"
@update:nodeColor="emit('setNodeColor', $event)"
/>
<GraphVisualization
v-if="isVisualizationVisible"
@ -600,21 +612,19 @@ const documentation = computed<string | undefined>({
height: 100%;
position: absolute;
overflow: visible;
top: 0px;
left: 0px;
top: 0;
left: 0;
display: flex;
--output-port-max-width: 6px;
--output-port-overlap: 0.2px;
--output-port-hover-width: 8px;
--output-port-max-width: 4px;
--output-port-overlap: -8px;
--output-port-hover-width: 20px;
}
.outputPort,
.outputPortHoverArea {
x: calc(0px - var(--output-port-width) / 2);
y: calc(0px - var(--output-port-width) / 2);
width: calc(var(--node-width) + var(--output-port-width));
height: calc(var(--node-height) + var(--output-port-width));
rx: calc(var(--node-border-radius) + var(--output-port-width) / 2);
fill: none;
@ -638,13 +648,21 @@ const documentation = computed<string | undefined>({
--output-port-width: calc(
var(--output-port-max-width) * var(--hover-animation) - var(--output-port-overlap)
);
y: calc(0px - var(--output-port-width) / 2);
height: calc(var(--node-height) + var(--output-port-width));
pointer-events: none;
}
.outputPortHoverArea {
--output-port-width: var(--output-port-hover-width);
y: calc(
0px + var(--output-port-hover-width) / 2 + var(--output-port-overlap) / 2 + var(--node-height) /
2
);
height: calc(var(--node-height) / 2 + var(--output-port-hover-width) / 2);
stroke: transparent;
pointer-events: all;
cursor: pointer;
}
.portClip {

View File

@ -21,7 +21,7 @@ const emit = defineEmits<{
nodeOutputPortDoubleClick: [portId: AstId]
nodeDoubleClick: [nodeId: NodeId]
createNodes: [source: NodeId, options: NodeCreationOptions[]]
toggleColorPicker: []
setNodeColor: [color: string]
}>()
const projectStore = useProjectStore()
@ -63,7 +63,7 @@ const uploadingFiles = computed<[FileName, File][]>(() => {
@outputPortDoubleClick="(_event, port) => emit('nodeOutputPortDoubleClick', port)"
@doubleClick="emit('nodeDoubleClick', id)"
@createNodes="emit('createNodes', id, $event)"
@toggleColorPicker="emit('toggleColorPicker')"
@setNodeColor="emit('setNodeColor', $event)"
@update:edited="graphStore.setEditedNode(id, $event)"
@update:rect="graphStore.updateNodeRect(id, $event)"
@update:visualizationId="

View File

@ -1,10 +1,33 @@
<script setup lang="ts">
import ColorRing from '@/components/ColorRing.vue'
import SvgIcon from '@/components/SvgIcon.vue'
import ToggleIcon from '@/components/ToggleIcon.vue'
import { injectNodeColors } from '@/providers/graphNodeColors'
import { injectGraphSelection } from '@/providers/graphSelection'
import { computed } from 'vue'
const _props = defineProps<{
selectedComponents: number
const showColorPicker = defineModel<boolean>('showColorPicker', { required: true })
const _props = defineProps<{ selectedComponents: number }>()
const emit = defineEmits<{
collapseNodes: []
setNodeColor: [color: string]
removeNodes: []
}>()
const emit = defineEmits<{ collapseNodes: []; toggleColorPicker: []; removeNodes: [] }>()
const { getNodeColor, visibleNodeColors } = injectNodeColors()
const selection = injectGraphSelection(true)
const selectionColor = computed(() => {
if (!selection) return undefined
let color: string | undefined = undefined
for (const node of selection.selected) {
const nodeColor = getNodeColor(node)
if (nodeColor) {
if (color !== undefined && color !== nodeColor) return undefined
else color = nodeColor
}
}
return color
})
</script>
<template>
@ -19,12 +42,16 @@ const emit = defineEmits<{ collapseNodes: []; toggleColorPicker: []; removeNodes
alt="Group components"
@click.stop="emit('collapseNodes')"
/>
<SvgIcon
name="paint_palette"
draggable="false"
class="icon button"
alt="Change components' colors"
@click.stop="emit('toggleColorPicker')"
<ToggleIcon
v-model="showColorPicker"
:alt="`${showColorPicker ? 'Hide' : 'Show'} the component color chooser`"
icon="paint_palette"
class="toggle button"
:class="{
// Any `pointerdown` event outside the color picker will close it. Ignore clicks that occur while the color
// picker is open, so that it isn't toggled back open.
disableInput: showColorPicker,
}"
/>
<SvgIcon
name="trash"
@ -33,6 +60,14 @@ const emit = defineEmits<{ collapseNodes: []; toggleColorPicker: []; removeNodes
alt="Delete components"
@click.stop="emit('removeNodes')"
/>
<div v-if="showColorPicker" class="colorPickerContainer">
<ColorRing
:modelValue="selectionColor"
:matchableColors="visibleNodeColors"
@close="showColorPicker = false"
@update:modelValue="emit('setNodeColor', $event)"
/>
</div>
</div>
</template>
@ -50,4 +85,30 @@ const emit = defineEmits<{ collapseNodes: []; toggleColorPicker: []; removeNodes
padding-top: 4px;
padding-bottom: 4px;
}
.colorPickerContainer {
position: absolute;
top: 36px;
left: 0;
width: 240px;
height: 240px;
display: flex;
border-radius: var(--radius-default);
background: var(--color-frame-bg);
backdrop-filter: var(--blur-app-bg);
place-items: center;
padding: 36px;
}
.toggle {
opacity: 0.6;
}
.toggledOn {
opacity: unset;
}
.disableInput {
pointer-events: none;
}
</style>

View File

@ -7,6 +7,7 @@ import SelectionMenu from '@/components/SelectionMenu.vue'
import { injectGuiConfig } from '@/providers/guiConfig'
import { computed } from 'vue'
const showColorPicker = defineModel<boolean>('showColorPicker', { required: true })
const props = defineProps<{
breadcrumbs: BreadcrumbItem[]
recordMode: boolean
@ -26,7 +27,7 @@ const emit = defineEmits<{
zoomOut: []
toggleCodeEditor: []
collapseNodes: []
toggleColorPicker: []
setNodeColor: [color: string]
removeNodes: []
}>()
@ -60,10 +61,11 @@ const barStyle = computed(() => {
<Transition name="selection-menu">
<SelectionMenu
v-if="componentsSelected > 1"
v-model:showColorPicker="showColorPicker"
:selectedComponents="componentsSelected"
@collapseNodes="emit('collapseNodes')"
@toggleColorPicker="emit('toggleColorPicker')"
@removeNodes="emit('removeNodes')"
@setNodeColor="emit('setNodeColor', $event)"
/>
</Transition>
<ExtendedMenu

View File

@ -0,0 +1,30 @@
import { useGraphStore, type NodeId } from '@/stores/graph'
import { computed } from 'vue'
/**
 * Composable exposing helpers for reading node colors from the graph database.
 *
 * @param getCssValue Resolves a CSS variable name to its computed value; returns '' when unset.
 */
export function useNodeColors(getCssValue: (variable: string) => string) {
  const graphStore = useGraphStore()

  /** Returns the concrete color of the given node, or `undefined` if it cannot be resolved. */
  function getNodeColor(node: NodeId) {
    const color = graphStore.db.getNodeColorStyle(node)
    if (color.startsWith('var')) {
      // Some colors are defined in CSS variables, we need to get the actual color.
      // Strip the surrounding `var(` … `)` to obtain the variable name.
      const variableName = color.slice(4, -1)
      const value = getCssValue(variableName)
      if (value === '') return undefined
      return value
    } else {
      return color
    }
  }

  /** The set of distinct resolved colors of all nodes currently in the graph. */
  const visibleNodeColors = computed(() => {
    const colors = new Set<string>()
    for (const node of graphStore.db.nodeIds()) {
      const color = getNodeColor(node)
      if (color) colors.add(color)
    }
    return colors
  })

  return { getNodeColor, visibleNodeColors }
}

View File

@ -0,0 +1,5 @@
import { useNodeColors } from '@/composables/nodeColors'
import { createContextStore } from '@/providers'

/** Context store exposing the node-color helpers (`useNodeColors`) to a component subtree. */
export { injectFn as injectNodeColors, provideFn as provideNodeColors }

const { provideFn, injectFn } = createContextStore('node colors', useNodeColors)

View File

@ -263,6 +263,10 @@ export class GraphDb {
return this.bindings.identifierToBindingId.hasKey(ident)
}
nodeIds(): IterableIterator<NodeId> {
return this.nodeIdToNode.keys()
}
isKnownFunctionCall(id: AstId): boolean {
return this.getMethodCallInfo(id) != null
}

View File

@ -41,7 +41,7 @@ export function registerAutoBlurHandler() {
)
}
/** Returns true if the target of the event is in the DOM subtree of the given `area` element. */
/** Returns true if the target of the event is outside the DOM subtree of the given `area` element. */
export function targetIsOutside(e: Event, area: Opt<Element>): boolean {
return !!area && e.target instanceof Element && !area.contains(e.target)
}

View File

@ -1,38 +1,44 @@
import { converter, formatCss, formatRgb, modeOklch, modeRgb, useMode, type Oklch } from 'culori/fn'
import { v3 as hashString } from 'murmurhash'
export { type Oklch }
useMode(modeOklch)
useMode(modeRgb)
const rgbConverter = converter('rgb')
const oklch = converter('oklch')
export function convertToRgb(color: string): string | undefined {
const colorRgb = rgbConverter(color)
return colorRgb ? formatRgb(colorRgb) : undefined
export function cssSupported(css: string): boolean {
return typeof CSS !== 'undefined' && 'supports' in CSS && CSS.supports(css)
}
// Check if the browser supports `oklch` colorspace. If it does not, we fallback to good-old sRGB.
const supportsOklch: boolean =
typeof CSS !== 'undefined' && 'supports' in CSS && CSS.supports('color: oklch(0 0 0)')
/** Whether the browser supports `oklch` colorspace. */
export const browserSupportsOklch: boolean = cssSupported('color: oklch(0 0 0)')
/* Generate a sRGB color value from the provided string. */
/* Generate a CSS color value from the provided string. */
export function colorFromString(s: string) {
const hash: number = hashString(s)
// Split the 32-bit hash value into parts of 12, 10 and 10 bits.
const part1: number = (hash >> 20) & 0xfff
const part2: number = (hash >> 10) & 0x3ff
const part3: number = hash & 0x3ff
// Range values below can be adjusted if necessary, they were chosen arbitrarily.
const chroma = mapInt32(part1, 0.1, 0.16, 12)
const hue = mapInt32(part2, 0, 360, 10)
const lightness = mapInt32(part3, 0.52, 0.57, 10)
const color: Oklch = {
const hue = mapInt32(hash & 0x3ff, 0, 1, 10)
return formatCssColor(ensoColor(hue))
}
/* Returns the enso color for a given hue, in the range 0-1. */
export function ensoColor(hue: number): Oklch {
return {
mode: 'oklch',
l: lightness,
c: chroma,
h: hue,
l: 0.545,
c: 0.14,
h: hue * 360,
}
return supportsOklch ? formatCss(color) : formatRgb(color)
}
/** Format an OKLCH color in CSS. */
export function formatCssColor(color: Oklch) {
return browserSupportsOklch ? formatCss(color) : formatRgb(color)
}
/* Parse the input as a CSS color value; convert it to Oklch if it isn't already. */
export function parseCssColor(cssColor: string): Oklch | undefined {
return oklch(cssColor)
}
/* Map `bits`-wide unsigned integer to the range `[rangeStart, rangeEnd)`. */

View File

@ -340,7 +340,8 @@ lazy val enso = (project in file("."))
`exploratory-benchmark-java-helpers`,
`benchmark-java-helpers`,
`benchmarks-common`,
`bench-processor`
`bench-processor`,
`ydoc-server`
)
.settings(Global / concurrentRestrictions += Tags.exclusive(Exclusive))
.settings(
@ -1149,6 +1150,7 @@ lazy val `ydoc-server` = project
.enablePlugins(JPMSPlugin)
.configs(Test)
.settings(
frgaalJavaCompilerSetting,
crossPaths := false,
autoScalaLibrary := false,
Compile / run / fork := true,

View File

@ -270,6 +270,7 @@ type Array
Find the index of the first palindrome.
["ab", "abab", "aba", "bbb"].to_array.index_of (s-> s == s.reverse) == 2
@condition Filter_Condition.default_widget
index_of : (Any | Filter_Condition | (Any -> Boolean)) -> Integer -> Integer | Nothing
index_of self condition (start : Integer = 0) =
Array_Like_Helpers.index_of self condition start
@ -294,6 +295,7 @@ type Array
Find the index of the last palindrome.
["ab", "abab", "aba", "bbb"].to_array.last_index_of (s-> s == s.reverse) == 3
@condition Filter_Condition.default_widget
last_index_of : (Any | Filter_Condition | (Any -> Boolean)) -> Integer -> Integer | Nothing
last_index_of self condition (start : Integer = -1) =
Array_Like_Helpers.last_index_of self condition start
@ -434,8 +436,9 @@ type Array
[1, 2, 3, 4, 5].to_array.filter (> 3)
[1, 2, 3, 4, 5].to_array.filter (Filter_Condition.Greater than=3)
@filter Filter_Condition.default_widget
filter : (Filter_Condition | (Any -> Boolean)) -> Vector Any
filter self filter = Array_Like_Helpers.filter self filter
filter self (filter : Filter_Condition | (Any -> Boolean)) = Array_Like_Helpers.filter self filter
## GROUP Calculations
ICON dataframes_union
@ -559,8 +562,9 @@ type Array
Splitting an array into even and odd elements.
[1, 2, 3, 4, 5].to_array.partition (x -> x % 2 == 0) == (Pair [2, 4].to_array [1, 3, 5].to_array)
@condition Filter_Condition.default_widget
partition : (Filter_Condition | (Any -> Boolean)) -> Pair (Vector Any) (Vector Any)
partition self condition =
partition self (condition : Filter_Condition | (Any -> Boolean)) =
Array_Like_Helpers.partition self condition
## ICON preparation
@ -696,8 +700,9 @@ type Array
Finding a first element of the array that is larger than 3.
[1, 2, 3, 4, 5].to_array.find (> 3)
@condition Filter_Condition.default_widget
find : (Filter_Condition | (Any -> Boolean)) -> Integer -> Any -> Any
find self condition (start : Integer = 0) ~if_missing=(Error.throw Not_Found) =
find self (condition : Filter_Condition | (Any -> Boolean)) (start : Integer = 0) ~if_missing=(Error.throw Not_Found) =
Array_Like_Helpers.find self condition start if_missing
## ICON select_row
@ -801,8 +806,9 @@ type Array
Checking if any element of the array is even.
[1, 2, 3, 4, 5].to_array.any (x-> x%2 == 0)
@condition Filter_Condition.default_widget
any : (Filter_Condition | (Any -> Boolean)) -> Boolean
any self condition = Array_Like_Helpers.any self condition
any self (condition : Filter_Condition | (Any -> Boolean)) = Array_Like_Helpers.any self condition
## GROUP Logical
ICON preparation
@ -821,8 +827,9 @@ type Array
Check if all elements in the array are even.
[-1, 1, 5, 8].to_array.all (x-> x%2 == 0)
@condition Filter_Condition.default_widget
all : (Filter_Condition | (Any -> Boolean)) -> Boolean
all self condition = Array_Like_Helpers.all self condition
all self (condition : Filter_Condition | (Any -> Boolean)) = Array_Like_Helpers.all self condition
## GROUP Logical
ICON preparation

View File

@ -15,7 +15,8 @@ import project.Nothing.Nothing
from project.Data.Boolean import Boolean, False, True
from project.Data.Filter_Condition.Filter_Condition import all
from project.Data.Text.Extensions import all
from project.Metadata import make_single_choice, Widget
from project.Metadata import Display, make_single_choice, Widget
from project.Metadata.Choice import Option
polyglot java import org.enso.base.Regex_Utils
@ -228,39 +229,52 @@ type Filter_Condition
Creates a Single_Choice Widget for delimiters.
default_widget : Boolean -> Boolean -> Boolean -> Boolean -> Boolean -> Widget
default_widget include_comparable=True include_text=True include_boolean=True include_nullable=True include_numeric=True =
options_before = Vector.build options_builder->
if include_comparable then
options_builder.append "Less"
options_builder.append "Equal_Or_Less"
options_builder.append "Equal"
options_builder.append "Equal_Or_Greater"
options_builder.append "Greater"
options_builder.append "Not_Equal"
options_builder.append "Between"
if include_numeric then
options_builder.append "Is_Nan"
options_builder.append "Is_Infinite"
options_builder.append "Is_Finite"
if include_boolean then
options_builder.append "Is_True"
options_builder.append "Is_False"
if include_nullable then
options_builder.append "Is_Nothing"
options_builder.append "Not_Nothing"
if include_text then
options_builder.append "Is_Empty"
options_builder.append "Not_Empty"
options_builder.append "Equal_Ignore_Case"
options_builder.append "Starts_With"
options_builder.append "Ends_With"
options_builder.append "Contains"
options_builder.append "Like"
options_builder.append "Is_In"
text = if include_text then [Option "<Text Value>" "''"] else []
number = if include_numeric then [Option "<Number Value>" "0"] else []
boolean = if include_boolean then [Option "<True/False>" "True"] else []
with_all_types = Widget.Single_Choice values=text+number+boolean display=Display.Always
with_number_text = Widget.Single_Choice values=text+number display=Display.Always
## Can't use auto-scoping as we allow predicates or a Filter_Condition.
fqn = Meta.get_qualified_type_name Filter_Condition
options = Vector.build builder->
builder.append (Option "Equals" fqn+".Equal" [["to", with_all_types]])
builder.append (Option "Not Equals" fqn+".Not_Equal" [["to", with_all_types]])
if include_comparable then
builder.append (Option "Less Than" fqn+".Less" [["than", with_number_text]])
builder.append (Option "Less Than Or Equal" fqn+".Equal_Or_Less" [["than", with_number_text]])
builder.append (Option "Greater Than" fqn+".Greater" [["than", with_number_text]])
builder.append (Option "Greater Than Or Equal" fqn+".Equal_Or_Greater" [["than", with_number_text]])
builder.append (Option "Between" fqn+".Between" [["lower", with_number_text], ["upper", with_number_text]])
if include_numeric then
builder.append (Option "Is Finite" fqn+".Is_Finite")
builder.append (Option "Is Infinite" fqn+".Is_Infinite")
builder.append (Option "Is NaN" fqn+".Is_Nan")
if include_boolean then
builder.append (Option "Is True" fqn+".Is_True")
builder.append (Option "Is False" fqn+".Is_False")
if include_nullable then
builder.append (Option "Is Nothing" fqn+".Is_Nothing")
builder.append (Option "Is Not Nothing" fqn+".Not_Nothing")
if include_text then
builder.append (Option "Equals Ignore Case" fqn+".Equal_Ignore_Case" [["to", Widget.Text_Input]])
builder.append (Option "Starts With" fqn+".Starts_With" [["prefix", Widget.Text_Input]])
builder.append (Option "Ends With" fqn+".Ends_With" [["suffix", Widget.Text_Input]])
builder.append (Option "Contains" fqn+".Contains" [["substring", Widget.Text_Input]])
builder.append (Option "Is Empty" fqn+".Is_Empty")
builder.append (Option "Is Not Empty" fqn+".Not_Empty")
builder.append (Option "Like" fqn+".Like" [["pattern", Widget.Text_Input]])
value_editor = Widget.Vector_Editor item_editor=with_all_types display=Display.Always item_default=''
builder.append (Option "Is In" fqn+".Is_In" [["values", value_editor]])
options = options_before.map constructor_name->
name = constructor_name.replace "_" " "
code = "(Filter_Condition." + constructor_name + ")"
[name, code]
make_single_choice options
## Specifies the action of a Filter_Condition.

View File

@ -174,8 +174,9 @@ type List
import Standard.Examples
example_any = Examples.list.any (x-> x%2 == 0)
@condition Filter_Condition.default_widget
any : (Filter_Condition | (Any -> Boolean)) -> Boolean
any self condition =
any self (condition : Filter_Condition | (Integer -> Boolean)) =
predicate = unify_condition_or_predicate condition
go list = case list of
Nil -> False
@ -205,8 +206,9 @@ type List
import Standard.Examples
example_all = Examples.list.all (x-> x%2 == 0)
@condition Filter_Condition.default_widget
all : (Filter_Condition | (Any -> Boolean)) -> Boolean
all self condition =
all self (condition : Filter_Condition | (Integer -> Boolean)) =
predicate = unify_condition_or_predicate condition
self . any (predicate >> .not) . not
@ -271,8 +273,9 @@ type List
example_filter = Examples.list.filter (< 3)
example_filter = Examples.list.filter (Filter_Condition.Less than=3)
@filter Filter_Condition.default_widget
filter : (Filter_Condition | (Any -> Boolean)) -> List Any
filter self filter =
filter self (filter : Filter_Condition | (Any -> Boolean)) =
predicate = unify_condition_or_predicate filter
case self of
Nil -> Nil
@ -308,8 +311,9 @@ type List
Splitting a list into even and odd elements.
(Cons 1 (Cons 2 (Cons 3 Nil))).partition (x -> x % 2 == 0) == (Pair (Cons 2 Nil) (Cons 1 (Cons 3 Nil)))
@condition Filter_Condition.default_widget
partition : (Filter_Condition | (Any -> Boolean)) -> Pair (List Any) (List Any)
partition self condition =
partition self (condition : Filter_Condition | (Integer -> Boolean)) =
predicate = unify_condition_or_predicate condition
self.reverse.fold (Pair.new Nil Nil) acc-> elem->
case predicate elem of
@ -573,8 +577,9 @@ type List
import Standard.Examples
example_first = Examples.list.find (> 2)
@condition Filter_Condition.default_widget
find : (Filter_Condition | (Any -> Boolean)) -> Integer -> Any -> Any
find self condition (start : Integer = 0) ~if_missing=(Error.throw Not_Found) =
find self (condition : Filter_Condition | (Integer -> Boolean)) (start : Integer = 0) ~if_missing=(Error.throw Not_Found) =
predicate = unify_condition_or_predicate condition
case start.signum of
-1 ->
@ -602,6 +607,7 @@ type List
Finding a first element of the vector that is larger than 3.
[1, 2, 3, 4, 5].find (> 3)
@condition Filter_Condition.default_widget
index_of : (Any | Filter_Condition | (Any -> Boolean)) -> Integer -> Integer | Nothing
index_of self condition (start : Integer = 0) = case start.signum of
-1 ->
@ -631,6 +637,7 @@ type List
Finding a first element of the vector that is larger than 3.
[1, 2, 3, 4, 5].find (> 3)
@condition Filter_Condition.default_widget
last_index_of : (Any | Filter_Condition | (Any -> Boolean)) -> Integer -> Integer | Nothing
last_index_of self condition (start : Integer = -1) = case self of
Nil -> if start == -1 || start == 0 then Nothing else Error.throw (Index_Out_Of_Bounds.Error start 0)

View File

@ -196,7 +196,7 @@ type Range
(0.up_to 7).filter (Filter_Condition.Greater than=3)
@filter range_default_filter_condition_widget
filter : (Filter_Condition | (Integer -> Boolean)) -> Vector Integer
filter self filter =
filter self (filter : Filter_Condition | (Integer -> Boolean)) =
predicate = unify_condition_or_predicate filter
Vector.build builder->
self.map elem->
@ -224,7 +224,7 @@ type Range
(0.up_to 4).partition (x-> x%2 == 0) == (Pair [0, 2] [1, 3])
@condition range_default_filter_condition_widget
partition : (Filter_Condition | (Integer -> Boolean)) -> Pair (Vector Integer) (Vector Integer)
partition self condition =
partition self (condition : Filter_Condition | (Integer -> Boolean)) =
predicate = unify_condition_or_predicate condition
pair = self.fold (Pair.new Vector.new_builder Vector.new_builder) acc-> elem->
case predicate elem of
@ -369,7 +369,7 @@ type Range
10.up_to 100 . with_step 2 . all (x-> x%2 == 0)
@condition range_default_filter_condition_widget
all : (Filter_Condition | (Integer -> Boolean)) -> Boolean
all self condition =
all self (condition : Filter_Condition | (Integer -> Boolean)) =
predicate = unify_condition_or_predicate condition
self.any (predicate >> .not) . not
@ -392,7 +392,7 @@ type Range
1.up_to 100 . any (x-> x%2 == 0)
@condition range_default_filter_condition_widget
any : (Filter_Condition | (Integer -> Boolean)) -> Boolean
any self condition = self.find condition . is_nothing . not
any self (condition : Filter_Condition | (Integer -> Boolean)) = self.find condition . is_nothing . not
## GROUP Selections
ICON find
@ -417,7 +417,7 @@ type Range
1.up_to 100 . find (Filter_Condition.Greater than=10)
@condition range_default_filter_condition_widget
find : (Filter_Condition | (Integer -> Boolean)) -> Integer -> Any -> Any
find self condition (start : Integer = 0) ~if_missing=Nothing =
find self (condition : Filter_Condition | (Integer -> Boolean)) (start : Integer = 0) ~if_missing=Nothing =
predicate = unify_condition_or_predicate condition
check_start_valid start self used_start->
result = find_internal self used_start predicate

View File

@ -255,7 +255,7 @@ type Date_Range
(Date.new 2020 10 01).up_to (Date.new 2020 10 15) . filter (Filter_Condition.Greater than=(Date.new 2020 10 12))
@filter date_range_default_filter_condition_widget
filter : (Filter_Condition | (Date -> Boolean)) -> Vector Date
filter self filter =
filter self (filter : Filter_Condition | (Date -> Boolean)) =
predicate = unify_condition_or_predicate filter
Vector.build builder->
self.map elem->
@ -285,7 +285,7 @@ type Date_Range
(Date.new 2020 10 01).up_to (Date.new 2020 10 15).partition is_weekend
@condition date_range_default_filter_condition_widget
partition : (Filter_Condition | (Date -> Boolean)) -> Pair (Vector Date) (Vector Date)
partition self condition =
partition self (condition : Filter_Condition | (Date -> Boolean)) =
predicate = unify_condition_or_predicate condition
pair = self.fold (Pair.new Vector.new_builder Vector.new_builder) acc-> elem->
case predicate elem of
@ -382,7 +382,7 @@ type Date_Range
(Date.new 2020 10 01).up_to (Date.new 2020 10 31) . all (> (Date.new 2020 10 01))
@condition date_range_default_filter_condition_widget
all : (Filter_Condition | (Date -> Boolean)) -> Boolean
all self condition =
all self (condition : Filter_Condition | (Date -> Boolean)) =
predicate = unify_condition_or_predicate condition
self . any (predicate >> .not) . not
@ -400,7 +400,7 @@ type Date_Range
(Date.new 2020 10 01).up_to (Date.new 2020 10 31) . any (Filter_Condition.Greater (Date.new 2020 10 01))
@condition date_range_default_filter_condition_widget
any : (Filter_Condition | (Date -> Boolean)) -> Boolean
any self condition = self.find condition . is_nothing . not
any self (condition : Filter_Condition | (Date -> Boolean)) = self.find condition . is_nothing . not
## GROUP Selections
ICON find
@ -420,7 +420,7 @@ type Date_Range
(Date.new 2020 10 01).up_to (Date.new 2020 10 31) . find (d-> d.day_of_week == Day_Of_Week.Monday)
@condition date_range_default_filter_condition_widget
find : (Filter_Condition | (Date -> Boolean)) -> Integer -> Any -> Any
find self condition (start : Integer = 0) ~if_missing=Nothing =
find self (condition : Filter_Condition | (Date -> Boolean)) (start : Integer = 0) ~if_missing=Nothing =
predicate = unify_condition_or_predicate condition
index = self.index_of predicate start
case index of

View File

@ -377,8 +377,9 @@ type Vector a
Finding a first element of the vector that is larger than 3.
[1, 2, 3, 4, 5].find (> 3)
@condition Filter_Condition.default_widget
find : (Filter_Condition | (Any -> Boolean)) -> Integer -> Any -> Any
find self condition (start : Integer = 0) ~if_missing=(Error.throw Not_Found) =
find self (condition : Filter_Condition | (Any -> Boolean)) (start : Integer = 0) ~if_missing=(Error.throw Not_Found) =
Array_Like_Helpers.find self condition start if_missing
## GROUP Values
@ -401,6 +402,7 @@ type Vector a
Find the index of the first palindrome.
["ab", "abab", "aba", "bbb"].index_of (s-> s == s.reverse) == 2
@condition Filter_Condition.default_widget
index_of : (Any | Filter_Condition | (Any -> Boolean)) -> Integer -> Integer | Nothing
index_of self condition (start : Integer = 0) =
Array_Like_Helpers.index_of self condition start
@ -425,6 +427,7 @@ type Vector a
Find the index of the last palindrome.
["ab", "abab", "aba", "bbb"].last_index_of (s-> s == s.reverse) == 3
@condition Filter_Condition.default_widget
last_index_of : (Any | Filter_Condition | (Any -> Boolean)) -> Integer -> Integer | Nothing
last_index_of self condition (start : Integer = -1) =
Array_Like_Helpers.last_index_of self condition start
@ -446,8 +449,9 @@ type Vector a
Checking if any element of the vector is even.
[1, 2, 3, 4, 5].any (x-> x%2 == 0)
@condition Filter_Condition.default_widget
any : (Filter_Condition | (Any -> Boolean)) -> Boolean
any self condition =
any self (condition : Filter_Condition | (Any -> Boolean)) =
Array_Like_Helpers.any self condition
## GROUP Logical
@ -467,8 +471,9 @@ type Vector a
Check if all elements in the vector are even.
[-1, 1, 5, 8].all (x-> x%2 == 0)
@condition Filter_Condition.default_widget
all : (Filter_Condition | (Any -> Boolean)) -> Boolean
all self condition =
all self (condition : Filter_Condition | (Any -> Boolean)) =
Array_Like_Helpers.all self condition
## GROUP Logical
@ -522,8 +527,9 @@ type Vector a
[1, 2, 3, 4, 5].filter (> 3)
[1, 2, 3, 4, 5].filter (Filter_Condition.Greater than=3)
@filter Filter_Condition.default_widget
filter : (Filter_Condition | (Any -> Boolean)) -> Vector Any
filter self filter =
filter self (filter : Filter_Condition | (Any -> Boolean)) =
Array_Like_Helpers.filter self filter
## GROUP Selections
@ -568,8 +574,9 @@ type Vector a
Splitting a vector into even and odd elements.
[1, 2, 3, 4, 5].partition (x -> x % 2 == 0) == (Pair [2, 4] [1, 3, 5])
@condition Filter_Condition.default_widget
partition : (Filter_Condition | (Any -> Boolean)) -> Pair (Vector Any) (Vector Any)
partition self condition =
partition self (condition : Filter_Condition | (Any -> Boolean)) =
Array_Like_Helpers.partition self condition
## ICON preparation
@ -1392,8 +1399,9 @@ type Builder
- condition: A `Filter_Condition` or a function that takes a vector
element and returns a boolean value that says whether that value
satisfies a condition.
@condition Filter_Condition.default_widget
any : (Filter_Condition | (Any -> Boolean)) -> Boolean
any self condition =
any self (condition : Filter_Condition | (Any -> Boolean)) =
predicate = unify_condition_or_predicate condition
0.up_to self.length . any (idx -> (predicate (self.java_builder.get idx)))

View File

@ -35,7 +35,7 @@ type DB_Column
Represents a single column backed by a database.
Arguments:
- name: The name of the column.
- internal_name: The name of the column.
- connection: The connection with which the column is associated.
- sql_type_reference: Lazily computed SQL type of the column.
- expression: The expressions to apply to the column.
@ -47,7 +47,15 @@ type DB_Column
which they come. Combined expressions must come from the same context -
they must both have the same filtering, grouping etc. rules applied to be
able to be combined.
Value name:Text connection:(Connection | Any) sql_type_reference:SQL_Type_Reference expression:SQL_Expression context:Context
private Value internal_name:Text connection:(Connection | Any) sql_type_reference:SQL_Type_Reference expression:SQL_Expression context:Context
## The name of the column.
name : Text
name self = self.internal_name
## The name of the SQL Dialect used by the table.
dialect_name : Text
dialect_name self = self.connection.dialect.name
## PRIVATE
ADVANCED
@ -82,15 +90,14 @@ type DB_Column
to_table self =
DB_Table.Value self.name self.connection [self.as_internal] self.context
## ALIAS metadata
## ALIAS metadata, field info, column type
GROUP Standard.Base.Metadata
ICON metadata
Returns a Table describing this column's contents.
Returns a Table describing this column's contents and type.
The table behaves like `Table.info` - it lists the column name, the count
of non-null items and the value type.
The table behaves like `DB_Table.column_info.
info : Table
info self -> Table = self.to_table.info
info self -> Table = self.to_table.column_info
## GROUP Standard.Base.Input
ICON data_input
@ -1908,7 +1915,7 @@ adapt_unified_column column expected_type =
infer_return_type expression =
SQL_Type_Reference.new column.connection column.context expression
adapted = dialect.adapt_unified_column column.as_internal expected_type infer_return_type
DB_Column.Value name=column.name connection=column.connection sql_type_reference=adapted.sql_type_reference expression=adapted.expression context=column.context
DB_Column.Value column.name column.connection adapted.sql_type_reference adapted.expression column.context
## PRIVATE
Column.from (that:DB_Column) =

View File

@ -74,11 +74,19 @@ type DB_Table
Represents a column-oriented table data structure backed by a database.
Arguments:
- name: The name of the table.
- internal_name: The name of the table.
- connection: The connection with which the table is associated.
- internal_columns: The internal representation of the table columns.
- context: The context associated with this table.
Value name:Text connection:(Connection | Any) (internal_columns:(Vector Internal_Column)) context:Context
private Value internal_name:Text connection:(Connection | Any) (internal_columns:(Vector Internal_Column)) context:Context
## The name of the table.
name : Text
name self = self.internal_name
## The name of the SQL Dialect used by the table.
dialect_name : Text
dialect_name self = self.connection.dialect.name
## PRIVATE
ADVANCED
@ -154,31 +162,36 @@ type DB_Table
col = self.get selector if_missing=Nothing
if Nothing == col then if_missing else col.get index if_missing
## GROUP Standard.Base.Selections
## ALIAS first field
GROUP Standard.Base.Selections
ICON select_column
Gets the first column.
first_column : DB_Column ! Index_Out_Of_Bounds
first_column self = self.at 0
## GROUP Standard.Base.Selections
## ALIAS second field
GROUP Standard.Base.Selections
ICON select_column
Gets the second column
second_column : DB_Column ! Index_Out_Of_Bounds
second_column self = self.at 1
## GROUP Standard.Base.Selections
## ALIAS last field
GROUP Standard.Base.Selections
ICON select_column
Gets the last column
last_column : DB_Column ! Index_Out_Of_Bounds
last_column self = self.at -1
## GROUP Standard.Base.Metadata
## ALIAS field count
GROUP Standard.Base.Metadata
ICON metadata
Returns the number of columns in the table.
column_count : Integer
column_count self = self.internal_columns.length
## GROUP Standard.Base.Selections
## ALIAS select fields
GROUP Standard.Base.Selections
ICON select_column
Returns a new table with a chosen subset of columns, as specified by the
`columns`, from the input table. Any unmatched input columns will be
@ -234,7 +247,8 @@ type DB_Table
new_columns = self.columns_helper.select_columns columns case_sensitivity reorder error_on_missing_columns on_problems
self.updated_columns new_columns
## GROUP Standard.Base.Selections
## ALIAS select fields by type
GROUP Standard.Base.Selections
ICON select_column
Returns a new table with the chosen set of columns filtered by the type
of the column.
@ -245,13 +259,13 @@ type DB_Table
be selected. If `False`, columns with related types will also be
selected (i.e. ignore size, precision).
@types Widget_Helpers.make_value_type_vector_selector
select_by_type : Vector Value_Type -> Boolean -> Table
select_by_type self types:Vector strict:Boolean=False =
select_columns_by_type : Vector Value_Type -> Boolean -> Table
select_columns_by_type self types:Vector strict:Boolean=False =
helper = Table_Helpers.Table_Column_Helper.Value self.columns self.make_column self .read
new_columns = helper.select_by_type types strict
self.updated_columns (new_columns.map _.as_internal)
## ALIAS drop_columns
## ALIAS drop_columns, drop fields, remove fields
GROUP Standard.Base.Selections
ICON select_column
Returns a new table with the chosen set of columns, as specified by the
@ -305,7 +319,8 @@ type DB_Table
new_columns = self.columns_helper.remove_columns columns case_sensitivity error_on_missing_columns=error_on_missing_columns on_problems=on_problems
self.updated_columns new_columns
## GROUP Standard.Base.Selections
## ALIAS remove fields by type
GROUP Standard.Base.Selections
ICON select_column
Returns a new table with the chosen set of columns filtering out based
on the type of the column.
@ -316,13 +331,13 @@ type DB_Table
be removed. If `False`, columns with related types will also be
removed (i.e. ignore size, precision).
@types Widget_Helpers.make_value_type_vector_selector
remove_by_type : Vector Value_Type -> Boolean -> Table
remove_by_type self types:Vector strict:Boolean=False =
remove_columns_by_type : Vector Value_Type -> Boolean -> Table
remove_columns_by_type self types:Vector strict:Boolean=False =
helper = Table_Helpers.Table_Column_Helper.Value self.columns self.make_column self .read
new_columns = helper.remove_by_type types strict
self.updated_columns (new_columns.map _.as_internal)
## ALIAS select_missing_columns, select_na
## ALIAS select_missing_columns, select_na, select_blank_fields
GROUP Standard.Base.Selections
ICON select_column
@ -2570,15 +2585,15 @@ type DB_Table
to_sql : SQL_Statement
to_sql self = self.connection.dialect.generate_sql self.to_select_query
## ALIAS metadata
## ALIAS metadata, field info, column types
GROUP Standard.Base.Metadata
ICON metadata
Returns a Table describing this table's contents.
The table lists all columns, counts of non-null items and value types of
each column.
info : Table
info self =
column_info : Table
column_info self =
cols = self.internal_columns
count_query =
## Performing a subquery is the most robust way to handle both

View File

@ -29,7 +29,7 @@ make_aggregate_column : DB_Table -> Aggregate_Column -> Text -> Dialect -> (Any
make_aggregate_column table aggregate as dialect infer_return_type problem_builder =
is_non_empty_selector v = v.is_nothing.not && v.not_empty
simple_aggregate op_kind columns =
expression = SQL_Expression.Operation op_kind (columns.map .expression)
expression = SQL_Expression.Operation op_kind (columns.map c->c.expression)
sql_type_ref = infer_return_type op_kind columns expression
Internal_Column.Value as sql_type_ref expression

View File

@ -8,6 +8,7 @@ import Standard.Table.Internal.Vector_Builder.Vector_Builder
from Standard.Table import Aggregate_Column, Column, Value_Type
from Standard.Table.Aggregate_Column.Aggregate_Column import all
from Standard.Table.Errors import Inexact_Type_Coercion
from Standard.Table.Internal.Storage import get_storage_for_column
import project.Connection.Connection.Connection
import project.DB_Column.DB_Column
@ -258,7 +259,7 @@ type Postgres_Dialect
# We cannot have a specified scale and no precision, so special handling is needed for this:
case precision.is_nothing && scale.is_nothing.not of
True ->
needed_precision = column.java_column.getStorage.getMaxPrecisionStored
needed_precision = (get_storage_for_column column).getMaxPrecisionStored
new_type = case needed_precision <= 1000 of
# If the precision is small enough that our number will fit, we create a column with maximum supported precision.
True -> Value_Type.Decimal 1000 scale

View File

@ -137,7 +137,7 @@ type Column
Arguments:
- java_column: The internal representation of the column.
Value java_column
private Value java_column
## PRIVATE
ADVANCED
@ -2226,15 +2226,14 @@ type Column
to_table : Table
to_table self = Table.Value self.java_column.toTable
## ALIAS metadata
## ALIAS metadata, field info, column type
GROUP Standard.Base.Metadata
ICON metadata
Returns a Table describing this column's contents.
Returns a Table describing this column's contents and type.
The table behaves like `Table.info` - it lists the column name, the count
of non-null items and the value type.
The table behaves like `Table.column_info.
info : Table
info self = self.to_table.info
info self = self.to_table.column_info
## ALIAS order_by
GROUP Standard.Base.Selections

View File

@ -32,7 +32,7 @@ add_row_number table name from step group_by order_by on_problems =
False ->
ordering_columns = ordering.map c->c.column.java_column
directions = ordering.map c->c.associated_selector.direction.to_sign
grouping_java_columns = grouping_columns.map .java_column
grouping_java_columns = grouping_columns.map c->c.java_column
new_storage = Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
AddRowNumber.create_numbering from step grouping_java_columns ordering_columns directions java_problem_aggregator
Column.from_storage name new_storage

View File

@ -32,7 +32,7 @@ add_running table (statistic:Statistic=Statistic.Count) (of:Text|Integer=0) (as:
grouping_columns = table.columns_helper.select_columns_helper group_by Case_Sensitivity.Default True problem_builder
ordering = Table_Helpers.resolve_order_by table.columns order_by problem_builder
source_java_column = of_col.java_column
grouping_java_columns = grouping_columns.map .java_column
grouping_java_columns = grouping_columns.map c->c.java_column
ordering_java_columns = ordering.map c->
c.column.java_column
directions = ordering.map c->

View File

@ -233,7 +233,7 @@ java_aggregator name column =
Group_By c _ -> GroupByAggregator.new name c.java_column
Count _ -> CountAggregator.new name
Count_Distinct columns _ ignore_nothing ->
resolved = columns.map .java_column
resolved = columns.map c->c.java_column
CountDistinctAggregator.new name resolved ignore_nothing
Count_Not_Nothing c _ -> CountNothingAggregator.new name c.java_column False
Count_Nothing c _ -> CountNothingAggregator.new name c.java_column True

View File

@ -3,6 +3,7 @@ import Standard.Base.Errors.Common.Index_Out_Of_Bounds
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
import project.Column.Column
import project.Value_Type.Bits
import project.Value_Type.Value_Type
from project.Errors import Inexact_Type_Coercion
@ -94,3 +95,7 @@ from_value_type_strict value_type =
make_builder : StorageType -> Integer -> ProblemAggregator -> Java_Builder
make_builder storage initial_size java_problem_aggregator =
Java_Builder.getForType storage initial_size java_problem_aggregator
## PRIVATE
Helper function for tests to get the storage of a Column
get_storage_for_column column:Column = column.java_column.getStorage

View File

@ -116,35 +116,33 @@ make_fill_default_value_selector column_source=Nothing display=Display.Always ad
Make a filter condition selector.
make_filter_condition_selector : Table -> Display -> Widget
make_filter_condition_selector table display=Display.Always =
col_names = make_column_ref_by_name_selector table
with_all_types = make_column_ref_by_name_selector table add_text=True add_number=True add_boolean=True
with_number_text = make_column_ref_by_name_selector table add_text=True add_number=True
with_text = make_column_ref_by_name_selector table add_text=True
builder = Vector.new_builder
fqn = Meta.get_qualified_type_name Filter_Condition
builder.append (Option "Equals" fqn+".Equal" [["to", with_all_types]])
builder.append (Option "Not Equals" fqn+".Not_Equal" [["to", with_all_types]])
builder.append (Option "Less Than" fqn+".Less" [["than", with_number_text]])
builder.append (Option "Less Than Or Equal" fqn+".Equal_Or_Less" [["than", with_number_text]])
builder.append (Option "Greater Than" fqn+".Greater" [["than", with_number_text]])
builder.append (Option "Greater Than Or Equal" fqn+".Equal_Or_Greater" [["than", with_number_text]])
builder.append (Option "Between" fqn+".Between" [["lower", col_names], ["upper", with_number_text]])
builder.append (Option "Equals Ignore Case" fqn+".Equal_Ignore_Case" [["to", with_text]])
builder.append (Option "Starts With" fqn+".Starts_With" [["prefix", with_text]])
builder.append (Option "Ends With" fqn+".Ends_With" [["suffix", with_text]])
builder.append (Option "Contains" fqn+".Contains" [["substring", with_text]])
builder.append (Option "Is Nothing" fqn+".Is_Nothing")
builder.append (Option "Is Not Nothing" fqn+".Not_Nothing")
builder.append (Option "Is Finite" fqn+".Is_Finite")
builder.append (Option "Is Infinite" fqn+".Is_Infinite")
builder.append (Option "Is NaN" fqn+".Is_Nan")
builder.append (Option "Is True" fqn+".Is_True")
builder.append (Option "Is False" fqn+".Is_False")
builder.append (Option "Is Empty" fqn+".Is_Empty")
builder.append (Option "Is Not Empty" fqn+".Not_Empty")
builder.append (Option "Like" fqn+".Like" [["pattern", with_text]])
builder.append (Option "Is In" fqn+".Is_In")
builder.append (Option "Equals" "..Equal" [["to", with_all_types]])
builder.append (Option "Not Equals" "..Not_Equal" [["to", with_all_types]])
builder.append (Option "Less Than" "..Less" [["than", with_number_text]])
builder.append (Option "Less Than Or Equal" "..Equal_Or_Less" [["than", with_number_text]])
builder.append (Option "Greater Than" "..Greater" [["than", with_number_text]])
builder.append (Option "Greater Than Or Equal" "..Equal_Or_Greater" [["than", with_number_text]])
builder.append (Option "Between" "..Between" [["lower", with_number_text], ["upper", with_number_text]])
builder.append (Option "Equals Ignore Case" "..Equal_Ignore_Case" [["to", with_text]])
builder.append (Option "Starts With" "..Starts_With" [["prefix", with_text]])
builder.append (Option "Ends With" "..Ends_With" [["suffix", with_text]])
builder.append (Option "Contains" "..Contains" [["substring", with_text]])
builder.append (Option "Is Nothing" "..Is_Nothing")
builder.append (Option "Is Not Nothing" "..Not_Nothing")
builder.append (Option "Is Finite" "..Is_Finite")
builder.append (Option "Is Infinite" "..Is_Infinite")
builder.append (Option "Is NaN" "..Is_Nan")
builder.append (Option "Is True" "..Is_True")
builder.append (Option "Is False" "..Is_False")
builder.append (Option "Is Empty" "..Is_Empty")
builder.append (Option "Is Not Empty" "..Not_Empty")
builder.append (Option "Like" "..Like" [["pattern", with_text]])
builder.append (Option "Is In" "..Is_In")
Single_Choice builder.to_vector display=display
## PRIVATE

View File

@ -4,6 +4,7 @@ import Standard.Base.Metadata.Display
import Standard.Base.Metadata.Widget
from Standard.Base.Metadata.Choice import Option
from Standard.Base.Metadata.Widget import Single_Choice
from Standard.Base.Widget_Helpers import make_format_chooser
import project.Column_Ref.Column_Ref
import project.Expression.Expression
@ -87,7 +88,7 @@ type Simple_Expression
builder.append (Option "truncate" fqn+".Truncate")
builder.append (Option "min" fqn+".Min" [["rhs", with_number_text]])
builder.append (Option "max" fqn+".Max" [["rhs", with_number_text]])
builder.append (Option "date add" fqn+".Date_Add" [["length", col_names]])
builder.append (Option "date add" fqn+".Date_Add" [["length", with_number]])
builder.append (Option "date part" fqn+".Date_Part")
builder.append (Option "date diff" fqn+".Date_Diff" [["end", col_names]])
builder.append (Option "not" fqn+".Not")
@ -98,7 +99,7 @@ type Simple_Expression
builder.append (Option "text_left" fqn+".Text_Left" [["length", with_number]])
builder.append (Option "text_right" fqn+".Text_Right" [["length", with_number]])
builder.append (Option "text_length" fqn+".Text_Length")
builder.append (Option "format" fqn+".Format")
builder.append (Option "format" fqn+".Format" [["format", make_format_chooser include_number=True]])
fqn_column = Meta.get_qualified_type_name Simple_Expression
derived = Option "<Simple Expression>" fqn_column+".From" [["input", with_all_types], ["operation", Single_Choice builder.to_vector]]
@ -149,10 +150,10 @@ type Simple_Calculation
Max (rhs : Column_Ref|Expression|Any)
## Adds a period to a date/time column.
Date_Add (length : Column_Ref|Expression|Integer) (period : Date_Period|Time_Period = Date_Period.Day)
Date_Add (length : Column_Ref|Expression|Integer = 1) (period : Date_Period|Time_Period = Date_Period.Day)
## Returns part of a date/time column.
Date_Part (period : Date_Period|Time_Period)
Date_Part (period : Date_Period|Time_Period = Date_Period.Day)
## Returns the difference between two date/time columns.
Date_Diff (end : Column_Ref|Expression|Date_Time|Date|Time_Of_Day) (period:Date_Period|Time_Period = Date_Period.Day)

View File

@ -163,7 +163,7 @@ type Table
Arguments:
- java_table: The internal java representation of the table.
Value java_table
private Value java_table
## PRIVATE
ADVANCED
@ -310,31 +310,36 @@ type Table
col = self.get selector if_missing=Nothing
if Nothing == col then if_missing else col.get index if_missing
## GROUP Standard.Base.Selections
## ALIAS first field
GROUP Standard.Base.Selections
ICON select_column
Gets the first column.
first_column : Column ! Index_Out_Of_Bounds
first_column self = self.at 0
## GROUP Standard.Base.Selections
## ALIAS second field
GROUP Standard.Base.Selections
ICON select_column
Gets the second column
second_column : Column ! Index_Out_Of_Bounds
second_column self = self.at 1
## GROUP Standard.Base.Selections
## ALIAS last field
GROUP Standard.Base.Selections
ICON select_column
Gets the last column
last_column : Column ! Index_Out_Of_Bounds
last_column self = self.at -1
## GROUP Standard.Base.Metadata
## ALIAS field count
GROUP Standard.Base.Metadata
ICON metadata
Returns the number of columns in the table.
column_count : Integer
column_count self = self.java_table.getColumns.length
## GROUP Standard.Base.Selections
## ALIAS select fields
GROUP Standard.Base.Selections
ICON select_column
Returns a new table with a chosen subset of columns, as specified by the
`columns`, from the input table. Any unmatched input columns will be
@ -391,7 +396,8 @@ type Table
new_columns = self.columns_helper.select_columns columns case_sensitivity reorder error_on_missing_columns on_problems
Table.new new_columns
## GROUP Standard.Base.Selections
## ALIAS select fields by type
GROUP Standard.Base.Selections
ICON select_column
Returns a new table with the chosen set of columns filtered by the type
of the column.
@ -402,12 +408,12 @@ type Table
be selected. If `False`, columns with related types will also be
selected (i.e. ignore size, precision).
@types Widget_Helpers.make_value_type_vector_selector
select_by_type : Vector Value_Type -> Boolean -> Table
select_by_type self types:Vector strict:Boolean=False =
select_columns_by_type : Vector Value_Type -> Boolean -> Table
select_columns_by_type self types:Vector strict:Boolean=False =
new_columns = self.columns_helper.select_by_type types strict
Table.new new_columns
## ALIAS drop_columns
## ALIAS drop_columns, drop fields, remove fields
GROUP Standard.Base.Selections
ICON select_column
Returns a new table with the chosen set of columns, as specified by the
@ -461,7 +467,8 @@ type Table
new_columns = self.columns_helper.remove_columns columns case_sensitivity error_on_missing_columns=error_on_missing_columns on_problems=on_problems
Table.new new_columns
## GROUP Standard.Base.Selections
## ALIAS remove fields by type
GROUP Standard.Base.Selections
ICON select_column
Returns a new table with the chosen set of columns filtering out based
on the type of the column.
@ -472,12 +479,12 @@ type Table
be removed. If `False`, columns with related types will also be
removed (i.e. ignore size, precision).
@types Widget_Helpers.make_value_type_vector_selector
remove_by_type : Vector Value_Type -> Boolean -> Table
remove_by_type self types:Vector strict:Boolean=False =
remove_columns_by_type : Vector Value_Type -> Boolean -> Table
remove_columns_by_type self types:Vector strict:Boolean=False =
new_columns = self.columns_helper.remove_by_type types strict
Table.new new_columns
## ALIAS select_missing_columns, select_na
## ALIAS select_missing_columns, select_na, select_blank_fields
GROUP Standard.Base.Selections
ICON select_column
@ -787,7 +794,7 @@ type Table
validated = Aggregate_Column_Helper.prepare_aggregate_columns self.column_naming_helper normalized_group_by columns self error_on_missing_columns=error_on_missing_columns
on_problems.attach_problems_before validated.problems <| Illegal_Argument.handle_java_exception <|
java_key_columns = validated.key_columns.map .java_column
java_key_columns = validated.key_columns.map c->c.java_column
Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
index = self.java_table.indexFromColumns java_key_columns java_problem_aggregator
new_columns = validated.valid_columns.map c->(Aggregate_Column_Helper.java_aggregator c.first c.second)
@ -924,7 +931,7 @@ type Table
distinct self (columns = self.column_names) case_sensitivity=Case_Sensitivity.Default error_on_missing_columns=True on_problems=Report_Warning =
key_columns = self.columns_helper.select_columns columns Case_Sensitivity.Default reorder=True error_on_missing_columns=error_on_missing_columns on_problems=on_problems . catch No_Output_Columns _->
Error.throw No_Input_Columns_Selected
java_columns = key_columns.map .java_column
java_columns = key_columns.map c->c.java_column
text_folding_strategy = Case_Sensitivity.folding_strategy case_sensitivity
java_table = Illegal_Argument.handle_java_exception <|
Java_Problems.with_problem_aggregator on_problems java_aggregator->
@ -2381,7 +2388,7 @@ type Table
Problem_Behavior.Report_Warning.attach_problem_after truncated <|
Not_All_Rows_Downloaded.Warning max_rows
## ALIAS metadata
## ALIAS metadata, field info, column types
GROUP Standard.Base.Metadata
ICON metadata
Returns a Table describing this table's contents.
@ -2394,9 +2401,9 @@ type Table
import Standard.Examples
example_info = Examples.inventory_table.info
info : Table
info self =
example_info = Examples.inventory_table.column_info
column_info : Table
column_info self =
cols = self.columns
Table.new [["Column", cols.map .name], ["Items Count", cols.map .count], ["Value Type", cols.map .value_type]]
@ -2461,9 +2468,9 @@ type Table
selected_names = Map.from_vector (id_columns.map column-> [column.name, True])
data = columns_helper.internal_columns.filter column->(selected_names.get column.name False . not)
java_data = data.map .java_column
java_data = data.map c->c.java_column
java_id = id_columns.map .java_column
java_id = id_columns.map c->c.java_column
unique.mark_used (id_columns.map .name)
java_table = Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
@ -2560,7 +2567,7 @@ type Table
resolved_values.filter c->(c!=Nothing)
problem_builder.attach_problems_before on_problems <| Illegal_Argument.handle_java_exception <|
java_key_columns = grouping.map .java_column
java_key_columns = grouping.map c->c.java_column
name_mapper = if matched_name.is_empty then Aggregate_Column_Helper.default_aggregate_column_name else
if validated_values.length == 1 then (_ -> "") else
@ -2749,10 +2756,10 @@ type Table
columns_helper = self.columns_helper
problem_builder = Problem_Builder.new error_on_missing_columns=True
resolved_element_columns = columns_helper.select_columns_helper element_columns Case_Sensitivity.Default False problem_builder
java_element_columns = resolved_element_columns.map .java_column
java_element_columns = resolved_element_columns.map c->c.java_column
resolved_attribute_columns = columns_helper.select_columns_helper attribute_columns Case_Sensitivity.Default False problem_builder
java_attribute_column = resolved_attribute_columns.map .java_column
java_attribute_column = resolved_attribute_columns.map c->c.java_column
resolved_value_column = if value_column.is_nothing then Nothing else (self.at value_column)
java_value_column = if value_column.is_nothing then Nothing else resolved_value_column.java_column
@ -2911,8 +2918,7 @@ type Table
column_naming_helper : Column_Naming_Helper
column_naming_helper self = Column_Naming_Helper.in_memory
## ALIAS from
GROUP Standard.Base.Calculations
## GROUP Standard.Base.Calculations
ICON select_column
Appends records from the vector of tables into a single table.

View File

@ -26,5 +26,5 @@ prepare_visualization x =
mapped = interpolations.map value->
enso_type = Meta.get_qualified_type_name value
JS_Object.from_pairs [["value", value], ["enso_type", enso_type]]
dialect = x.connection.dialect.name
dialect = x.dialect_name
JS_Object.from_pairs [["dialect", dialect], ["code", code], ["interpolations", mapped]] . to_text

View File

@ -185,8 +185,21 @@ make_json_for_value val level=0 = case val of
truncated = val.keys.take 5 . map k-> k.to_text + ": " + (make_json_for_value (val.get k) level+1).to_text
prepared = if val.length > 5 then truncated + ["… " + (val.length - 5).to_text+ " items"] else truncated
"{" + (prepared.join ", ") + "}"
_ : Column -> make_json_for_value val.to_table level
_ : DB_Column -> make_json_for_value val.to_table level
_ : Column ->
if level != 0 then "Column{" +val.name + ": " + val.row_count + " rows}" else
items = make_json_for_value val.to_vector level
"Column{" + val.name + ": " + items + "}"
_ : Row ->
if level != 0 then "Row{" + val.table.column_count + " columns}" else
truncated = val.table.column_names.take 5 . map _.to_text
prepared = if val.table.column_count > 5 then truncated + ["… " + (val.table.column_count - 5).to_text+ " more"] else truncated
"Row{" + (prepared.join ", ") + "}"
_ : DB_Column ->
if level != 0 then "Column{" +val.name + ": " + val.row_count + " rows}" else
materialise = val.read 5 warn_if_more_rows=False
truncated = materialise . map k-> k.to_text + ": " + (make_json_for_value (val.get k) level+1).to_text
prepared = if val.length > 5 then truncated + ["… " + (val.length - 5).to_text+ " items"] else truncated
"Column{" + val.name + ": " + prepared + "}"
_ : Table ->
if level != 0 then "Table{" + val.row_count + " rows x " + val.column_count + " columns}" else
truncated = val.columns.take 5 . map _.name

View File

@ -1,6 +1,6 @@
package org.enso.languageserver.requesthandler.executioncontext
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import com.typesafe.scalalogging.LazyLogging
import org.enso.jsonrpc._
import org.enso.languageserver.data.{
@ -8,15 +8,19 @@ import org.enso.languageserver.data.{
CapabilityRegistration,
ReceivesUpdates
}
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.ExecutionApi._
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
ExecutionApi,
RuntimeFailureMapper
}
import org.enso.languageserver.session.JsonSession
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{
RequestHandlerWithRetries,
UnhandledLogging
}
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for `executionContext/create` commands.
@ -29,51 +33,52 @@ class CreateHandler(
timeout: FiniteDuration,
contextRegistry: ActorRef,
session: JsonSession
) extends Actor
) extends RequestHandlerWithRetries[
Request[ExecutionContextCreate.type, ExecutionContextCreate.Params],
ContextRegistryProtocol.CreateContextResponse,
ContextRegistryProtocol.Failure,
ContextRegistryProtocol.CreateContextRequest
](contextRegistry, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import ContextRegistryProtocol._
import context.dispatcher
override protected def request(
msg: Request[
ExecutionApi.ExecutionContextCreate.type,
ExecutionContextCreate.Params
]
): ContextRegistryProtocol.CreateContextRequest =
ContextRegistryProtocol.CreateContextRequest(session, msg.params.contextId)
override def receive: Receive = requestStage
private def requestStage: Receive = {
case Request(
ExecutionContextCreate,
id,
params: ExecutionContextCreate.Params
) =>
contextRegistry ! CreateContextRequest(session, params.contextId)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(id, sender(), cancellable))
}
private def responseStage(
id: Id,
protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
logger.error("Request [{}] timed out.", id)
replyTo ! ResponseError(Some(id), Errors.RequestTimeout)
context.stop(self)
case CreateContextResponse(contextId) =>
val canModify = CapabilityRegistration(CanModify(contextId))
val receivesUpdates = CapabilityRegistration(ReceivesUpdates(contextId))
val result =
ExecutionContextCreate.Result(contextId, canModify, receivesUpdates)
replyTo ! ResponseResult(ExecutionContextCreate, id, result)
cancellable.cancel()
context.stop(self)
case error: ContextRegistryProtocol.Failure =>
replyTo ! ResponseError(Some(id), RuntimeFailureMapper.mapFailure(error))
cancellable.cancel()
context.stop(self)
initialMsg: Request[
ExecutionApi.ExecutionContextCreate.type,
ExecutionContextCreate.Params
],
msg: ContextRegistryProtocol.CreateContextResponse
): Unit = {
val contextId = msg.contextId
val canModify = CapabilityRegistration(CanModify(contextId))
val receivesUpdates = CapabilityRegistration(ReceivesUpdates(contextId))
val result =
ExecutionContextCreate.Result(contextId, canModify, receivesUpdates)
replyTo ! ResponseResult(ExecutionContextCreate, initialMsg.id, result)
}
override protected def negativeResponse(
replyTo: ActorRef,
initialMsg: Request[
ExecutionApi.ExecutionContextCreate.type,
ExecutionContextCreate.Params
],
error: ContextRegistryProtocol.Failure
)(implicit ec: ExecutionContext): Unit =
replyTo ! ResponseError(
Some(initialMsg.id),
RuntimeFailureMapper.mapFailure(error)
)
}
object CreateHandler {

View File

@ -1,17 +1,21 @@
package org.enso.languageserver.requesthandler.executioncontext
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import com.typesafe.scalalogging.LazyLogging
import org.enso.jsonrpc._
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.ExecutionApi._
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
ExecutionApi,
RuntimeFailureMapper
}
import org.enso.languageserver.session.JsonSession
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{
RequestHandlerWithRetries,
UnhandledLogging
}
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for `executionContext/destroy` commands.
@ -24,47 +28,46 @@ class DestroyHandler(
timeout: FiniteDuration,
contextRegistry: ActorRef,
session: JsonSession
) extends Actor
) extends RequestHandlerWithRetries[
Request[ExecutionContextDestroy.type, ExecutionContextDestroy.Params],
ContextRegistryProtocol.DestroyContextResponse,
ContextRegistryProtocol.Failure,
ContextRegistryProtocol.DestroyContextRequest
](contextRegistry, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import ContextRegistryProtocol._
import context.dispatcher
override protected def request(
msg: Request[
ExecutionApi.ExecutionContextDestroy.type,
ExecutionContextDestroy.Params
]
): ContextRegistryProtocol.DestroyContextRequest =
ContextRegistryProtocol.DestroyContextRequest(session, msg.params.contextId)
override def receive: Receive = requestStage
private def requestStage: Receive = {
case Request(
ExecutionContextDestroy,
id,
params: ExecutionContextDestroy.Params
) =>
contextRegistry ! DestroyContextRequest(session, params.contextId)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(id, sender(), cancellable))
}
private def responseStage(
id: Id,
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
logger.error("Request [{}] timed out.", id)
replyTo ! ResponseError(Some(id), Errors.RequestTimeout)
context.stop(self)
initialMsg: Request[
ExecutionApi.ExecutionContextDestroy.type,
ExecutionContextDestroy.Params
],
msg: ContextRegistryProtocol.DestroyContextResponse
): Unit =
replyTo ! ResponseResult(ExecutionContextDestroy, initialMsg.id, Unused)
case DestroyContextResponse(_) =>
replyTo ! ResponseResult(ExecutionContextDestroy, id, Unused)
cancellable.cancel()
context.stop(self)
case error: ContextRegistryProtocol.Failure =>
replyTo ! ResponseError(Some(id), RuntimeFailureMapper.mapFailure(error))
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(
replyTo: ActorRef,
initialMsg: Request[
ExecutionApi.ExecutionContextDestroy.type,
ExecutionContextDestroy.Params
],
error: ContextRegistryProtocol.Failure
)(implicit ec: ExecutionContext): Unit =
replyTo ! ResponseError(
Some(initialMsg.id),
RuntimeFailureMapper.mapFailure(error)
)
}
object DestroyHandler {

View File

@ -1,17 +1,21 @@
package org.enso.languageserver.requesthandler.executioncontext
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import com.typesafe.scalalogging.LazyLogging
import org.enso.jsonrpc._
import org.enso.languageserver.data.ClientId
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.ExecutionApi._
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
ExecutionApi,
RuntimeFailureMapper
}
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{
RequestHandlerWithRetries,
UnhandledLogging
}
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for `executionContext/getComponentGroups` commands.
@ -24,51 +28,56 @@ class GetComponentGroupsHandler(
timeout: FiniteDuration,
contextRegistry: ActorRef,
clientId: ClientId
) extends Actor
) extends RequestHandlerWithRetries[
Request[
ExecutionContextGetComponentGroups.type,
ExecutionContextGetComponentGroups.Params
],
ContextRegistryProtocol.GetComponentGroupsResponse,
ContextRegistryProtocol.Failure,
ContextRegistryProtocol.GetComponentGroupsRequest
](contextRegistry, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import ContextRegistryProtocol._
import context.dispatcher
override protected def request(
msg: Request[
ExecutionApi.ExecutionContextGetComponentGroups.type,
ExecutionContextGetComponentGroups.Params
]
): ContextRegistryProtocol.GetComponentGroupsRequest =
ContextRegistryProtocol.GetComponentGroupsRequest(
clientId,
msg.params.contextId
)
override def receive: Receive = requestStage
private def requestStage: Receive = {
case Request(
ExecutionContextGetComponentGroups,
id,
params: ExecutionContextGetComponentGroups.Params
) =>
contextRegistry ! GetComponentGroupsRequest(clientId, params.contextId)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(id, sender(), cancellable))
}
private def responseStage(
id: Id,
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
logger.error("Request [{}] timed out.", id)
replyTo ! ResponseError(Some(id), Errors.RequestTimeout)
context.stop(self)
initialMsg: Request[
ExecutionApi.ExecutionContextGetComponentGroups.type,
ExecutionContextGetComponentGroups.Params
],
msg: ContextRegistryProtocol.GetComponentGroupsResponse
): Unit =
replyTo ! ResponseResult(
ExecutionContextGetComponentGroups,
initialMsg.id,
ExecutionContextGetComponentGroups.Result(msg.componentGroups)
)
case GetComponentGroupsResponse(componentGroups) =>
replyTo ! ResponseResult(
ExecutionContextGetComponentGroups,
id,
ExecutionContextGetComponentGroups.Result(componentGroups)
)
cancellable.cancel()
context.stop(self)
case error: ContextRegistryProtocol.Failure =>
replyTo ! ResponseError(Some(id), RuntimeFailureMapper.mapFailure(error))
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(
replyTo: ActorRef,
initialMsg: Request[
ExecutionApi.ExecutionContextGetComponentGroups.type,
ExecutionContextGetComponentGroups.Params
],
error: ContextRegistryProtocol.Failure
)(implicit ec: ExecutionContext): Unit =
replyTo ! ResponseError(
Some(initialMsg.id),
RuntimeFailureMapper.mapFailure(error)
)
}
object GetComponentGroupsHandler {

View File

@ -1,17 +1,21 @@
package org.enso.languageserver.requesthandler.executioncontext
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import com.typesafe.scalalogging.LazyLogging
import org.enso.jsonrpc._
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.ExecutionApi._
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
ExecutionApi,
RuntimeFailureMapper
}
import org.enso.languageserver.session.JsonSession
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{
RequestHandlerWithRetries,
UnhandledLogging
}
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for `executionContext/interrupt` commands.
@ -24,49 +28,49 @@ class InterruptHandler(
timeout: FiniteDuration,
contextRegistry: ActorRef,
session: JsonSession
) extends Actor
) extends RequestHandlerWithRetries[
Request[ExecutionContextInterrupt.type, ExecutionContextInterrupt.Params],
ContextRegistryProtocol.InterruptContextResponse,
ContextRegistryProtocol.Failure,
ContextRegistryProtocol.InterruptContextRequest
](contextRegistry, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import context.dispatcher
override protected def request(
msg: Request[
ExecutionApi.ExecutionContextInterrupt.type,
ExecutionContextInterrupt.Params
]
): ContextRegistryProtocol.InterruptContextRequest =
ContextRegistryProtocol.InterruptContextRequest(
session,
msg.params.contextId
)
override def receive: Receive = requestStage
private def requestStage: Receive = {
case Request(
ExecutionContextInterrupt,
id,
params: ExecutionContextInterrupt.Params
) =>
contextRegistry ! ContextRegistryProtocol.InterruptContextRequest(
session,
params.contextId
)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(id, sender(), cancellable))
}
private def responseStage(
id: Id,
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
logger.error("Request [{}] timed out.", id)
replyTo ! ResponseError(Some(id), Errors.RequestTimeout)
context.stop(self)
initialMsg: Request[
ExecutionApi.ExecutionContextInterrupt.type,
ExecutionContextInterrupt.Params
],
msg: ContextRegistryProtocol.InterruptContextResponse
): Unit =
replyTo ! ResponseResult(ExecutionContextInterrupt, initialMsg.id, Unused)
case ContextRegistryProtocol.InterruptContextResponse(_) =>
replyTo ! ResponseResult(ExecutionContextInterrupt, id, Unused)
cancellable.cancel()
context.stop(self)
case error: ContextRegistryProtocol.Failure =>
replyTo ! ResponseError(Some(id), RuntimeFailureMapper.mapFailure(error))
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(
replyTo: ActorRef,
initialMsg: Request[
ExecutionApi.ExecutionContextInterrupt.type,
ExecutionContextInterrupt.Params
],
error: ContextRegistryProtocol.Failure
)(implicit ec: ExecutionContext): Unit =
replyTo ! ResponseError(
Some(initialMsg.id),
RuntimeFailureMapper.mapFailure(error)
)
}
object InterruptHandler {

View File

@ -1,17 +1,21 @@
package org.enso.languageserver.requesthandler.executioncontext
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import com.typesafe.scalalogging.LazyLogging
import org.enso.jsonrpc._
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.ExecutionApi._
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
ExecutionApi,
RuntimeFailureMapper
}
import org.enso.languageserver.session.JsonSession
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{
RequestHandlerWithRetries,
UnhandledLogging
}
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for `executionContext/push` commands.
@ -24,43 +28,46 @@ class PopHandler(
timeout: FiniteDuration,
contextRegistry: ActorRef,
session: JsonSession
) extends Actor
) extends RequestHandlerWithRetries[
Request[ExecutionContextPop.type, ExecutionContextPop.Params],
ContextRegistryProtocol.PopContextResponse,
ContextRegistryProtocol.Failure,
ContextRegistryProtocol.PopContextRequest
](contextRegistry, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import ContextRegistryProtocol._
import context.dispatcher
override protected def request(
msg: Request[
ExecutionApi.ExecutionContextPop.type,
ExecutionContextPop.Params
]
): ContextRegistryProtocol.PopContextRequest =
ContextRegistryProtocol.PopContextRequest(session, msg.params.contextId)
override def receive: Receive = requestStage
private def requestStage: Receive = {
case Request(ExecutionContextPop, id, params: ExecutionContextPop.Params) =>
contextRegistry ! PopContextRequest(session, params.contextId)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(id, sender(), cancellable))
}
private def responseStage(
id: Id,
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
logger.error("Request [{}] timed out.", id)
replyTo ! ResponseError(Some(id), Errors.RequestTimeout)
context.stop(self)
initialMsg: Request[
ExecutionApi.ExecutionContextPop.type,
ExecutionContextPop.Params
],
msg: ContextRegistryProtocol.PopContextResponse
): Unit =
replyTo ! ResponseResult(ExecutionContextPop, initialMsg.id, Unused)
case PopContextResponse(_) =>
replyTo ! ResponseResult(ExecutionContextPop, id, Unused)
cancellable.cancel()
context.stop(self)
case error: ContextRegistryProtocol.Failure =>
replyTo ! ResponseError(Some(id), RuntimeFailureMapper.mapFailure(error))
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(
replyTo: ActorRef,
initialMsg: Request[
ExecutionApi.ExecutionContextPop.type,
ExecutionContextPop.Params
],
error: ContextRegistryProtocol.Failure
)(implicit ec: ExecutionContext): Unit =
replyTo ! ResponseError(
Some(initialMsg.id),
RuntimeFailureMapper.mapFailure(error)
)
}
object PopHandler {

View File

@ -1,17 +1,21 @@
package org.enso.languageserver.requesthandler.executioncontext
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import com.typesafe.scalalogging.LazyLogging
import org.enso.jsonrpc._
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.ExecutionApi._
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
ExecutionApi,
RuntimeFailureMapper
}
import org.enso.languageserver.session.JsonSession
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{
RequestHandlerWithRetries,
UnhandledLogging
}
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for `executionContext/push` commands.
@ -24,51 +28,50 @@ class PushHandler(
timeout: FiniteDuration,
contextRegistry: ActorRef,
session: JsonSession
) extends Actor
) extends RequestHandlerWithRetries[
Request[ExecutionContextPush.type, ExecutionContextPush.Params],
ContextRegistryProtocol.PushContextResponse,
ContextRegistryProtocol.Failure,
ContextRegistryProtocol.PushContextRequest
](contextRegistry, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import ContextRegistryProtocol._
import context.dispatcher
override protected def request(
msg: Request[
ExecutionApi.ExecutionContextPush.type,
ExecutionContextPush.Params
]
): ContextRegistryProtocol.PushContextRequest =
ContextRegistryProtocol.PushContextRequest(
session,
msg.params.contextId,
msg.params.stackItem
)
override def receive: Receive = requestStage
private def requestStage: Receive = {
case Request(
ExecutionContextPush,
id,
params: ExecutionContextPush.Params
) =>
contextRegistry ! PushContextRequest(
session,
params.contextId,
params.stackItem
)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(id, sender(), cancellable))
}
private def responseStage(
id: Id,
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
logger.error("Request [{}] timed out.", id)
replyTo ! ResponseError(Some(id), Errors.RequestTimeout)
context.stop(self)
initialMsg: Request[
ExecutionApi.ExecutionContextPush.type,
ExecutionContextPush.Params
],
msg: ContextRegistryProtocol.PushContextResponse
): Unit =
replyTo ! ResponseResult(ExecutionContextPush, initialMsg.id, Unused)
case PushContextResponse(_) =>
replyTo ! ResponseResult(ExecutionContextPush, id, Unused)
cancellable.cancel()
context.stop(self)
case error: ContextRegistryProtocol.Failure =>
replyTo ! ResponseError(Some(id), RuntimeFailureMapper.mapFailure(error))
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(
replyTo: ActorRef,
initialMsg: Request[
ExecutionApi.ExecutionContextPush.type,
ExecutionContextPush.Params
],
error: ContextRegistryProtocol.Failure
)(implicit ec: ExecutionContext): Unit =
replyTo ! ResponseError(
Some(initialMsg.id),
RuntimeFailureMapper.mapFailure(error)
)
}
object PushHandler {

View File

@ -1,17 +1,21 @@
package org.enso.languageserver.requesthandler.executioncontext
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import com.typesafe.scalalogging.LazyLogging
import org.enso.jsonrpc._
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.ExecutionApi._
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
ExecutionApi,
RuntimeFailureMapper
}
import org.enso.languageserver.session.JsonSession
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{
RequestHandlerWithRetries,
UnhandledLogging
}
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for `executionContext/recompute` commands.
@ -24,52 +28,51 @@ class RecomputeHandler(
timeout: FiniteDuration,
contextRegistry: ActorRef,
session: JsonSession
) extends Actor
) extends RequestHandlerWithRetries[
Request[ExecutionContextRecompute.type, ExecutionContextRecompute.Params],
ContextRegistryProtocol.RecomputeContextResponse,
ContextRegistryProtocol.Failure,
ContextRegistryProtocol.RecomputeContextRequest
](contextRegistry, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import ContextRegistryProtocol._
import context.dispatcher
override protected def request(
msg: Request[
ExecutionApi.ExecutionContextRecompute.type,
ExecutionContextRecompute.Params
]
): ContextRegistryProtocol.RecomputeContextRequest =
ContextRegistryProtocol.RecomputeContextRequest(
session,
msg.params.contextId,
msg.params.invalidatedExpressions,
msg.params.executionEnvironment
)
override def receive: Receive = requestStage
private def requestStage: Receive = {
case Request(
ExecutionContextRecompute,
id,
params: ExecutionContextRecompute.Params
) =>
contextRegistry ! RecomputeContextRequest(
session,
params.contextId,
params.invalidatedExpressions,
params.executionEnvironment
)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(id, sender(), cancellable))
}
private def responseStage(
id: Id,
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
logger.error("Request [{}] timed out.", id)
replyTo ! ResponseError(Some(id), Errors.RequestTimeout)
context.stop(self)
initialMsg: Request[
ExecutionApi.ExecutionContextRecompute.type,
ExecutionContextRecompute.Params
],
msg: ContextRegistryProtocol.RecomputeContextResponse
): Unit =
replyTo ! ResponseResult(ExecutionContextRecompute, initialMsg.id, Unused)
case RecomputeContextResponse(_) =>
replyTo ! ResponseResult(ExecutionContextRecompute, id, Unused)
cancellable.cancel()
context.stop(self)
case error: ContextRegistryProtocol.Failure =>
replyTo ! ResponseError(Some(id), RuntimeFailureMapper.mapFailure(error))
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(
replyTo: ActorRef,
initialMsg: Request[
ExecutionApi.ExecutionContextRecompute.type,
ExecutionContextRecompute.Params
],
error: ContextRegistryProtocol.Failure
)(implicit ec: ExecutionContext): Unit =
replyTo ! ResponseError(
Some(initialMsg.id),
RuntimeFailureMapper.mapFailure(error)
)
}
object RecomputeHandler {

View File

@ -1,17 +1,21 @@
package org.enso.languageserver.requesthandler.executioncontext
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import com.typesafe.scalalogging.LazyLogging
import org.enso.jsonrpc._
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.ExecutionApi._
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
ExecutionApi,
RuntimeFailureMapper
}
import org.enso.languageserver.session.JsonSession
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{
RequestHandlerWithRetries,
UnhandledLogging
}
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for `executionContext/setExecutionEnvironment` commands.
@ -24,51 +28,53 @@ class SetExecutionEnvironmentHandler(
timeout: FiniteDuration,
contextRegistry: ActorRef,
session: JsonSession
) extends Actor
) extends RequestHandlerWithRetries[
Request[
ExecutionContextRecompute.type,
ExecutionContextSetExecutionEnvironment.Params
],
ContextRegistryProtocol.SetExecutionEnvironmentResponse,
ContextRegistryProtocol.Failure,
ContextRegistryProtocol.SetExecutionEnvironmentRequest
](contextRegistry, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import ContextRegistryProtocol._
import context.dispatcher
override protected def request(
msg: Request[
ExecutionApi.ExecutionContextRecompute.type,
ExecutionContextSetExecutionEnvironment.Params
]
): ContextRegistryProtocol.SetExecutionEnvironmentRequest =
ContextRegistryProtocol.SetExecutionEnvironmentRequest(
session,
msg.params.contextId,
msg.params.executionEnvironment
)
override def receive: Receive = requestStage
private def requestStage: Receive = {
case Request(
ExecutionContextSetExecutionEnvironment,
id,
params: ExecutionContextSetExecutionEnvironment.Params
) =>
contextRegistry ! SetExecutionEnvironmentRequest(
session,
params.contextId,
params.executionEnvironment
)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(id, sender(), cancellable))
}
private def responseStage(
id: Id,
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
logger.error("Request [{}] timed out.", id)
replyTo ! ResponseError(Some(id), Errors.RequestTimeout)
context.stop(self)
initialMsg: Request[
ExecutionApi.ExecutionContextRecompute.type,
ExecutionContextSetExecutionEnvironment.Params
],
msg: ContextRegistryProtocol.SetExecutionEnvironmentResponse
): Unit =
replyTo ! ResponseResult(ExecutionContextRecompute, initialMsg.id, Unused)
case SetExecutionEnvironmentResponse(_) =>
replyTo ! ResponseResult(ExecutionContextRecompute, id, Unused)
cancellable.cancel()
context.stop(self)
case error: ContextRegistryProtocol.Failure =>
replyTo ! ResponseError(Some(id), RuntimeFailureMapper.mapFailure(error))
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(
replyTo: ActorRef,
initialMsg: Request[
ExecutionApi.ExecutionContextRecompute.type,
ExecutionContextSetExecutionEnvironment.Params
],
error: ContextRegistryProtocol.Failure
)(implicit ec: ExecutionContext): Unit =
replyTo ! ResponseError(
Some(initialMsg.id),
RuntimeFailureMapper.mapFailure(error)
)
}
object SetExecutionEnvironmentHandler {

View File

@ -1,82 +1,83 @@
package org.enso.languageserver.requesthandler.refactoring
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import com.typesafe.scalalogging.LazyLogging
import org.enso.jsonrpc._
import org.enso.languageserver.refactoring.RefactoringApi.{
ProjectRenameFailed,
RenameProject
}
import org.enso.languageserver.refactoring.RefactoringProtocol
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.refactoring.{RefactoringApi, RefactoringProtocol}
import org.enso.languageserver.util.{
RequestToApiHandlerWithRetries,
UnhandledLogging
}
import org.enso.polyglot.runtime.Runtime.Api
import java.util.UUID
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for `refactoring/renameProject` commands.
*
* @param timeout a request timeout
* @param runtimeConnector a reference to the runtime connector
* @param target a reference to the runtime connector
*/
class RenameProjectHandler(timeout: FiniteDuration, runtimeConnector: ActorRef)
extends Actor
class RenameProjectHandler(timeout: FiniteDuration, target: ActorRef)
extends RequestToApiHandlerWithRetries[
Request[RenameProject.type, RenameProject.Params],
Api.ProjectRenamed,
Api.Error,
Api.Request
](target, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import context.dispatcher
override def receive: Receive = requestStage
private def requestStage: Receive = {
case Request(RenameProject, id, params: RenameProject.Params) =>
val payload =
Api.RenameProject(params.namespace, params.oldName, params.newName)
runtimeConnector ! Api.Request(UUID.randomUUID(), payload)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(
responseStage(
id,
sender(),
cancellable
)
)
override protected def request(
msg: Request[RefactoringApi.RenameProject.type, RenameProject.Params]
): Api.Request = {
val payload = Api.RenameProject(
msg.params.namespace,
msg.params.oldName,
msg.params.newName
)
Api.Request(UUID.randomUUID(), payload)
}
private def responseStage(
id: Id,
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
logger.error("Request [{}] timed out.", id)
replyTo ! ResponseError(Some(id), Errors.RequestTimeout)
context.stop(self)
case Api.Response(
_,
Api.ProjectRenamed(oldNormalizedName, newNormalizedName, newName)
) =>
replyTo ! ResponseResult(RenameProject, id, Unused)
context.system.eventStream.publish(
RefactoringProtocol.ProjectRenamedNotification(
oldNormalizedName,
newNormalizedName,
newName
)
initialMsg: Request[
RefactoringApi.RenameProject.type,
RenameProject.Params
],
msg: Api.ProjectRenamed
): Unit = {
val Api.ProjectRenamed(oldNormalizedName, newNormalizedName, newName) = msg
replyTo ! ResponseResult(RenameProject, initialMsg.id, Unused)
context.system.eventStream.publish(
RefactoringProtocol.ProjectRenamedNotification(
oldNormalizedName,
newNormalizedName,
newName
)
cancellable.cancel()
context.stop(self)
case Api.Response(_, Api.ProjectRenameFailed(oldName, newName)) =>
replyTo ! ResponseError(Some(id), ProjectRenameFailed(oldName, newName))
cancellable.cancel()
context.stop(self)
)
}
override protected def negativeResponse(
replyTo: ActorRef,
initialMsg: Request[
RefactoringApi.RenameProject.type,
RenameProject.Params
],
error: Api.Error
)(implicit ec: ExecutionContext): Unit = {
val Api.ProjectRenameFailed(oldName, newName) = error
replyTo ! ResponseError(
Some(initialMsg.id),
ProjectRenameFailed(oldName, newName)
)
}
}
object RenameProjectHandler {

View File

@ -1,84 +1,82 @@
package org.enso.languageserver.requesthandler.refactoring
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import com.typesafe.scalalogging.LazyLogging
import org.enso.jsonrpc._
import org.enso.languageserver.refactoring.RefactoringApi.RenameSymbol
import org.enso.languageserver.refactoring.RenameFailureMapper
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.refactoring.{RefactoringApi, RenameFailureMapper}
import org.enso.languageserver.runtime.ExecutionApi
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{
RequestToApiHandlerWithRetries,
UnhandledLogging
}
import org.enso.polyglot.runtime.Runtime.Api
import java.util.UUID
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for `refactoring/renameSymbol` commands.
*
* @param timeout a request timeout
* @param runtimeConnector a reference to the runtime connector
* @param runtime a reference to the runtime connector
*/
class RenameSymbolHandler(
timeout: FiniteDuration,
runtimeConnector: ActorRef
) extends Actor
runtime: ActorRef
) extends RequestToApiHandlerWithRetries[
Request[RenameSymbol.type, RenameSymbol.Params],
Api.SymbolRenamed,
Api.Error,
Api.Request
](runtime, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import context.dispatcher
override def receive: Receive = requestStage
private def requestStage: Receive = {
case Request(RenameSymbol, id, params: RenameSymbol.Params) =>
val payload =
Api.RenameSymbol(params.module, params.expressionId, params.newName)
runtimeConnector ! Api.Request(UUID.randomUUID(), payload)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(
responseStage(
id,
sender(),
cancellable
)
)
override protected def request(
msg: Request[RefactoringApi.RenameSymbol.type, RenameSymbol.Params]
): Api.Request = {
val Request(RenameSymbol, _, params: RenameSymbol.Params) = msg
val payload =
Api.RenameSymbol(params.module, params.expressionId, params.newName)
Api.Request(UUID.randomUUID(), payload)
}
private def responseStage(
id: Id,
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
logger.error("Request [{}] timed out.", id)
replyTo ! ResponseError(Some(id), Errors.RequestTimeout)
context.stop(self)
case Api.Response(_, Api.SymbolRenamed(newName)) =>
replyTo ! ResponseResult(
RenameSymbol,
id,
RenameSymbol.Result(newName)
)
cancellable.cancel()
context.stop(self)
case Api.Response(_, Api.ModuleNotFound(moduleName)) =>
replyTo ! ResponseError(
Some(id),
ExecutionApi.ModuleNotFoundError(moduleName)
)
cancellable.cancel()
context.stop(self)
case Api.Response(_, Api.SymbolRenameFailed(error)) =>
replyTo ! ResponseError(Some(id), RenameFailureMapper.mapFailure(error))
cancellable.cancel()
context.stop(self)
initialMsg: Request[RenameSymbol.type, RenameSymbol.Params],
msg: Api.SymbolRenamed
): Unit = {
replyTo ! ResponseResult(
RenameSymbol,
initialMsg.id,
RenameSymbol.Result(msg.newName)
)
}
override protected def negativeResponse(
replyTo: ActorRef,
initialMsg: Request[RenameSymbol.type, RenameSymbol.Params],
error: Api.Error
)(implicit
ec: ExecutionContext
): Unit = {
error match {
case Api.ModuleNotFound(moduleName) =>
replyTo ! ResponseError(
Some(initialMsg.id),
ExecutionApi.ModuleNotFoundError(moduleName)
)
case Api.SymbolRenameFailed(error) =>
replyTo ! ResponseError(
Some(initialMsg.id),
RenameFailureMapper.mapFailure(error)
)
case _ =>
logger.error(s"unexpected error response $error")
}
}
}
object RenameSymbolHandler {

View File

@ -14,6 +14,7 @@ import cats.implicits._
import org.enso.polyglot.runtime.Runtime.Api.{DiagnosticType, ExecutionResult}
import java.io.File
import java.lang.InternalError
import scala.concurrent.{ExecutionContext, Future}
final class RuntimeFailureMapper(contentRootManager: ContentRootManager) {
@ -27,18 +28,17 @@ final class RuntimeFailureMapper(contentRootManager: ContentRootManager) {
def mapApiError(
error: Api.Error
)(implicit ec: ExecutionContext): Future[ContextRegistryProtocol.Failure] = {
implicit def liftToFuture(
result: ContextRegistryProtocol.Failure
): Future[ContextRegistryProtocol.Failure] = Future.successful(result)
error match {
case Api.ContextNotExistError(contextId) =>
ContextRegistryProtocol.ContextNotFound(contextId)
Future.successful(ContextRegistryProtocol.ContextNotFound(contextId))
case Api.EmptyStackError(contextId) =>
ContextRegistryProtocol.EmptyStackError(contextId)
Future.successful(ContextRegistryProtocol.EmptyStackError(contextId))
case Api.InvalidStackItemError(contextId) =>
ContextRegistryProtocol.InvalidStackItemError(contextId)
Future.successful(
ContextRegistryProtocol.InvalidStackItemError(contextId)
)
case Api.ModuleNotFound(moduleName) =>
ContextRegistryProtocol.ModuleNotFound(moduleName)
Future.successful(ContextRegistryProtocol.ModuleNotFound(moduleName))
case Api.VisualizationExpressionFailed(ctx, message, result) =>
for (diagnostic <- result.map(toProtocolDiagnostic).sequence)
yield ContextRegistryProtocol.VisualizationExpressionFailed(
@ -51,7 +51,9 @@ final class RuntimeFailureMapper(contentRootManager: ContentRootManager) {
diagnostic
)
case Api.VisualizationNotFound() =>
ContextRegistryProtocol.VisualizationNotFound
Future.successful(ContextRegistryProtocol.VisualizationNotFound)
case e =>
Future.failed(new InternalError(s"unexpected error $e"))
}
}

View File

@ -1,63 +1,50 @@
package org.enso.languageserver.runtime.handler
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import akka.pattern.pipe
import com.typesafe.scalalogging.LazyLogging
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
RuntimeFailureMapper
}
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{ApiHandlerWithRetries, UnhandledLogging}
import org.enso.polyglot.runtime.Runtime.Api
import java.util.UUID
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for create context commands.
*
* @param runtimeFailureMapper mapper for runtime failures
* @param timeout request timeout
* @param runtime reference to the runtime connector
* @param target reference to the runtime connector
*/
final class CreateContextHandler(
runtimeFailureMapper: RuntimeFailureMapper,
timeout: FiniteDuration,
runtime: ActorRef
) extends Actor
target: ActorRef
) extends ApiHandlerWithRetries[
Api.CreateContextRequest,
Api.CreateContextResponse
](target, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import ContextRegistryProtocol._
import context.dispatcher
override protected def request(msg: Api.CreateContextRequest): Api.Request =
Api.Request(UUID.randomUUID(), msg)
override def receive: Receive = requestStage
private def requestStage: Receive = { case msg: Api.CreateContextRequest =>
runtime ! Api.Request(UUID.randomUUID(), msg)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(sender(), cancellable))
}
private def responseStage(
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
replyTo ! RequestTimeout
context.stop(self)
msg: Api.CreateContextResponse
): Unit =
replyTo ! ContextRegistryProtocol.CreateContextResponse(msg.contextId)
case Api.Response(_, Api.CreateContextResponse(contextId)) =>
replyTo ! CreateContextResponse(contextId)
cancellable.cancel()
context.stop(self)
case Api.Response(_, error: Api.Error) =>
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(replyTo: ActorRef, error: Api.Error)(
implicit ec: ExecutionContext
): Unit =
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
}
object CreateContextHandler {

View File

@ -1,17 +1,17 @@
package org.enso.languageserver.runtime.handler
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import akka.pattern.pipe
import com.typesafe.scalalogging.LazyLogging
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
RuntimeFailureMapper
}
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{ApiHandlerWithRetries, UnhandledLogging}
import org.enso.polyglot.runtime.Runtime.Api
import java.util.UUID
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for destroy context commands.
@ -24,40 +24,27 @@ final class DestroyContextHandler(
runtimeFailureMapper: RuntimeFailureMapper,
timeout: FiniteDuration,
runtime: ActorRef
) extends Actor
) extends ApiHandlerWithRetries[
Api.DestroyContextRequest,
Api.DestroyContextResponse
](runtime, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import ContextRegistryProtocol._
import context.dispatcher
override protected def request(msg: Api.DestroyContextRequest): Api.Request =
Api.Request(UUID.randomUUID(), msg)
override def receive: Receive = requestStage
private def requestStage: Receive = { case msg: Api.DestroyContextRequest =>
runtime ! Api.Request(UUID.randomUUID(), msg)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(sender(), cancellable))
}
private def responseStage(
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
replyTo ! RequestTimeout
context.stop(self)
msg: Api.DestroyContextResponse
): Unit =
replyTo ! ContextRegistryProtocol.DestroyContextResponse(msg.contextId)
case Api.Response(_, Api.DestroyContextResponse(contextId)) =>
replyTo ! DestroyContextResponse(contextId)
cancellable.cancel()
context.stop(self)
case Api.Response(_, error: Api.Error) =>
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(replyTo: ActorRef, error: Api.Error)(
implicit ec: ExecutionContext
): Unit =
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
}
object DestroyContextHandler {

View File

@ -1,17 +1,17 @@
package org.enso.languageserver.runtime.handler
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import akka.pattern.pipe
import com.typesafe.scalalogging.LazyLogging
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
RuntimeFailureMapper
}
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{ApiHandlerWithRetries, UnhandledLogging}
import org.enso.polyglot.runtime.Runtime.Api
import java.util.UUID
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for push commands.
@ -24,39 +24,27 @@ final class PopContextHandler(
runtimeFailureMapper: RuntimeFailureMapper,
timeout: FiniteDuration,
runtime: ActorRef
) extends Actor
) extends ApiHandlerWithRetries[
Api.PopContextRequest,
Api.PopContextResponse
](runtime, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import context.dispatcher
override protected def request(msg: Api.PopContextRequest): Api.Request =
Api.Request(UUID.randomUUID(), msg)
override def receive: Receive = requestStage
private def requestStage: Receive = { case msg: Api.PopContextRequest =>
runtime ! Api.Request(UUID.randomUUID(), msg)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(sender(), cancellable))
}
private def responseStage(
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
replyTo ! RequestTimeout
context.stop(self)
msg: Api.PopContextResponse
): Unit =
replyTo ! ContextRegistryProtocol.PopContextResponse(msg.contextId)
case Api.Response(_, Api.PopContextResponse(contextId)) =>
replyTo ! ContextRegistryProtocol.PopContextResponse(contextId)
cancellable.cancel()
context.stop(self)
case Api.Response(_, error: Api.Error) =>
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(replyTo: ActorRef, error: Api.Error)(
implicit ec: ExecutionContext
): Unit =
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
}
object PopContextHandler {

View File

@ -1,17 +1,17 @@
package org.enso.languageserver.runtime.handler
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import akka.pattern.pipe
import com.typesafe.scalalogging.LazyLogging
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
RuntimeFailureMapper
}
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{ApiHandlerWithRetries, UnhandledLogging}
import org.enso.polyglot.runtime.Runtime.Api
import java.util.UUID
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for push commands.
@ -24,39 +24,27 @@ final class PushContextHandler(
runtimeFailureMapper: RuntimeFailureMapper,
timeout: FiniteDuration,
runtime: ActorRef
) extends Actor
) extends ApiHandlerWithRetries[
Api.PushContextRequest,
Api.PushContextResponse
](runtime, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import context.dispatcher
override protected def request(msg: Api.PushContextRequest): Api.Request =
Api.Request(UUID.randomUUID(), msg)
override def receive: Receive = requestStage
private def requestStage: Receive = { case msg: Api.PushContextRequest =>
runtime ! Api.Request(UUID.randomUUID(), msg)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(sender(), cancellable))
}
private def responseStage(
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
replyTo ! RequestTimeout
context.stop(self)
msg: Api.PushContextResponse
): Unit =
replyTo ! ContextRegistryProtocol.PushContextResponse(msg.contextId)
case Api.Response(_, Api.PushContextResponse(contextId)) =>
replyTo ! ContextRegistryProtocol.PushContextResponse(contextId)
cancellable.cancel()
context.stop(self)
case Api.Response(_, error: Api.Error) =>
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(replyTo: ActorRef, error: Api.Error)(
implicit ec: ExecutionContext
): Unit =
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
}
object PushContextHandler {

View File

@ -1,17 +1,17 @@
package org.enso.languageserver.runtime.handler
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import akka.pattern.pipe
import com.typesafe.scalalogging.LazyLogging
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
RuntimeFailureMapper
}
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{ApiHandlerWithRetries, UnhandledLogging}
import org.enso.polyglot.runtime.Runtime.Api
import java.util.UUID
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for recompute commands.
@ -24,40 +24,29 @@ final class RecomputeContextHandler(
runtimeFailureMapper: RuntimeFailureMapper,
timeout: FiniteDuration,
runtime: ActorRef
) extends Actor
) extends ApiHandlerWithRetries[
Api.RecomputeContextRequest,
Api.RecomputeContextResponse
](runtime, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import ContextRegistryProtocol._
import context.dispatcher
override protected def request(
msg: Api.RecomputeContextRequest
): Api.Request =
Api.Request(UUID.randomUUID(), msg)
override def receive: Receive = requestStage
private def requestStage: Receive = { case msg: Api.RecomputeContextRequest =>
runtime ! Api.Request(UUID.randomUUID(), msg)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(sender(), cancellable))
}
private def responseStage(
override protected def positiveResponse(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
replyTo ! RequestTimeout
context.stop(self)
msg: Api.RecomputeContextResponse
): Unit =
replyTo ! ContextRegistryProtocol.RecomputeContextResponse(msg.contextId)
case Api.Response(_, Api.RecomputeContextResponse(contextId)) =>
replyTo ! RecomputeContextResponse(contextId)
cancellable.cancel()
context.stop(self)
case Api.Response(_, error: Api.Error) =>
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
cancellable.cancel()
context.stop(self)
}
override protected def negativeResponse(replyTo: ActorRef, error: Api.Error)(
implicit ec: ExecutionContext
): Unit =
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
}
object RecomputeContextHandler {

View File

@ -1,18 +1,17 @@
package org.enso.languageserver.runtime.handler
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import akka.pattern.pipe
import com.typesafe.scalalogging.LazyLogging
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.{
ContextRegistryProtocol,
RuntimeFailureMapper
}
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{ApiHandlerWithRetries, UnhandledLogging}
import org.enso.polyglot.runtime.Runtime.Api
import java.util.UUID
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
/** A request handler for setting execution context command.
@ -25,40 +24,31 @@ final class SetExecutionContextEnvironmentHandler(
runtimeFailureMapper: RuntimeFailureMapper,
timeout: FiniteDuration,
runtime: ActorRef
) extends Actor
) extends ApiHandlerWithRetries[
Api.SetExecutionEnvironmentRequest,
Api.SetExecutionEnvironmentResponse
](runtime, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
override protected def request(
msg: Api.SetExecutionEnvironmentRequest
): Api.Request =
Api.Request(UUID.randomUUID(), msg)
import ContextRegistryProtocol._
import context.dispatcher
override def receive: Receive = requestStage
private def requestStage: Receive = {
case msg: Api.SetExecutionEnvironmentRequest =>
runtime ! Api.Request(UUID.randomUUID(), msg)
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(sender(), cancellable))
override protected def positiveResponse(
replyTo: ActorRef,
msg: Api.SetExecutionEnvironmentResponse
): Unit = {
replyTo ! ContextRegistryProtocol.SetExecutionEnvironmentResponse(
msg.contextId
)
}
private def responseStage(
replyTo: ActorRef,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
replyTo ! RequestTimeout
context.stop(self)
case Api.Response(_, Api.SetExecutionEnvironmentResponse(contextId)) =>
replyTo ! SetExecutionEnvironmentResponse(contextId)
cancellable.cancel()
context.stop(self)
case Api.Response(_, error: Api.Error) =>
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
cancellable.cancel()
context.stop(self)
override protected def negativeResponse(replyTo: ActorRef, error: Api.Error)(
implicit ec: ExecutionContext
): Unit = {
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
}
}

View File

@ -197,7 +197,7 @@ final class SuggestionsHandler(
case msg: Api.SuggestionsDatabaseSuggestionsLoadedNotification =>
logger.debug(
"Starting loading suggestions for library [{0}].",
"Starting loading suggestions for library [{}].",
msg.libraryName
)
context.become(
@ -212,7 +212,7 @@ final class SuggestionsHandler(
.onComplete {
case Success(notification) =>
logger.debug(
"Complete loading suggestions for library [{0}]. Has updates: {1}",
"Complete loading suggestions for library [{}]. Has updates: {}",
msg.libraryName,
notification.updates.nonEmpty
)
@ -224,7 +224,7 @@ final class SuggestionsHandler(
self ! SuggestionsHandler.SuggestionLoadingCompleted
case Failure(ex) =>
logger.error(
"Error applying suggestion updates for loaded library [{0}].",
"Error applying suggestion updates for loaded library [{}].",
msg.libraryName,
ex
)

View File

@ -1,15 +1,15 @@
package org.enso.languageserver.search.handler
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.actor.{Actor, ActorRef, Props}
import akka.pattern.pipe
import com.typesafe.scalalogging.LazyLogging
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.languageserver.runtime.RuntimeFailureMapper
import org.enso.languageserver.search.SearchProtocol
import org.enso.languageserver.util.UnhandledLogging
import org.enso.languageserver.util.{ApiHandlerWithRetries, UnhandledLogging}
import org.enso.polyglot.runtime.Runtime.Api
import java.util.UUID
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
/** A request handler for invalidate modules index command.
@ -24,51 +24,35 @@ final class InvalidateModulesIndexHandler(
runtime: ActorRef,
suggestionsHandler: ActorRef,
timeout: FiniteDuration
) extends Actor
) extends ApiHandlerWithRetries[
SearchProtocol.InvalidateModulesIndex.type,
Api.InvalidateModulesIndexResponse
](runtime, timeout)
with Actor
with LazyLogging
with UnhandledLogging {
import context.dispatcher
override def receive: Receive = requestStage
private def requestStage: Receive = {
case SearchProtocol.InvalidateModulesIndex =>
val request = Api.Request(
UUID.randomUUID(),
Api.InvalidateModulesIndexRequest()
)
runtime ! request
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(sender(), request, cancellable))
override protected def request(
msg: SearchProtocol.InvalidateModulesIndex.type
): Api.Request = {
Api.Request(
UUID.randomUUID(),
Api.InvalidateModulesIndexRequest()
)
}
private def responseStage(
override protected def positiveResponse(
replyTo: ActorRef,
request: Api.Request,
cancellable: Cancellable
): Receive = {
case RequestTimeout =>
logger.warn(
"Failed to receive a [{}] response in [{}].",
request,
timeout
)
val newCancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(replyTo, request, newCancellable))
msg: Api.InvalidateModulesIndexResponse
): Unit = {
suggestionsHandler ! SearchProtocol.ClearSuggestionsDatabase
replyTo ! SearchProtocol.InvalidateSuggestionsDatabaseResult
}
case Api.Response(_, Api.InvalidateModulesIndexResponse()) =>
suggestionsHandler ! SearchProtocol.ClearSuggestionsDatabase
replyTo ! SearchProtocol.InvalidateSuggestionsDatabaseResult
cancellable.cancel()
context.stop(self)
case Api.Response(_, error: Api.Error) =>
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
cancellable.cancel()
context.stop(self)
override protected def negativeResponse(replyTo: ActorRef, error: Api.Error)(
implicit ec: ExecutionContext
): Unit = {
runtimeFailureMapper.mapApiError(error).pipeTo(replyTo)
}
}

View File

@ -0,0 +1,318 @@
package org.enso.languageserver.util
import akka.actor.{Actor, ActorRef, Cancellable}
import com.typesafe.scalalogging.LazyLogging
import org.enso.languageserver.requesthandler.RequestTimeout
import org.enso.polyglot.runtime.Runtime.Api
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
import scala.reflect.ClassTag
/** Handler base class with retries support.
*
* @tparam RequestType type of the message being handled
* @tparam ResponseType type of the message expected when message is successfully handled
* @tparam ErrorType type of the message expected when message has failed to be handled
* @tparam ForwardedPayloadType type of the message forwarded to `target`
*/
abstract class HandlerWithRetries[
RequestType,
ResponseType,
ErrorType,
ForwardedPayloadType
] {
a: Actor with LazyLogging =>
override def receive: Receive = requestStage
protected def request(msg: RequestType): ForwardedPayloadType
protected def requestStage: Receive
}
/** API handler base class with retries support.
*
* @param target target actor which will handle a message
* @param timeout timeout for serving a message
* @param retries number of retries attempted on timeout before aborting
* @tparam RequestType type of the message being handled
* @tparam ResponseType type of the message expected when message is successfully handled
*/
abstract class ApiHandlerWithRetries[
RequestType: ClassTag,
ResponseType: ClassTag
](target: ActorRef, timeout: FiniteDuration, retries: Int)
extends HandlerWithRetries[
RequestType,
ResponseType,
Api.Error,
Api.Request
] {
a: Actor with LazyLogging =>
def this(runtime: ActorRef, timeout: FiniteDuration) = {
this(runtime, timeout, 10)
}
import context.dispatcher
override def receive: Receive = requestStage
protected def request(msg: RequestType): Api.Request
protected def requestStage: Receive = { case msg: RequestType =>
val req = request(msg)
target ! req
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(sender(), req, cancellable, retries))
}
private def responseStage(
replyTo: ActorRef,
forwardedRequest: Api.Request,
cancellable: Cancellable,
retries: Int
): Receive = {
case RequestTimeout =>
if (retries > 0) {
logger.warn(
"Failed to receive a [{}] response in [{}]. Retrying.",
forwardedRequest,
timeout
)
val newCancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(
responseStage(
replyTo,
forwardedRequest,
newCancellable,
retries - 1
)
)
} else {
replyTo ! RequestTimeout
context.stop(self)
}
case Api.Response(_, msg: ResponseType) =>
positiveResponse(replyTo, msg)
cancellable.cancel()
context.stop(self)
case Api.Response(_, error: Api.Error) =>
negativeResponse(replyTo, error)
cancellable.cancel()
context.stop(self)
}
protected def positiveResponse(replyTo: ActorRef, msg: ResponseType): Unit
protected def negativeResponse(replyTo: ActorRef, error: Api.Error)(implicit
ec: ExecutionContext
): Unit
}
/** Request handler base class with retries support.
*
* @param target target actor which will handle a message
* @param timeout timeout for serving a message
* @param retries number of retries attempted on timeout before aborting
* @tparam RequestType type of the message being handled
* @tparam ResponseType type of the message expected when message is successfully handled
* @tparam ErrorType type of the message expected when message has failed to be handled
* @tparam ForwardedPayloadType type of the message forwarded to `target`
*/
abstract class RequestHandlerWithRetries[
RequestType: ClassTag,
ResponseType: ClassTag,
ErrorType: ClassTag,
ForwardedPayloadType
](target: ActorRef, timeout: FiniteDuration, retries: Int)
extends HandlerWithRetries[
RequestType,
ResponseType,
ErrorType,
ForwardedPayloadType
] {
a: Actor with LazyLogging =>
def this(runtime: ActorRef, timeout: FiniteDuration) = {
this(runtime, timeout, 10)
}
import context.dispatcher
override def receive: Receive = requestStage
protected def request(msg: RequestType): ForwardedPayloadType
protected def requestStage: Receive = { case msg: RequestType =>
val req = request(msg)
target ! req
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(sender(), msg, req, cancellable, retries))
}
private def responseStage(
replyTo: ActorRef,
initialMsg: RequestType,
forwardedRequest: ForwardedPayloadType,
cancellable: Cancellable,
retries: Int
): Receive = {
case RequestTimeout =>
if (retries > 0) {
logger.warn(
"Failed to receive a [{}] response in [{}]. Retrying.",
forwardedRequest,
timeout
)
val newCancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(
responseStage(
replyTo,
initialMsg,
forwardedRequest,
newCancellable,
retries - 1
)
)
} else {
replyTo ! RequestTimeout
context.stop(self)
}
case msg: ResponseType =>
positiveResponse(replyTo, initialMsg, msg)
cancellable.cancel()
context.stop(self)
case error: ErrorType =>
negativeResponse(replyTo, initialMsg, error)
cancellable.cancel()
context.stop(self)
}
protected def positiveResponse(
replyTo: ActorRef,
initialMsg: RequestType,
msg: ResponseType
): Unit
protected def negativeResponse(
replyTo: ActorRef,
initialMsg: RequestType,
error: ErrorType
)(implicit
ec: ExecutionContext
): Unit
}
/** Request handler base class with retries support. Handler forwards messages directly to runtime.
*
* @param target target actor which will handle a message
* @param timeout timeout for serving a message
* @param retries number of retries attempted on timeout before aborting
* @tparam RequestType type of the message being handled
* @tparam ResponseType type of the message expected when message is successfully handled
* @tparam ErrorType type of the message expected when message has failed to be handled
* @tparam ForwardedPayloadType type of the message forwarded to `target`
*/
abstract class RequestToApiHandlerWithRetries[
RequestType: ClassTag,
ResponseType: ClassTag,
ErrorType: ClassTag,
ForwardedPayloadType
](target: ActorRef, timeout: FiniteDuration, retries: Int)
extends HandlerWithRetries[
RequestType,
ResponseType,
ErrorType,
ForwardedPayloadType
] {
a: Actor with LazyLogging =>
def this(runtime: ActorRef, timeout: FiniteDuration) = {
this(runtime, timeout, 10)
}
import context.dispatcher
override def receive: Receive = requestStage
protected def request(msg: RequestType): ForwardedPayloadType
protected def requestStage: Receive = { case msg: RequestType =>
val req = request(msg)
target ! req
val cancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(responseStage(sender(), msg, req, cancellable, retries))
}
private def responseStage(
replyTo: ActorRef,
initialMsg: RequestType,
forwardedRequest: ForwardedPayloadType,
cancellable: Cancellable,
retries: Int
): Receive = {
case RequestTimeout =>
if (retries > 0) {
logger.warn(
"Failed to receive a [{}] response in [{}]. Retrying.",
forwardedRequest,
timeout
)
val newCancellable =
context.system.scheduler.scheduleOnce(timeout, self, RequestTimeout)
context.become(
responseStage(
replyTo,
initialMsg,
forwardedRequest,
newCancellable,
retries - 1
)
)
} else {
replyTo ! RequestTimeout
context.stop(self)
}
case Api.Response(_, msg: ResponseType) =>
positiveResponse(replyTo, initialMsg, msg)
cancellable.cancel()
context.stop(self)
case Api.Response(_, error: ErrorType) =>
negativeResponse(replyTo, initialMsg, error)
cancellable.cancel()
context.stop(self)
}
protected def positiveResponse(
replyTo: ActorRef,
initialMsg: RequestType,
msg: ResponseType
): Unit
protected def negativeResponse(
replyTo: ActorRef,
initialMsg: RequestType,
error: ErrorType
)(implicit
ec: ExecutionContext
): Unit
}

View File

@ -1679,13 +1679,13 @@ object Runtime {
newName: String
) extends ApiResponse
/** Signals that project has been renamed.
/** Signals that project has failed to be renamed.
*
* @param oldName the old name of the project
* @param newName the new name of the project
*/
final case class ProjectRenameFailed(oldName: String, newName: String)
extends ApiResponse
extends Error
/** A request for symbol renaming.
*
@ -1710,7 +1710,7 @@ object Runtime {
* @param error the error that happened
*/
final case class SymbolRenameFailed(error: SymbolRenameFailed.Error)
extends ApiResponse
extends Error
object SymbolRenameFailed {

View File

@ -42,6 +42,7 @@ public final class PrivateConstructorAnalysis implements IRPass {
}
@Override
@SuppressWarnings("unchecked")
public Seq<IRPass> invalidatedPasses() {
Object obj = scala.collection.immutable.Nil$.MODULE$;
return (scala.collection.immutable.List<IRPass>) obj;

View File

@ -2,6 +2,7 @@ package org.enso.ydoc;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.enso.ydoc.polyfill.ParserPolyfill;
import org.enso.ydoc.polyfill.web.WebEnvironment;
import org.graalvm.polyglot.Source;
@ -14,30 +15,40 @@ public class Main {
private Main() {}
public static void main(String[] args) throws Exception {
System.setProperty("helidon.serialFilter.pattern", "javax.management.**;java.lang.**;java.rmi.**;javax.security.auth.Subject;!*");
System.setProperty(
"helidon.serialFilter.pattern",
"javax.management.**;java.lang.**;java.rmi.**;javax.security.auth.Subject;!*");
var ydoc = Main.class.getResource(YDOC_SERVER_PATH);
var contextBuilder = WebEnvironment.createContext().allowIO(IOAccess.ALL);
Sampling.init();
try (var executor = Executors.newSingleThreadExecutor();
var parser = new ParserPolyfill()) {
var ydocJs = Source.newBuilder("js", ydoc).mimeType("application/javascript+module").build();
CompletableFuture.supplyAsync(contextBuilder::build, executor)
.thenAcceptAsync(
ctx -> {
WebEnvironment.initialize(ctx, executor);
parser.initialize(ctx);
ctx.eval(ydocJs);
},
executor)
.get();
// Can't use try-with-resource in ExecutorService because API was added in JDK19
var executor = Executors.newSingleThreadExecutor();
try {
try (var parser = new ParserPolyfill()) {
var ydocJs =
Source.newBuilder("js", ydoc).mimeType("application/javascript+module").build();
CompletableFuture.supplyAsync(contextBuilder::build, executor)
.thenAcceptAsync(
ctx -> {
WebEnvironment.initialize(ctx, executor);
parser.initialize(ctx);
ctx.eval(ydocJs);
},
executor)
.get();
}
System.out.println("Press enter to exit");
System.in.read();
} finally {
executor.shutdown();
var terminated = executor.awaitTermination(10, TimeUnit.SECONDS);
if (!terminated) {
executor.shutdownNow();
}
}
}
}

View File

@ -33,7 +33,7 @@ public final class ParserPolyfill implements AutoCloseable, ProxyExecutable, Pol
}
@Override
public void initialize(Context ctx) {
public final void initialize(Context ctx) {
Source parserJs =
Source.newBuilder("js", ParserPolyfill.class.getResource(PARSER_JS)).buildLiteral();

View File

@ -0,0 +1,20 @@
package org.enso.ydoc.polyfill;
import org.enso.ydoc.Polyfill;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Source;
public abstract class PolyfillBase implements Polyfill {
private final String resourceName;
protected PolyfillBase(String resourceName) {
this.resourceName = resourceName;
}
@Override
public final void initialize(Context ctx) {
Source jsSource = Source.newBuilder("js", getClass().getResource(resourceName)).buildLiteral();
ctx.eval(jsSource).execute(this);
}
}

View File

@ -1,10 +1,7 @@
package org.enso.ydoc.polyfill.web;
import java.util.UUID;
import org.enso.ydoc.Polyfill;
import org.enso.ydoc.polyfill.Arguments;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Source;
import org.enso.ydoc.polyfill.PolyfillBase;
import org.graalvm.polyglot.Value;
import org.graalvm.polyglot.proxy.ProxyExecutable;
import org.slf4j.Logger;
@ -15,23 +12,13 @@ import org.slf4j.LoggerFactory;
* href="https://nodejs.org/api/globals.html#class-abortcontroller">AbortController</a> Node.js
* interface.
*/
final class AbortController implements ProxyExecutable, Polyfill {
final class AbortController extends PolyfillBase implements ProxyExecutable {
private static final Logger log = LoggerFactory.getLogger(AbortController.class);
private static final String RANDOM_UUID = "random-uuid";
private static final String ABORT_CONTROLLER_JS = "abort-controller.js";
AbortController() {}
@Override
public void initialize(Context ctx) {
Source abortControllerJs =
Source.newBuilder("js", AbortController.class.getResource(ABORT_CONTROLLER_JS))
.buildLiteral();
ctx.eval(abortControllerJs).execute(this);
AbortController() {
super(ABORT_CONTROLLER_JS);
}
@Override
@ -40,10 +27,6 @@ final class AbortController implements ProxyExecutable, Polyfill {
log.debug(Arguments.toString(arguments));
return switch (command) {
case RANDOM_UUID -> UUID.randomUUID().toString();
default -> throw new IllegalStateException(command);
};
throw new IllegalStateException(command);
}
}

View File

@ -1,17 +1,15 @@
package org.enso.ydoc.polyfill.web;
import java.util.UUID;
import org.enso.ydoc.Polyfill;
import org.enso.ydoc.polyfill.Arguments;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Source;
import org.enso.ydoc.polyfill.PolyfillBase;
import org.graalvm.polyglot.Value;
import org.graalvm.polyglot.proxy.ProxyExecutable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Implements the <a href="https://nodejs.org/api/crypto.html">Crypto</a> Node.js interface. */
final class Crypto implements ProxyExecutable, Polyfill {
final class Crypto extends PolyfillBase implements ProxyExecutable {
private static final Logger log = LoggerFactory.getLogger(Crypto.class);
@ -19,13 +17,8 @@ final class Crypto implements ProxyExecutable, Polyfill {
private static final String CRYPTO_JS = "crypto.js";
Crypto() {}
@Override
public void initialize(Context ctx) {
Source cryptoJs = Source.newBuilder("js", Crypto.class.getResource(CRYPTO_JS)).buildLiteral();
ctx.eval(cryptoJs).execute(this);
Crypto() {
super(CRYPTO_JS);
}
@Override

View File

@ -4,10 +4,8 @@ import java.util.Arrays;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.enso.ydoc.Polyfill;
import org.enso.ydoc.polyfill.Arguments;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Source;
import org.enso.ydoc.polyfill.PolyfillBase;
import org.graalvm.polyglot.Value;
import org.graalvm.polyglot.proxy.ProxyExecutable;
import org.slf4j.Logger;
@ -17,7 +15,7 @@ import org.slf4j.LoggerFactory;
* Implements the <a href="https://nodejs.org/api/events.html#class-eventemitter">EventEmitter</a>
* Node.js interface.
*/
final class EventEmitter implements ProxyExecutable, Polyfill {
final class EventEmitter extends PolyfillBase implements ProxyExecutable {
private static final Logger log = LoggerFactory.getLogger(EventEmitter.class);
@ -29,14 +27,8 @@ final class EventEmitter implements ProxyExecutable, Polyfill {
private static final String EVENT_EMITTER_JS = "event-emitter.js";
EventEmitter() {}
@Override
public void initialize(Context ctx) {
Source eventEmitterJs =
Source.newBuilder("js", EventEmitter.class.getResource(EVENT_EMITTER_JS)).buildLiteral();
ctx.eval(eventEmitterJs).execute(this);
EventEmitter() {
super(EVENT_EMITTER_JS);
}
@Override

View File

@ -3,10 +3,8 @@ package org.enso.ydoc.polyfill.web;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.enso.ydoc.Polyfill;
import org.enso.ydoc.polyfill.Arguments;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Source;
import org.enso.ydoc.polyfill.PolyfillBase;
import org.graalvm.polyglot.Value;
import org.graalvm.polyglot.proxy.ProxyExecutable;
import org.slf4j.Logger;
@ -16,7 +14,7 @@ import org.slf4j.LoggerFactory;
* Implements the <a href="https://nodejs.org/api/events.html#class-eventtarget">EventTarget</a>
* Node.js interface.
*/
final class EventTarget implements ProxyExecutable, Polyfill {
final class EventTarget extends PolyfillBase implements ProxyExecutable {
private static final Logger log = LoggerFactory.getLogger(EventTarget.class);
@ -28,14 +26,8 @@ final class EventTarget implements ProxyExecutable, Polyfill {
private static final String EVENT_TARGET_JS = "event-target.js";
EventTarget() {}
@Override
public void initialize(Context ctx) {
Source eventTargetJs =
Source.newBuilder("js", EventTarget.class.getResource(EVENT_TARGET_JS)).buildLiteral();
ctx.eval(eventTargetJs).execute(this);
EventTarget() {
super(EVENT_TARGET_JS);
}
@Override

View File

@ -1,9 +1,7 @@
package org.enso.ydoc.polyfill.web;
import org.enso.ydoc.Polyfill;
import org.enso.ydoc.polyfill.Arguments;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Source;
import org.enso.ydoc.polyfill.PolyfillBase;
import org.graalvm.polyglot.Value;
import org.graalvm.polyglot.proxy.ProxyExecutable;
import org.slf4j.Logger;
@ -13,7 +11,7 @@ import org.slf4j.LoggerFactory;
* Implements the <a href="https://nodejs.org/api/perf_hooks.html">Performance measurement</a>
* Node.js API.
*/
final class Performance implements ProxyExecutable, Polyfill {
final class Performance extends PolyfillBase implements ProxyExecutable {
private static final Logger log = LoggerFactory.getLogger(Performance.class);
@ -21,14 +19,8 @@ final class Performance implements ProxyExecutable, Polyfill {
private static final String PERFORMANCE_JS = "performance.js";
Performance() {}
@Override
public void initialize(Context ctx) {
Source performanceJs =
Source.newBuilder("js", Performance.class.getResource(PERFORMANCE_JS)).buildLiteral();
ctx.eval(performanceJs).execute(this);
Performance() {
super(PERFORMANCE_JS);
}
@Override

View File

@ -5,17 +5,15 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.enso.ydoc.Polyfill;
import org.enso.ydoc.polyfill.Arguments;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Source;
import org.enso.ydoc.polyfill.PolyfillBase;
import org.graalvm.polyglot.Value;
import org.graalvm.polyglot.proxy.ProxyExecutable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Implements the <a href="https://nodejs.org/api/timers.html">Timers</a> Node.js API. */
final class Timers implements ProxyExecutable, Polyfill {
final class Timers extends PolyfillBase implements ProxyExecutable {
private static final Logger log = LoggerFactory.getLogger(Timers.class);
@ -39,16 +37,10 @@ final class Timers implements ProxyExecutable, Polyfill {
private final ExecutorService executor;
Timers(ExecutorService executor) {
super(TIMERS_JS);
this.executor = executor;
}
@Override
public void initialize(Context ctx) {
Source timersJs = Source.newBuilder("js", Timers.class.getResource(TIMERS_JS)).buildLiteral();
ctx.eval(timersJs).execute(this);
}
public Object setTimeout(Value func, long delay, Value[] args) {
return scheduledExecutor.schedule(execute(func, args), delay, TIME_UNIT);
}

View File

@ -3,10 +3,8 @@ package org.enso.ydoc.polyfill.web;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import org.enso.ydoc.Polyfill;
import org.enso.ydoc.polyfill.Arguments;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Source;
import org.enso.ydoc.polyfill.PolyfillBase;
import org.graalvm.polyglot.Value;
import org.graalvm.polyglot.io.ByteSequence;
import org.graalvm.polyglot.proxy.ProxyExecutable;
@ -14,7 +12,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Implements the <a href="https://nodejs.org/api/util.html">Util</a> Node.js API. */
final class Util implements ProxyExecutable, Polyfill {
final class Util extends PolyfillBase implements ProxyExecutable {
private static final Logger log = LoggerFactory.getLogger(Util.class);
@ -23,13 +21,8 @@ final class Util implements ProxyExecutable, Polyfill {
private static final String UTIL_JS = "util.js";
Util() {}
@Override
public void initialize(Context ctx) {
Source utilJs = Source.newBuilder("js", Util.class.getResource(UTIL_JS)).buildLiteral();
ctx.eval(utilJs).execute(this);
Util() {
super(UTIL_JS);
}
@Override

View File

@ -15,10 +15,8 @@ import java.util.Arrays;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import org.enso.ydoc.Polyfill;
import org.enso.ydoc.polyfill.Arguments;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Source;
import org.enso.ydoc.polyfill.PolyfillBase;
import org.graalvm.polyglot.Value;
import org.graalvm.polyglot.io.ByteSequence;
import org.graalvm.polyglot.proxy.ProxyExecutable;
@ -29,7 +27,7 @@ import org.slf4j.LoggerFactory;
* Implements the WebSocket and WebSocketServer interfaces of the <a
* href="https://www.npmjs.com/package/ws">ws</a> NPM package.
*/
final class WebSocket implements ProxyExecutable, Polyfill {
final class WebSocket extends PolyfillBase implements ProxyExecutable {
private static final Logger log = LoggerFactory.getLogger(WebSocket.class);
@ -49,17 +47,10 @@ final class WebSocket implements ProxyExecutable, Polyfill {
private final ExecutorService executor;
WebSocket(ExecutorService executor) {
super(WEBSOCKET_JS);
this.executor = executor;
}
@Override
public void initialize(Context ctx) {
Source webSocketJs =
Source.newBuilder("js", WebSocket.class.getResource(WEBSOCKET_JS)).buildLiteral();
ctx.eval(webSocketJs).execute(this);
}
@Override
public Object execute(Value... arguments) {
var command = arguments[0].asString();

View File

@ -0,0 +1,31 @@
package org.enso.ydoc.polyfill;
import static org.junit.Assert.fail;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
public abstract class ExecutorSetup {
protected ExecutorService executor;
@Before
public void setup() throws Exception {
executor = Executors.newSingleThreadExecutor();
}
@After
public void tearDown() throws InterruptedException {
if (executor != null) {
executor.shutdown();
var stopped = executor.awaitTermination(3, TimeUnit.SECONDS);
if (!stopped) {
var pending = executor.shutdownNow();
fail("Pending " + pending.size() + " tasks: " + pending);
}
}
}
}

View File

@ -1,8 +1,6 @@
package org.enso.ydoc.polyfill;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.enso.ydoc.polyfill.web.WebEnvironment;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.io.ByteSequence;
@ -11,17 +9,16 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class ParserPolyfillTest {
public class ParserPolyfillTest extends ExecutorSetup {
private Context context;
private ExecutorService executor;
private ParserPolyfill parser;
public ParserPolyfillTest() {}
@Before
public void setup() throws Exception {
executor = Executors.newSingleThreadExecutor();
super.setup();
parser = new ParserPolyfill();
var contextBuilder = WebEnvironment.createContext();
@ -37,8 +34,8 @@ public class ParserPolyfillTest {
}
@After
public void tearDown() {
executor.close();
public void tearDown() throws InterruptedException {
super.tearDown();
context.close();
parser.close();
}

View File

@ -1,24 +1,22 @@
package org.enso.ydoc.polyfill.web;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.enso.ydoc.polyfill.ExecutorSetup;
import org.graalvm.polyglot.Context;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class AbortControllerTest {
public class AbortControllerTest extends ExecutorSetup {
private Context context;
private ExecutorService executor;
public AbortControllerTest() {}
@Before
public void setup() throws Exception {
executor = Executors.newSingleThreadExecutor();
super.setup();
var eventTarget = new EventTarget();
var abortController = new AbortController();
var contextBuilder = WebEnvironment.createContext();
@ -36,8 +34,8 @@ public class AbortControllerTest {
}
@After
public void tearDown() {
executor.close();
public void tearDown() throws InterruptedException {
super.tearDown();
context.close();
}

View File

@ -3,24 +3,22 @@ package org.enso.ydoc.polyfill.web;
import java.util.Arrays;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.enso.ydoc.polyfill.ExecutorSetup;
import org.graalvm.polyglot.Context;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class CryptoTest {
public class CryptoTest extends ExecutorSetup {
private Context context;
private ExecutorService executor;
public CryptoTest() {}
@Before
public void setup() throws Exception {
executor = Executors.newSingleThreadExecutor();
super.setup();
var crypto = new Crypto();
var contextBuilder = WebEnvironment.createContext();
@ -36,8 +34,8 @@ public class CryptoTest {
}
@After
public void tearDown() {
executor.close();
public void tearDown() throws InterruptedException {
super.tearDown();
context.close();
}

View File

@ -1,24 +1,22 @@
package org.enso.ydoc.polyfill.web;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.enso.ydoc.polyfill.ExecutorSetup;
import org.graalvm.polyglot.Context;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class EventEmitterTest {
public class EventEmitterTest extends ExecutorSetup {
private Context context;
private ExecutorService executor;
public EventEmitterTest() {}
@Before
public void setup() throws Exception {
executor = Executors.newSingleThreadExecutor();
super.setup();
var eventTarget = new EventTarget();
var eventEmitter = new EventEmitter();
var contextBuilder = WebEnvironment.createContext();
@ -36,8 +34,8 @@ public class EventEmitterTest {
}
@After
public void tearDown() {
executor.close();
public void tearDown() throws InterruptedException {
super.tearDown();
context.close();
}

View File

@ -1,24 +1,22 @@
package org.enso.ydoc.polyfill.web;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.enso.ydoc.polyfill.ExecutorSetup;
import org.graalvm.polyglot.Context;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class EventTargetTest {
public class EventTargetTest extends ExecutorSetup {
private Context context;
private ExecutorService executor;
public EventTargetTest() {}
@Before
public void setup() throws Exception {
executor = Executors.newSingleThreadExecutor();
super.setup();
var eventTarget = new EventTarget();
var contextBuilder = WebEnvironment.createContext();
@ -34,8 +32,8 @@ public class EventTargetTest {
}
@After
public void tearDown() {
executor.close();
public void tearDown() throws InterruptedException {
super.tearDown();
context.close();
}

View File

@ -1,24 +1,22 @@
package org.enso.ydoc.polyfill.web;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.enso.ydoc.polyfill.ExecutorSetup;
import org.graalvm.polyglot.Context;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class PerformanceTest {
public class PerformanceTest extends ExecutorSetup {
private Context context;
private ExecutorService executor;
public PerformanceTest() {}
@Before
public void setup() throws Exception {
executor = Executors.newSingleThreadExecutor();
super.setup();
var eventTarget = new Performance();
var contextBuilder = WebEnvironment.createContext();
@ -34,8 +32,8 @@ public class PerformanceTest {
}
@After
public void tearDown() {
executor.close();
public void tearDown() throws InterruptedException {
super.tearDown();
context.close();
}

View File

@ -1,30 +1,34 @@
package org.enso.ydoc.polyfill.web;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import org.enso.ydoc.polyfill.ExecutorSetup;
import org.graalvm.polyglot.Context;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TimersTest {
public class TimersTest extends ExecutorSetup {
private static final Consumer<Object> NULL_CONSUMER = v -> {};
private Context context;
private ExecutorService executor;
public TimersTest() {}
@Before
public void setup() throws Exception {
executor = Executors.newSingleThreadExecutor();
super.setup();
var timers = new Timers(executor);
var contextBuilder = WebEnvironment.createContext();
var hostAccess =
WebEnvironment.defaultHostAccess
.allowAccess(Semaphore.class.getDeclaredMethod("release"))
.build();
var contextBuilder = WebEnvironment.createContext(hostAccess);
context =
CompletableFuture.supplyAsync(
@ -38,26 +42,29 @@ public class TimersTest {
}
@After
public void tearDown() {
executor.close();
public void tearDown() throws InterruptedException {
super.tearDown();
context.close();
}
@Test
public void setTimeout() throws Exception {
var lock = new Semaphore(0);
var code =
"""
globalThis.result = 0;
var p = function (x, y) {
globalThis.result = 10*x + y;
lock.release();
};
setTimeout(p, 0, 4, 2);
""";
var result =
CompletableFuture.supplyAsync(() -> context.eval("js", code), executor)
.thenApplyAsync(v -> context.eval("js", "result"), executor)
.get();
context.getBindings("js").putMember("lock", lock);
CompletableFuture.supplyAsync(() -> context.eval("js", code), executor).get();
lock.acquire();
var result = CompletableFuture.supplyAsync(() -> context.eval("js", "result"), executor).get();
Assert.assertEquals(42, result.asInt());
}

View File

@ -1,8 +1,7 @@
package org.enso.ydoc.polyfill.web;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.enso.ydoc.polyfill.ExecutorSetup;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.io.ByteSequence;
import org.junit.After;
@ -10,16 +9,15 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class UtilTest {
public class UtilTest extends ExecutorSetup {
private Context context;
private ExecutorService executor;
public UtilTest() {}
@Before
public void setup() throws Exception {
executor = Executors.newSingleThreadExecutor();
super.setup();
var encoding = new Util();
var contextBuilder = WebEnvironment.createContext();
@ -35,8 +33,8 @@ public class UtilTest {
}
@After
public void tearDown() {
executor.close();
public void tearDown() throws InterruptedException {
super.tearDown();
context.close();
}

View File

@ -3,26 +3,24 @@ package org.enso.ydoc.polyfill.web;
import io.helidon.webclient.websocket.WsClient;
import io.helidon.websocket.WsListener;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicReferenceArray;
import org.enso.ydoc.polyfill.ExecutorSetup;
import org.graalvm.polyglot.Context;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class WebSocketServerTest {
public class WebSocketServerTest extends ExecutorSetup {
private Context context;
private ExecutorService executor;
public WebSocketServerTest() {}
@Before
public void setup() throws Exception {
executor = Executors.newSingleThreadExecutor();
super.setup();
var hostAccess =
WebEnvironment.defaultHostAccess
@ -44,8 +42,8 @@ public class WebSocketServerTest {
}
@After
public void tearDown() {
executor.close();
public void tearDown() throws InterruptedException {
super.tearDown();
context.close();
}

View File

@ -12,16 +12,16 @@ import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.atomic.AtomicReferenceArray;
import org.enso.ydoc.polyfill.ExecutorSetup;
import org.graalvm.polyglot.Context;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class WebSocketTest {
public class WebSocketTest extends ExecutorSetup {
private Context context;
private ExecutorService executor;
private ExecutorService webServerExecutor;
private WebServer ws;
@ -38,7 +38,7 @@ public class WebSocketTest {
@Before
public void setup() throws Exception {
executor = Executors.newSingleThreadExecutor();
super.setup();
webServerExecutor = Executors.newSingleThreadExecutor();
ws = startWebSocketServer(webServerExecutor);
@ -64,10 +64,10 @@ public class WebSocketTest {
}
@After
public void tearDown() {
public void tearDown() throws InterruptedException {
ws.stop();
webServerExecutor.close();
executor.close();
webServerExecutor.shutdown();
super.tearDown();
context.close();
}

View File

@ -5,6 +5,7 @@ import com.typesafe.scalalogging.Logger
import org.apache.commons.lang3.concurrent.BasicThreadFactory
import org.enso.logger.masking.Masking
import org.enso.logging.LoggingServiceManager
import org.enso.projectmanager.boot.Cli.{PROFILING_PATH, PROFILING_TIME}
import org.enso.projectmanager.service.versionmanagement.RuntimeVersionManagerFactory
import org.enso.runtimeversionmanager.config.GlobalRunnerConfigurationManager
import org.enso.runtimeversionmanager.runner.{LanguageServerOptions, Runner}
@ -103,10 +104,10 @@ object ExecutorWithUnlimitedPool extends LanguageServerExecutor {
)
val profilingPathArguments =
descriptor.profilingPath.toSeq
.flatMap(path => Seq("--profiling-path", path.toString))
.flatMap(path => Seq(s"--$PROFILING_PATH", path.toString))
val profilingTimeArguments =
descriptor.profilingTime.toSeq
.flatMap(time => Seq("--profiling-time", time.toSeconds.toString))
.flatMap(time => Seq(s"--$PROFILING_TIME", time.toSeconds.toString))
val startupArgs =
if (descriptor.skipGraalVMUpdater) Seq("--skip-graalvm-updater")
else Seq()

17
package-lock.json generated
View File

@ -59,7 +59,6 @@
"rimraf": "^5.0.5",
"semver": "^7.5.4",
"sucrase": "^3.34.0",
"verte-vue3": "^1.1.1",
"vue": "^3.4.19",
"ws": "^8.13.0",
"y-codemirror.next": "^0.3.2",
@ -8490,11 +8489,6 @@
"color-name": "1.1.3"
}
},
"node_modules/color-fns": {
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/color-fns/-/color-fns-0.0.10.tgz",
"integrity": "sha512-QFKowTE9CXCLp09Gz5cQo8VPUP55hf73iHEI52JC3NyKfMpQG2VoLWmTxYeTKH6ngkEnoMrCdEX//M6J4PVQBA=="
},
"node_modules/color-name": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
@ -20576,17 +20570,6 @@
"node": ">=0.6.0"
}
},
"node_modules/verte-vue3": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/verte-vue3/-/verte-vue3-1.1.1.tgz",
"integrity": "sha512-U8shxtA88VA7jh63CSOq+hGoh4LuGAd//nuYY0Mly5oFi0nWv8JtaBJlIpJ60rHE7YG/p1cotCCD6IpOUDheoQ==",
"dependencies": {
"color-fns": "^0.0.10"
},
"peerDependencies": {
"vue": ">=3.2.0"
}
},
"node_modules/vite": {
"version": "4.5.2",
"resolved": "https://registry.npmjs.org/vite/-/vite-4.5.2.tgz",

View File

@ -5,6 +5,8 @@ import org.enso.base.polyglot.NumericConverter;
import org.enso.base.statistics.Statistic;
import org.enso.table.data.column.storage.Storage;
import org.enso.table.data.column.storage.numeric.DoubleStorage;
import org.enso.table.data.column.storage.numeric.LongStorage;
import org.enso.table.data.column.storage.type.IntegerType;
import org.enso.table.data.table.Column;
import org.enso.table.data.table.problems.IgnoredNaN;
import org.enso.table.data.table.problems.IgnoredNothing;
@ -31,7 +33,7 @@ public class AddRunning {
return runningStatistic.getResult();
}
private static RunningStatistic<Double> createRunningStatistic(
private static RunningStatistic<?> createRunningStatistic(
Statistic statistic, Column sourceColumn, ProblemAggregator problemAggregator) {
switch (statistic) {
case Sum -> {
@ -41,38 +43,98 @@ public class AddRunning {
return new RunningMeanStatistic(sourceColumn, problemAggregator);
}
case Minimum -> {
if (sourceColumn.getStorage().getType() instanceof IntegerType type) {
return new RunningMinLongStatistic(sourceColumn, problemAggregator, type);
}
return new RunningMinStatistic(sourceColumn, problemAggregator);
}
case Maximum -> {
if (sourceColumn.getStorage().getType() instanceof IntegerType type) {
return new RunningMaxLongStatistic(sourceColumn, problemAggregator, type);
}
return new RunningMaxStatistic(sourceColumn, problemAggregator);
}
default -> throw new IllegalArgumentException("Unsupported statistic: " + statistic);
}
}
private abstract static class RunningStatisticBase implements RunningStatistic<Double> {
private interface TypeHandler<T> {
T tryConvertingToType(Object o);
long typeToRawLongBits(T t);
Storage<T> createStorage(long[] result, int size, BitSet isNothing);
}
private static class DoubleHandler implements TypeHandler<Double> {
@Override
public Double tryConvertingToType(Object o) {
return NumericConverter.tryConvertingToDouble(o);
}
@Override
public long typeToRawLongBits(Double d) {
return Double.doubleToRawLongBits(d);
}
@Override
public Storage<Double> createStorage(long[] result, int size, BitSet isNothing) {
return new DoubleStorage(result, size, isNothing);
}
}
private static class LongHandler implements TypeHandler<Long> {
IntegerType type;
LongHandler(IntegerType type) {
this.type = type;
}
@Override
public Long tryConvertingToType(Object o) {
return NumericConverter.tryConvertingToLong(o);
}
@Override
public long typeToRawLongBits(Long l) {
return l;
}
@Override
public Storage<Long> createStorage(long[] result, int size, BitSet isNothing) {
return new LongStorage(result, size, isNothing, type);
}
}
private abstract static class RunningStatisticBase<T> implements RunningStatistic<T> {
long[] result;
BitSet isNothing;
ColumnAggregatedProblemAggregator columnAggregatedProblemAggregator;
Column sourceColumn;
TypeHandler<T> typeHandler;
RunningStatisticBase(Column sourceColumn, ProblemAggregator problemAggregator) {
RunningStatisticBase(
Column sourceColumn, ProblemAggregator problemAggregator, TypeHandler<T> typeHandler) {
result = new long[sourceColumn.getSize()];
isNothing = new BitSet();
columnAggregatedProblemAggregator = new ColumnAggregatedProblemAggregator(problemAggregator);
this.sourceColumn = sourceColumn;
this.typeHandler = typeHandler;
}
@Override
public void calculateNextValue(int i, RunningIterator<Double> it) {
public void calculateNextValue(int i, RunningIterator<T> it) {
Object value = sourceColumn.getStorage().getItemBoxed(i);
if (value == null) {
columnAggregatedProblemAggregator.reportColumnAggregatedProblem(
new IgnoredNothing(sourceColumn.getName(), i));
}
Double dValue = NumericConverter.tryConvertingToDouble(value);
Double dNextValue;
T dValue = typeHandler.tryConvertingToType(value);
T dNextValue;
if (dValue != null && dValue.equals(Double.NaN)) {
columnAggregatedProblemAggregator.reportColumnAggregatedProblem(
new IgnoredNaN(sourceColumn.getName(), i));
@ -83,13 +145,13 @@ public class AddRunning {
if (dNextValue == null) {
isNothing.set(i);
} else {
result[i] = Double.doubleToRawLongBits(dNextValue);
result[i] = typeHandler.typeToRawLongBits(dNextValue);
}
}
@Override
public Storage<Double> getResult() {
return new DoubleStorage(result, sourceColumn.getSize(), isNothing);
public Storage<T> getResult() {
return typeHandler.createStorage(result, sourceColumn.getSize(), isNothing);
}
}
@ -127,10 +189,10 @@ public class AddRunning {
}
}
private static class RunningSumStatistic extends RunningStatisticBase {
private static class RunningSumStatistic extends RunningStatisticBase<Double> {
RunningSumStatistic(Column sourceColumn, ProblemAggregator problemAggregator) {
super(sourceColumn, problemAggregator);
super(sourceColumn, problemAggregator, new DoubleHandler());
}
@Override
@ -147,10 +209,10 @@ public class AddRunning {
}
}
private static class RunningMeanStatistic extends RunningStatisticBase {
private static class RunningMeanStatistic extends RunningStatisticBase<Double> {
RunningMeanStatistic(Column sourceColumn, ProblemAggregator problemAggregator) {
super(sourceColumn, problemAggregator);
super(sourceColumn, problemAggregator, new DoubleHandler());
}
@Override
@ -181,10 +243,10 @@ public class AddRunning {
}
}
private static class RunningMinStatistic extends RunningStatisticBase {
private static class RunningMinStatistic extends RunningStatisticBase<Double> {
RunningMinStatistic(Column sourceColumn, ProblemAggregator problemAggregator) {
super(sourceColumn, problemAggregator);
super(sourceColumn, problemAggregator, new DoubleHandler());
}
@Override
@ -201,10 +263,31 @@ public class AddRunning {
}
}
private static class RunningMaxStatistic extends RunningStatisticBase {
private static class RunningMinLongStatistic extends RunningStatisticBase<Long> {
RunningMinLongStatistic(
Column sourceColumn, ProblemAggregator problemAggregator, IntegerType type) {
super(sourceColumn, problemAggregator, new LongHandler(type));
}
@Override
public RunningIterator<Long> getNewIterator() {
return new RunningMinLongIterator();
}
private static class RunningMinLongIterator extends RunningIteratorLong {
@Override
public void increment(long value) {
current = Math.min(current, value);
}
}
}
private static class RunningMaxStatistic extends RunningStatisticBase<Double> {
RunningMaxStatistic(Column sourceColumn, ProblemAggregator problemAggregator) {
super(sourceColumn, problemAggregator);
super(sourceColumn, problemAggregator, new DoubleHandler());
}
@Override
@ -220,4 +303,59 @@ public class AddRunning {
}
}
}
private static class RunningMaxLongStatistic extends RunningStatisticBase<Long> {
RunningMaxLongStatistic(
Column sourceColumn, ProblemAggregator problemAggregator, IntegerType type) {
super(sourceColumn, problemAggregator, new LongHandler(type));
}
@Override
public RunningIterator<Long> getNewIterator() {
return new RunningMaxLongIterator();
}
private static class RunningMaxLongIterator extends RunningIteratorLong {
@Override
public void increment(long value) {
current = Math.max(current, value);
}
}
}
private abstract static class RunningIteratorLong implements RunningIterator<Long> {
protected long current;
private boolean isInitialized = false;
@Override
public Long next(Long value) {
if (value != null) {
if (!isInitialized) {
isInitialized = true;
initialize(value);
} else {
increment(value);
}
}
return isInitialized ? getCurrent() : null;
}
@Override
public Long currentValue() {
return isInitialized ? getCurrent() : null;
}
protected void initialize(long value) {
current = value;
}
protected abstract void increment(long value);
protected long getCurrent() {
return current;
}
}
}

View File

@ -3,6 +3,7 @@ from Standard.Base import all
from Standard.Table import Table, Value_Type, Aggregate_Column
import Standard.Table.Internal.Multi_Value_Key.Ordered_Multi_Value_Key
import Standard.Table.Internal.Multi_Value_Key.Unordered_Multi_Value_Key
from Standard.Table.Internal.Storage import get_storage_for_column
from Standard.Test import Bench
polyglot java import org.enso.table.data.index.OrderedMultiValueKey
@ -78,7 +79,7 @@ collect_benches = Bench.build builder->
compare_ordered_keys make_key table compare_keys
run_java table =
key_storages = table.columns.map c-> c.java_column.getStorage
key_storages = table.columns.map get_storage_for_column
directions = Vector.fill key_storages.length 1
make_key row_ix = OrderedMultiValueKey.new key_storages row_ix directions
compare_keys key1 key2 = key1.compareTo key2 < 0
@ -101,7 +102,7 @@ collect_benches = Bench.build builder->
compute_hashcodes make_key table get_hash
run_java table =
key_storages = table.columns.map c-> c.java_column.getStorage
key_storages = table.columns.map get_storage_for_column
text_folding_strategies = Vector.fill key_storages.length TextFoldingStrategy.unicodeNormalizedFold
make_key row_ix = UnorderedMultiValueKey.new key_storages row_ix text_folding_strategies
get_hash key = key.hashCode

View File

@ -1,5 +1,6 @@
from Standard.Base import all
from Standard.Table import all
from Standard.Table.Internal.Storage import get_storage_for_column
from Standard.Test import Bench
@ -17,7 +18,7 @@ type Boxed_Total_Aggregate
self.text_column.to_table.aggregate [] [Aggregate_Column.Longest 0] . at 0 . at 0
java_loop self =
SimpleStorageAggregateHelpers.longestText self.text_column.java_column.getStorage
SimpleStorageAggregateHelpers.longestText (get_storage_for_column self.text_column)
enso_aggregate_vector_proxy self =
n = self.text_column.length
@ -30,7 +31,7 @@ type Boxed_Total_Aggregate
enso_aggregate_storage_get_item self =
n = self.text_column.length
storage = self.text_column.java_column.getStorage
storage = get_storage_for_column self.text_column
(0.up_to n).fold Nothing acc-> ix->
item = storage.getItemBoxed ix
if acc.is_nothing then item else
@ -49,7 +50,7 @@ type Primitive_Total_Aggregate
self.int_column.to_table.aggregate [] [Aggregate_Column.Sum 0] . at 0 . at 0
java_loop self =
long_storage = self.int_column.java_column.getStorage
long_storage = get_storage_for_column self.int_column
SimpleStorageAggregateHelpers.sumLongStorage long_storage
enso_aggregate_vector_proxy self =
@ -60,7 +61,7 @@ type Primitive_Total_Aggregate
enso_aggregate_storage_get_item self =
n = self.int_column.length
storage = self.int_column.java_column.getStorage
storage = get_storage_for_column self.int_column
(0.up_to n).fold 0 acc-> ix->
if storage.isNothing ix then acc else
acc + storage.getItem ix
@ -75,7 +76,7 @@ type Boxed_Sum_Months
Instance date_column
java_loop self =
date_storage = self.date_column.java_column.getStorage
date_storage = get_storage_for_column self.date_column
SimpleStorageAggregateHelpers.sumMonthsOfDateStorage date_storage
enso_aggregate_vector_proxy self =
@ -86,7 +87,7 @@ type Boxed_Sum_Months
enso_aggregate_storage_get_item self =
n = self.date_column.length
storage = self.date_column.java_column.getStorage
storage = get_storage_for_column self.date_column
(0.up_to n).fold 0 acc-> ix->
item = storage.getItemBoxed ix
if item.is_nothing then acc else

View File

@ -1,5 +1,6 @@
from Standard.Base import all
from Standard.Table import all
from Standard.Table.Internal.Storage import get_storage_for_column
from Standard.Test import Bench
@ -19,7 +20,7 @@ type Boxed_Bi_Map_Test
java_map self =
Column.from_storage "result" <|
MapHelpers.stringConcatBimap self.text_column_1.java_column.getStorage self.text_column_2.java_column.getStorage
MapHelpers.stringConcatBimap (get_storage_for_column self.text_column_1) (get_storage_for_column self.text_column_2)
enso_map_as_vector self convert_polyglot_dates =
vector_proxy_1 = self.text_column_1.to_vector
@ -31,8 +32,8 @@ type Boxed_Bi_Map_Test
n = self.text_column_1.length
if self.text_column_2.length != n then Panic.throw "LENGTH MISMATCH" else
builder = StringBuilder.new n
storage_1 = self.text_column_1.java_column.getStorage
storage_2 = self.text_column_2.java_column.getStorage
storage_1 = get_storage_for_column self.text_column_1
storage_2 = get_storage_for_column self.text_column_2
0.up_to n . each i->
item_1 = storage_1.getItemBoxed i
item_2 = storage_2.getItemBoxed i
@ -53,7 +54,7 @@ type Primitive_Bi_Map_Test
java_map self =
Column.from_storage "result" <|
MapHelpers.longAddBimap self.int_column_1.java_column.getStorage self.int_column_2.java_column.getStorage
MapHelpers.longAddBimap (get_storage_for_column self.int_column_1) (get_storage_for_column self.int_column_2)
enso_map_as_vector self convert_polyglot_dates =
vector_proxy_1 = self.int_column_1.to_vector
@ -65,8 +66,8 @@ type Primitive_Bi_Map_Test
n = self.int_column_1.length
if self.int_column_2.length != n then Panic.throw "LENGTH MISMATCH" else
builder = NumericBuilder.createLongBuilder n
storage_1 = self.int_column_1.java_column.getStorage
storage_2 = self.int_column_2.java_column.getStorage
storage_1 = get_storage_for_column self.int_column_1
storage_2 = get_storage_for_column self.int_column_2
0.up_to n . each i->
if storage_1.isNothing i || storage_2.isNothing i then builder.appendNulls 1 else
item_1 = storage_1.getItem i

View File

@ -1,5 +1,6 @@
from Standard.Base import all
from Standard.Table import all
from Standard.Table.Internal.Storage import get_storage_for_column
from Standard.Test import Bench
@ -21,7 +22,7 @@ type Boxed_Map_Test
java_map self =
Column.from_storage "result" <|
MapHelpers.textEndsWith self.text_column.java_column.getStorage self.suffix
MapHelpers.textEndsWith (get_storage_for_column self.text_column) self.suffix
enso_map_as_vector self convert_polyglot_dates =
suffix = self.suffix
@ -33,7 +34,7 @@ type Boxed_Map_Test
suffix = self.suffix
n = self.text_column.length
builder = BoolBuilder.new n
storage = self.text_column.java_column.getStorage
storage = get_storage_for_column self.text_column
0.up_to n . each i->
item = storage.getItemBoxed i
case item of
@ -58,7 +59,7 @@ type Primitive_Map_Test
java_map self =
Column.from_storage "result" <|
MapHelpers.longAdd self.int_column.java_column.getStorage self.shift
MapHelpers.longAdd (get_storage_for_column self.int_column) self.shift
enso_map_as_vector self convert_polyglot_dates =
shift = self.shift
@ -70,7 +71,7 @@ type Primitive_Map_Test
shift = self.shift
n = self.int_column.length
builder = NumericBuilder.createLongBuilder n
storage = self.int_column.java_column.getStorage
storage = get_storage_for_column self.int_column
0.up_to n . each i->
case storage.isNothing i of
True ->

View File

@ -1,5 +1,6 @@
from Standard.Base import all
from Standard.Table import all
from Standard.Table.Internal.Storage import get_storage_for_column
from Standard.Test import Bench
@ -20,7 +21,7 @@ type Boxed_Map_Test_2
java_map self =
Column.from_storage "result" <|
MapHelpers.getYear self.date_column.java_column.getStorage
MapHelpers.getYear (get_storage_for_column self.date_column)
## We can still opt-out of `convert_polyglot_dates`, because this is applied
at output which is Integer. If our output was another Date, we could not
@ -33,7 +34,7 @@ type Boxed_Map_Test_2
enso_map_with_builder_append_long self =
n = self.date_column.length
builder = NumericBuilder.createLongBuilder n
storage = self.date_column.java_column.getStorage
storage = get_storage_for_column self.date_column
0.up_to n . each i->
case storage.getItemBoxed i of
Nothing ->
@ -47,7 +48,7 @@ type Boxed_Map_Test_2
enso_map_with_builder_append_object self =
n = self.date_column.length
builder = NumericBuilder.createLongBuilder n
storage = self.date_column.java_column.getStorage
storage = get_storage_for_column self.date_column
0.up_to n . each i->
case storage.getItemBoxed i of
Nothing ->

View File

@ -26,7 +26,7 @@ type Boxed_Enso_Callback_Test
expected_type = Storage.from_value_type_strict Value_Type.Char
Column.from_storage "result" <|
Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator->
MapHelpers.mapCallback self.text_column.java_column.getStorage self.fn expected_type java_problem_aggregator
MapHelpers.mapCallback (Storage.get_storage_for_column self.text_column) self.fn expected_type java_problem_aggregator
enso_map_as_vector self convert_polyglot_dates =
vector_proxy = self.text_column.to_vector
@ -37,7 +37,7 @@ type Boxed_Enso_Callback_Test
n = self.text_column.length
fn = self.fn
builder = StringBuilder.new n
storage = self.text_column.java_column.getStorage
storage = Storage.get_storage_for_column self.text_column
0.up_to n . each i->
case storage.getItemBoxed i of
Nothing ->
@ -62,13 +62,13 @@ type Primitive_Enso_Callback_Test
java_vectorized self =
Column.from_storage "result" <|
MapHelpers.longAdd self.int_column.java_column.getStorage self.shift
MapHelpers.longAdd (Storage.get_storage_for_column self.int_column) self.shift
java_roundtrip self =
expected_type = Storage.from_value_type_strict Value_Type.Integer
Column.from_storage "result" <|
Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator->
MapHelpers.mapCallback self.int_column.java_column.getStorage self.fn expected_type java_problem_aggregator
MapHelpers.mapCallback (Storage.get_storage_for_column self.int_column) self.fn expected_type java_problem_aggregator
enso_map_as_vector_inferred_builder self convert_polyglot_dates =
vector_proxy = self.int_column.to_vector
@ -82,13 +82,13 @@ type Primitive_Enso_Callback_Test
expected_storage_type = Storage.from_value_type_strict Value_Type.Integer
java_column = Java_Problems.with_problem_aggregator Problem_Behavior.Report_Error java_problem_aggregator->
Java_Column.fromItemsNoDateConversion "result" mapped expected_storage_type java_problem_aggregator
Column.Value java_column
Column.from_storage java_column.getName java_column.getStorage
enso_map_with_builder_2_calls_unboxed self =
n = self.int_column.length
fn = self.fn
builder = NumericBuilder.createLongBuilder n
storage = self.int_column.java_column.getStorage
storage = Storage.get_storage_for_column self.int_column
0.up_to n . each i->
case storage.isNothing i of
True ->
@ -102,7 +102,7 @@ type Primitive_Enso_Callback_Test
n = self.int_column.length
fn = self.fn
builder = NumericBuilder.createLongBuilder n
storage = self.int_column.java_column.getStorage
storage = Storage.get_storage_for_column self.int_column
0.up_to n . each i->
case storage.getItemBoxed i of
Nothing ->

View File

@ -1,5 +1,6 @@
from Standard.Base import all
from Standard.Table import Table
from Standard.Table.Internal.Storage import get_storage_for_column
from Standard.Test import Bench
polyglot java import org.enso.exploratory_benchmark_helpers.LongNullHandling
@ -27,7 +28,7 @@ collect_benches = Bench.build builder->
builder.group ("Handling_Null_Return_" + num_rows.to_text) options group_builder->
get_storage name =
data.table.at name . java_column . getStorage
get_storage_for_column (data.table.at name)
group_builder.specify "NoNulls_10percent" <|
LongNullHandling.runNoNulls (get_storage "a") (get_storage "b")

View File

@ -21,7 +21,7 @@ column_from_vector name items convert_polyglot_dates =
Java_Column.fromItems name items expected_storage_type java_problem_aggregator
False ->
Java_Column.fromItemsNoDateConversion name items expected_storage_type java_problem_aggregator
Column.Value java_column
Column.from_storage java_column.getName java_column.getStorage
check_results results =
mapped = results.map x-> case x of

View File

@ -222,41 +222,41 @@ add_specs suite_builder setup =
r2.catch.cause . should_be_a Missing_Input_Columns
r2.catch.to_display_text . should_equal "No columns in the result, because of another problem: The criteria 'hmmm' did not match any columns."
suite_builder.group prefix+"Table.select_by_type and Table.remove_by_type" group_builder->
suite_builder.group prefix+"Table.select_columns_by_type and Table.remove_columns_by_type" group_builder->
data = Mixed_Columns_Data.setup create_connection_fn table_builder
group_builder.teardown <|
data.teardown
group_builder.specify "should be able to select by type of columns" <|
expect_column_names ["int"] <| data.table.select_by_type [Value_Type.Integer]
expect_column_names ["float"] <| data.table.select_by_type [Value_Type.Float]
expect_column_names ["int", "float"] <| data.table.select_by_type [Value_Type.Integer, Value_Type.Float]
expect_column_names ["text"] <| data.table.select_by_type [Value_Type.Char]
expect_column_names ["bool"] <| data.table.select_by_type [Value_Type.Boolean]
expect_column_names ["int"] <| data.table.select_columns_by_type [Value_Type.Integer]
expect_column_names ["float"] <| data.table.select_columns_by_type [Value_Type.Float]
expect_column_names ["int", "float"] <| data.table.select_columns_by_type [Value_Type.Integer, Value_Type.Float]
expect_column_names ["text"] <| data.table.select_columns_by_type [Value_Type.Char]
expect_column_names ["bool"] <| data.table.select_columns_by_type [Value_Type.Boolean]
group_builder.specify "should be able to drop by type of columns" <|
expect_column_names ["float", "text", "bool"] <| data.table.remove_by_type [Value_Type.Integer]
expect_column_names ["int", "text", "bool"] <| data.table.remove_by_type [Value_Type.Float]
expect_column_names ["text", "bool"] <| data.table.remove_by_type [Value_Type.Integer, Value_Type.Float]
expect_column_names ["int", "float", "bool"] <| data.table.remove_by_type [Value_Type.Char]
expect_column_names ["int", "float", "text"] <| data.table.remove_by_type [Value_Type.Boolean]
expect_column_names ["float", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Integer]
expect_column_names ["int", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Float]
expect_column_names ["text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Integer, Value_Type.Float]
expect_column_names ["int", "float", "bool"] <| data.table.remove_columns_by_type [Value_Type.Char]
expect_column_names ["int", "float", "text"] <| data.table.remove_columns_by_type [Value_Type.Boolean]
group_builder.specify "should be able to select by strict type" <|
expect_column_names ["int"] <| data.table.select_by_type [Value_Type.Integer] strict=True
data.table.select_by_type [Value_Type.Integer Bits.Bits_16] strict=True . should_fail_with No_Output_Columns
expect_column_names ["float"] <| data.table.select_by_type [Value_Type.Float] strict=True
data.table.select_by_type [Value_Type.Float Bits.Bits_32] strict=True . should_fail_with No_Output_Columns
expect_column_names ["text"] <| data.table.select_by_type [Value_Type.Char] strict=True
data.table.select_by_type [Value_Type.Char 3 False] strict=True . should_fail_with No_Output_Columns
expect_column_names ["int"] <| data.table.select_columns_by_type [Value_Type.Integer] strict=True
data.table.select_columns_by_type [Value_Type.Integer Bits.Bits_16] strict=True . should_fail_with No_Output_Columns
expect_column_names ["float"] <| data.table.select_columns_by_type [Value_Type.Float] strict=True
data.table.select_columns_by_type [Value_Type.Float Bits.Bits_32] strict=True . should_fail_with No_Output_Columns
expect_column_names ["text"] <| data.table.select_columns_by_type [Value_Type.Char] strict=True
data.table.select_columns_by_type [Value_Type.Char 3 False] strict=True . should_fail_with No_Output_Columns
group_builder.specify "should be able to remove by strict type" <|
expect_column_names ["float", "text", "bool"] <| data.table.remove_by_type [Value_Type.Integer] strict=True
expect_column_names ["int", "float", "text", "bool"] <| data.table.remove_by_type [Value_Type.Integer Bits.Bits_16] strict=True
expect_column_names ["int", "text", "bool"] <| data.table.remove_by_type [Value_Type.Float] strict=True
expect_column_names ["int", "float", "text", "bool"] <| data.table.remove_by_type [Value_Type.Float Bits.Bits_32] strict=True
expect_column_names ["int", "float", "bool"] <| data.table.remove_by_type [Value_Type.Char] strict=True
expect_column_names ["int", "float", "text", "bool"] <| data.table.remove_by_type [Value_Type.Char 3 False] strict=True
expect_column_names ["float", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Integer] strict=True
expect_column_names ["int", "float", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Integer Bits.Bits_16] strict=True
expect_column_names ["int", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Float] strict=True
expect_column_names ["int", "float", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Float Bits.Bits_32] strict=True
expect_column_names ["int", "float", "bool"] <| data.table.remove_columns_by_type [Value_Type.Char] strict=True
expect_column_names ["int", "float", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Char 3 False] strict=True
suite_builder.group prefix+"Table.remove_columns" group_builder->
data = Select_Columns_Data.setup create_connection_fn table_builder

View File

@ -226,13 +226,13 @@ postgres_specific_spec suite_builder create_connection_fn db_name setup =
data.teardown
group_builder.specify "should return Table information" <|
i = data.t.info
i = data.t.column_info
i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "doubles"]
i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3]
i.at "Value Type" . to_vector . should_equal [default_text, Value_Type.Integer, Value_Type.Boolean, Value_Type.Float]
group_builder.specify "should return Table information, also for aggregated results" <|
i = data.t.aggregate columns=[Aggregate_Column.Concatenate "strs", Aggregate_Column.Sum "ints", Aggregate_Column.Count_Distinct "bools"] . info
i = data.t.aggregate columns=[Aggregate_Column.Concatenate "strs", Aggregate_Column.Sum "ints", Aggregate_Column.Count_Distinct "bools"] . column_info
i.at "Column" . to_vector . should_equal ["Concatenate strs", "Sum ints", "Count Distinct bools"]
i.at "Items Count" . to_vector . should_equal [1, 1, 1]
i.at "Value Type" . to_vector . should_equal [default_text, Value_Type.Decimal, Value_Type.Integer]

View File

@ -42,7 +42,7 @@ add_redshift_specific_specs suite_builder create_connection_fn =
data.teardown
group_builder.specify "should return Table information" <|
i = data.t.info
i = data.t.column_info
i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "reals"]
i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3]
i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer Bits.Bits_32, Value_Type.Boolean, Value_Type.Float Bits.Bits_32]

View File

@ -198,7 +198,7 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup =
data.teardown
group_builder.specify "should return Table information" <|
i = data.t.info
i = data.t.column_info
i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "reals"]
i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3]
i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Boolean, Value_Type.Float]

View File

@ -143,7 +143,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
data.teardown
group_builder.specify "should return Table information" <|
i = data.t.info
i = data.t.column_info
i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "doubles"]
i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3]
i.at "Value Type" . to_vector . should_equal [default_text, Value_Type.Integer, Value_Type.Boolean, Value_Type.Float]

View File

@ -37,8 +37,8 @@ add_specs suite_builder =
group_builder.specify "should be able to read in a table with dates" <|
data.table.column_count.should_equal 5
data.table.row_count.should_equal 7
data.table.info.at "Column" . to_vector . should_equal ['Number','Party', 'Title', 'From', 'To']
data.table.info.at "Value Type" . to_vector . should_equal [Value_Type.Integer, Value_Type.Char, Value_Type.Char, Value_Type.Date, Value_Type.Date]
data.table.column_info.at "Column" . to_vector . should_equal ['Number','Party', 'Title', 'From', 'To']
data.table.column_info.at "Value Type" . to_vector . should_equal [Value_Type.Integer, Value_Type.Char, Value_Type.Char, Value_Type.Date, Value_Type.Date]
group_builder.specify "should be able to treat a single value as a Date" <|
from_column = data.table.at 'From'

Some files were not shown because too many files have changed in this diff Show More