1
1
mirror of https://github.com/enso-org/enso.git synced 2024-12-19 11:44:00 +03:00

Add traces to integration tests + suppress one flaky assertion. ()

Fixes 

Most issues were caused by a problem with Project List flooding the network with its requests - this was fixed on develop.
But one assertion was flaky - it assumed we will see the "real" run result on `write` node, but sometimes it is immediately overwritten by dry run.

But the most important part of this PR is adding traces to Electron packages - it's should be much easier now to debug E2E test failures.

Also renamed the previously misnamed "E2E tests" to "[GUI] integration tests".
This commit is contained in:
Adam Obuchowicz 2024-11-27 15:09:59 +01:00 committed by GitHub
parent b0863e9ca4
commit 736134e491
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
102 changed files with 251 additions and 260 deletions
.github
.gitignore
app
.vscode
gui
env.d.ts
integration-test
package.jsonplaywright.config.ts
src
dashboard/modules/payments/components
project-view
components/GraphEditor
test-entrypoint.ts
tsconfig.node.jsonvite.config.tsvite.test.config.tsvitest.config.ts
ide-desktop/client
build/build/src/ci_gen

4
.github/CODEOWNERS vendored
View File

@ -31,8 +31,8 @@ tsconfig.json
# GUI/Dashboard
/app @Frizi @farmaazon @vitvakatu @kazcw @AdRiley @PabloBuchu @indiv0 @somebody1234 @MrFlashAccount
/app/gui/e2e/dashboard @PabloBuchu @indiv0 @somebody1234 @MrFlashAccount
/app/gui/e2e/project-view @Frizi @farmaazon @vitvakatu @kazcw @AdRiley
/app/gui/integration-test/dashboard @PabloBuchu @indiv0 @somebody1234 @MrFlashAccount
/app/gui/integration-test/project-view @Frizi @farmaazon @vitvakatu @kazcw @AdRiley
/app/gui/src/dashboard @PabloBuchu @indiv0 @somebody1234 @MrFlashAccount
/app/gui/src/project-view @Frizi @farmaazon @vitvakatu @kazcw @AdRiley
/app/ide-desktop/ @PabloBuchu @indiv0 @somebody1234 @MrFlashAccount

View File

@ -70,7 +70,7 @@ jobs:
- name: 🧪 Unit Tests
id: unit-tests
continue-on-error: true
run: pnpm run ci:test
run: pnpm run ci:unit-test
- name: 📝 Annotate Code Linting Results
if: always()
@ -146,7 +146,7 @@ jobs:
- name: 🎭 Playwright Tests
working-directory: app/gui
run: pnpm run e2e --shard=${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
run: pnpm run test:integration --shard=${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
- name: ⬆️ Upload blob report to GitHub Actions Artifacts
if: ${{ !cancelled() }}

View File

@ -372,6 +372,13 @@ jobs:
ENSO_TEST_USER: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_USERNAME }}
ENSO_TEST_USER_PASSWORD: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_PASSWORD }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- if: failure()
name: Upload Test Traces
uses: actions/upload-artifact@v4
with:
compression-level: 0
name: test-traces-linux-amd64
path: app/ide-desktop/client/test-traces
- run: rm $HOME/.enso/credentials
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@ -444,6 +451,13 @@ jobs:
ENSO_TEST_USER: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_USERNAME }}
ENSO_TEST_USER_PASSWORD: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_PASSWORD }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- if: failure()
name: Upload Test Traces
uses: actions/upload-artifact@v4
with:
compression-level: 0
name: test-traces-macos-amd64
path: app/ide-desktop/client/test-traces
- run: rm $HOME/.enso/credentials
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@ -512,7 +526,13 @@ jobs:
ENSO_TEST_USER: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_USERNAME }}
ENSO_TEST_USER_PASSWORD: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_PASSWORD }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true
- if: failure()
name: Upload Test Traces
uses: actions/upload-artifact@v4
with:
compression-level: 0
name: test-traces-windows-amd64
path: app/ide-desktop/client/test-traces
- run: rm $HOME/.enso/credentials
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

1
.gitignore vendored
View File

@ -182,5 +182,6 @@ test-results
##########################
test-results/
test-traces/
playwright-report/
playwright/.cache/

View File

@ -1,38 +1,6 @@
{
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Dashboard",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso-dashboard", "dev"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "Dashboard (Electron, Linux)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso", "watch:linux"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "Dashboard (Electron, macOS)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso", "watch:macos"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "Dashboard (Electron, Windows)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso", "watch:windows"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
@ -44,59 +12,35 @@
{
"type": "node",
"request": "launch",
"name": "GUI (Storybook)",
"name": "GUI (Electron, Linux)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso-gui", "story:dev"],
"runtimeArgs": ["run", "--filter", "enso", "watch:linux"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "Dashboard (Build)",
"name": "GUI (Electron, macOS)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso", "watch:macos"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "GUI (Electron, Windows)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso", "watch:windows"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "GUI (Build)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["build:gui"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "Dashboard (E2E UI)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso-dashboard", "test-dev:e2e"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "GUI (E2E UI)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso-gui", "test:e2e", "--", "--ui"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "Dashboard (All tests)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso-dashboard", "test"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "Dashboard (E2E tests)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso-dashboard", "test:e2e"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "Dashboard (Unit tests)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso-dashboard", "test:unit"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
@ -107,18 +51,26 @@
{
"type": "node",
"request": "launch",
"name": "GUI (E2E tests)",
"name": "GUI (Unit tests)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso-gui", "test:e2e"],
"runtimeArgs": ["run", "--filter", "enso-gui", "test:unit"],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "GUI (Unit tests)",
"name": "GUI (Integration tests)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso-gui", "test:unit", "--", "run"],
"runtimeArgs": ["run", "--filter", "enso-gui", "test:integration"],
"outputCapture": "std"
}
},
{
"type": "node",
"request": "launch",
"name": "GUI (Integration tests with UI)",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "--filter", "enso-gui", "test-dev:integration"],
"outputCapture": "std"
},
]
}

2
app/gui/env.d.ts vendored
View File

@ -234,7 +234,7 @@ declare global {
// @ts-expect-error The index signature is intentional to disallow unknown env vars.
readonly ENSO_CLOUD_ENSO_HOST?: string
// === E2E test variables ===
// === Integration test variables ===
readonly PWDEBUG?: '1'
readonly IS_IN_PLAYWRIGHT_TEST?: `${boolean}`

View File

@ -1,4 +1,4 @@
# End-to-end tests
# Integration tests
## Running tests
@ -6,20 +6,20 @@ Execute all commands from the parent directory.
```sh
# Run tests normally
pnpm run test:e2e
pnpm run test:integration
# Open UI to run tests
pnpm run test:e2e:debug
pnpm run test:integration:debug
# Run tests in a specific file only
pnpm run test:e2e -- e2e/file-name-here.spec.ts
pnpm run test:e2e:debug -- e2e/file-name-here.spec.ts
pnpm run test:integration -- integration-test/file-name-here.spec.ts
pnpm run test:integration:debug -- integration-test/file-name-here.spec.ts
# Compile the entire app before running the tests.
# DOES NOT hot reload the tests.
# Prefer not using this when you are trying to fix a test;
# prefer using this when you just want to know which tests are failing (if any).
PROD=1 pnpm run test:e2e
PROD=1 pnpm run test:e2e:debug
PROD=1 pnpm run test:e2e -- e2e/file-name-here.spec.ts
PROD=1 pnpm run test:e2e:debug -- e2e/file-name-here.spec.ts
PROD=1 pnpm run test:integration
PROD=1 pnpm run test:integration:debug
PROD=1 pnpm run test:integration -- integration-test/file-name-here.spec.ts
PROD=1 pnpm run test:integration:debug -- integration-test/file-name-here.spec.ts
```
## Getting started

View File

@ -13,8 +13,7 @@ export const expect = baseExpect.extend({
try {
await expect(locator.first()).toBeVisible()
pass = true
} catch (e) {
console.log(e)
} catch {
pass = false
}

View File

@ -7,7 +7,7 @@ import { edgesFromNodeWithBinding, edgesToNodeWithBinding } from './locate'
/**
* Every edge consists of multiple parts.
* See e2e/edgeRendering.spec.ts for explanation.
* See edgeRendering.spec.ts for explanation.
*/
const EDGE_PARTS = 2

View File

@ -10,7 +10,7 @@ import {
} from './projectManager'
/**
* Setup for all E2E tests.
* Setup for all Project View's Integration tests.
*
* It runs mocked project manager server.
*/

View File

@ -26,9 +26,10 @@
"dev:vite": "vite",
"test": "corepack pnpm run /^^^^test:.*/",
"test:unit": "vitest run",
"test:integration": "cross-env NODE_ENV=production playwright test",
"test-dev:unit": "vitest",
"test-dev:e2e": "cross-env NODE_ENV=production playwright test --ui",
"test-dev-dashboard:e2e": "cross-env NODE_ENV=production playwright test ./e2e/dashboard/ --ui",
"test-dev:integration": "cross-env NODE_ENV=production playwright test --ui",
"test-dev-dashboard:integration": "cross-env NODE_ENV=production playwright test ./integration-test/dashboard/ --ui",
"preinstall": "corepack pnpm run generate-metadata",
"generate-metadata": "node scripts/generateIconMetadata.js",
"storybook:react": "cross-env FRAMEWORK=react storybook dev",
@ -37,7 +38,6 @@
"build-storybook:vue": "cross-env FRAMEWORK=vue storybook build",
"chromatic:react": "cross-env FRAMEWORK=react chromatic deploy",
"chromatic:vue": "cross-env FRAMEWORK=vue chromatic deploy",
"e2e": "cross-env NODE_ENV=production playwright test",
"playwright:install": "playwright install chromium"
},
"dependencies": {

View File

@ -9,7 +9,7 @@
import { defineConfig } from '@playwright/test'
import net from 'net'
const DEBUG = process.env.DEBUG_E2E === 'true'
const DEBUG = process.env.DEBUG_TEST === 'true'
const isCI = process.env.CI === 'true'
const isProd = process.env.PROD === 'true'
@ -101,7 +101,7 @@ export default defineConfig({
// Setup project
{
name: 'Setup Dashboard',
testDir: './e2e/dashboard',
testDir: './integration-test/dashboard',
testMatch: /.*\.setup\.ts/,
timeout: TIMEOUT_MS,
use: {
@ -111,7 +111,7 @@ export default defineConfig({
},
{
name: 'Dashboard',
testDir: './e2e/dashboard',
testDir: './integration-test/dashboard',
testMatch: /.*\.spec\.ts/,
dependencies: ['Setup Dashboard'],
expect: {
@ -127,7 +127,7 @@ export default defineConfig({
},
{
name: 'Auth',
testDir: './e2e/dashboard/auth',
testDir: './integration-test/dashboard/auth',
expect: {
toHaveScreenshot: { threshold: 0 },
timeout: TIMEOUT_MS,
@ -140,13 +140,15 @@ export default defineConfig({
},
{
name: 'Setup Tests for Project View',
testMatch: /e2e\/project-view\/setup\.ts/,
testMatch: /integration-test\/project-view\/setup\.ts/,
},
{
name: 'Project View',
dependencies: ['Setup Tests for Project View'],
testDir: './e2e/project-view',
testDir: './integration-test/project-view',
timeout: 60000,
repeatEach: 3,
retries: 0,
expect: {
timeout: 5000,
toHaveScreenshot: { threshold: 0 },
@ -159,7 +161,7 @@ export default defineConfig({
],
webServer: [
{
env: { E2E: 'true' },
env: { INTEGRATION_TEST: 'true' },
command:
isCI || isProd ?
`corepack pnpm build && corepack pnpm exec vite preview --port ${ports.projectView} --strictPort`

View File

@ -64,7 +64,7 @@ export function AddPaymentMethodForm<
const cardElement =
// FIXME[sb]: I do not understand why `useWatch` is not sufficient for Playwright.
// (The value is always `undefined` with `useWatch` alone)
// It is worth noting that E2E tests previously worked without requiring this change - as of:
// It is worth noting that integration tests previously worked without requiring this change - as of:
// 1500849c32f70f5f4d95240b7e31377c649dc25b
Form.useWatch({ control: form.control, name: 'cardElement' }) ?? form.getValues().cardElement

View File

@ -105,7 +105,7 @@ const sourceRect = computed<Rect | undefined>(() => {
/**
* Edges which do not have `sourceRect` and `targetPos` initialized are marked by a special
* `broken-edge` data-testid, for debugging and e2e test purposes.
* `broken-edge` data-testid, for debugging and integration test purposes.
*/
const edgeIsBroken = computed(
() =>

View File

@ -1,5 +1,5 @@
/***
* This is a web entrypoint file for the GUI application running in e2e tests (playwright). It is
* This is a web entrypoint file for the GUI application running in integration tests (playwright). It is
* not included in normal application distribution. The goal of separate entrypoint is to allow
* providing mocks for connections with engine and to avoid running dashboard.
*/

View File

@ -8,7 +8,7 @@
"playwright.config.*",
"eslint.config.js",
"tailwind.config.js",
"e2e/**/*",
"integration-test/**/*",
"src/dashboard/hooks/eventCallbackHooks.ts",
"src/dashboard/modules/payments/constants.ts",
"src/dashboard/services/Backend.ts",

View File

@ -14,7 +14,7 @@ import wasm from 'vite-plugin-wasm'
import tailwindConfig from './tailwind.config'
const dynHostnameWsUrl = (port: number) => JSON.stringify(`ws://__HOSTNAME__:${port}`)
const projectManagerUrl = dynHostnameWsUrl(process.env.E2E === 'true' ? 30536 : 30535)
const projectManagerUrl = dynHostnameWsUrl(process.env.INTEGRATION_TEST === 'true' ? 30536 : 30535)
const IS_CLOUD_BUILD = process.env.CLOUD_BUILD === 'true'
const YDOC_SERVER_URL =
process.env.ENSO_POLYGLOT_YDOC_SERVER ? JSON.stringify(process.env.ENSO_POLYGLOT_YDOC_SERVER)
@ -24,7 +24,9 @@ const YDOC_SERVER_URL =
await readEnvironmentFromFile()
const entrypoint =
process.env.E2E === 'true' ? './src/project-view/e2e-entrypoint.ts' : './src/entrypoint.ts'
process.env.INTEGRATION_TEST === 'true' ?
'./src/project-view/test-entrypoint.ts'
: './src/entrypoint.ts'
// NOTE(Frizi): This rename is for the sake of forward compatibility with not yet merged config refactor on bazel branch,
// and because Vite's HTML env replacements only work with import.meta.env variables, not defines.

View File

@ -1,4 +1,4 @@
/** @file Vite configuration for dashboard e2e tests' server. */
/** @file Vite configuration for dashboard integration tests' server. */
import { fileURLToPath } from 'node:url'
import { defineConfig, mergeConfig } from 'vite'
@ -21,10 +21,10 @@ export default mergeConfig(
resolve: {
alias: {
'@stripe/stripe-js/pure': fileURLToPath(
new URL('./e2e/dashboard/mock/stripe.ts', import.meta.url),
new URL('./integration-test/dashboard/mock/stripe.ts', import.meta.url),
),
'@stripe/react-stripe-js': fileURLToPath(
new URL('./e2e/dashboard/mock/react-stripe.tsx', import.meta.url),
new URL('./integration-test/dashboard/mock/react-stripe.tsx', import.meta.url),
),
},
extensions: [

View File

@ -8,7 +8,7 @@ const config = mergeConfig(
test: {
environment: 'jsdom',
includeSource: ['./src/**/*.{ts,vue}'],
exclude: [...configDefaults.exclude, 'e2e/**/*'],
exclude: [...configDefaults.exclude, 'integration-test/**/*'],
root: fileURLToPath(new URL('./', import.meta.url)),
restoreMocks: true,
},

View File

@ -49,6 +49,7 @@
"esbuild": "^0.23.0",
"esbuild-plugin-wasm": "^1.1.0",
"fast-glob": "^3.2.12",
"lib0": "^0.2.85",
"playwright": "^1.45.0",
"portfinder": "^1.0.32",
"tsx": "^4.7.1",

View File

@ -195,6 +195,7 @@ declare global {
// === Integration test variables ===
readonly ENSO_TEST?: string
readonly ENSO_TEST_PROJECTS_DIR?: string
readonly ENSO_TEST_APP_ARGS?: string
readonly ENSO_TEST_USER?: string
readonly ENSO_TEST_USER_PASSWORD?: string

View File

@ -366,9 +366,7 @@ class App {
enableBlinkFeatures: argGroups.chrome.options.enableBlinkFeatures.value,
disableBlinkFeatures: argGroups.chrome.options.disableBlinkFeatures.value,
spellcheck: false,
...(process.env.ENSO_TEST != null && process.env.ENSO_TEST !== '' ?
{ partition: 'test' }
: {}),
...(process.env.ENSO_TEST ? { partition: 'test' } : {}),
}
const windowPreferences: electron.BrowserWindowConstructorOptions = {
webPreferences,

View File

@ -388,8 +388,8 @@ export function getProjectRoot(subtreePath: string): string | null {
/** Get the directory that stores Enso projects. */
export function getProjectsDirectory(): string {
if (process.env.ENSO_TEST != null && process.env.ENSO_TEST !== '') {
return pathModule.join(os.tmpdir(), 'enso-test-projects', process.env.ENSO_TEST)
if (process.env.ENSO_TEST_PROJECTS_DIR) {
return process.env.ENSO_TEST_PROJECTS_DIR
} else {
const documentsPath = desktopEnvironment.DOCUMENTS
if (documentsPath === undefined) {

View File

@ -1,32 +0,0 @@
/** @file A test for basic flow of the application: open project and see if nodes appear. */
import { expect } from '@playwright/test'
import { electronTest, loginAsTestUser } from './electronTest'
electronTest('Create new project', async ({ page }) => {
await loginAsTestUser(page)
await expect(page.getByRole('button', { name: 'New Project', exact: true })).toBeVisible()
await page.getByRole('button', { name: 'New Project', exact: true }).click()
await expect(page.locator('.GraphNode')).toHaveCount(1, { timeout: 60000 })
// We see the node type and visualization, so the engine is running the program
await expect(page.locator('.node-type')).toHaveText('Table', { timeout: 30000 })
await expect(page.locator('.TableVisualization')).toBeVisible({ timeout: 30000 })
await expect(page.locator('.TableVisualization')).toContainText('Welcome To Enso!')
// We can add new node and see suggestions.
await page.locator('.GraphNode').click()
await page.keyboard.press('Enter')
await expect(page.locator('.ComponentBrowser')).toBeVisible()
const entry = page.locator('.ComponentList .list-variant.selected .component', {
hasText: 'column_count',
})
await expect(entry).toBeVisible()
await entry.click()
await expect(page.locator('.GraphNode'), {}).toHaveCount(2)
await page.locator('.GraphNode', { hasText: 'column_count' }).click()
await page
.locator('.GraphNode', { hasText: 'column_count' })
.getByRole('button', { name: 'Visualization' })
.click()
})

View File

@ -1,13 +1,14 @@
/** @file Commonly used functions for electron tests */
import { _electron, expect, type Page, test } from '@playwright/test'
import { _electron, ElectronApplication, expect, type Page, test } from '@playwright/test'
import { TEXTS } from 'enso-common/src/text'
import fs from 'node:fs/promises'
import * as random from 'lib0/random'
import os from 'node:os'
import pathModule from 'node:path'
const LOADING_TIMEOUT = 10000
const TEXT = TEXTS.english
export const CONTROL_KEY = os.platform() === 'darwin' ? 'Meta' : 'Control'
/**
* Tests run on electron executable.
@ -16,21 +17,32 @@ const TEXT = TEXTS.english
*/
export function electronTest(
name: string,
body: (args: { page: Page; projectsDir: string }) => Promise<void> | void,
body: (args: {
page: Page
app: ElectronApplication
projectsDir: string
}) => Promise<void> | void,
) {
test(name, async () => {
const uuid = random.uuidv4()
const projectsDir = pathModule.join(os.tmpdir(), 'enso-test-projects', `${name}-${uuid}`)
console.log('Running Application; projects dir is', projectsDir)
const app = await _electron.launch({
executablePath: process.env.ENSO_TEST_EXEC_PATH ?? '',
args: process.env.ENSO_TEST_APP_ARGS != null ? process.env.ENSO_TEST_APP_ARGS.split(',') : [],
env: { ...process.env, ['ENSO_TEST']: name },
env: { ...process.env, ENSO_TEST: name, ENSO_TEST_PROJECTS_DIR: projectsDir },
})
const page = await app.firstWindow()
await app.context().tracing.start({ screenshots: true, snapshots: true, sources: true })
// Wait until page will be finally loaded: we expect login screen.
// There's bigger timeout, because the page may load longer on CI machines.
await expect(page.getByText('Login to your account')).toBeVisible({ timeout: LOADING_TIMEOUT })
const projectsDir = pathModule.join(os.tmpdir(), 'enso-test-projects', name)
await body({ page, projectsDir })
await app.close()
try {
await body({ page, app, projectsDir })
} finally {
await app.context().tracing.stop({ path: `test-traces/${name}.zip` })
await app.close()
}
})
}
@ -62,21 +74,3 @@ export async function loginAsTestUser(page: Page) {
await page.getByTestId('form-submit-button').click()
}
/**
* Find the most recently edited Enso project in `projectsDir` and return its absolute path.
* There can be multiple projects, as the directory can be reused by subsequent test runs.
* We precisely know the naming schema for new projects, and we use this knowledge to
* find the project that was created most recently.
*/
export async function findMostRecentlyCreatedProject(projectsDir: string): Promise<string | null> {
const dirContent = await fs.readdir(projectsDir)
const sorted = dirContent.sort((a, b) => {
// Project names always end with a number, so we can sort them by that number.
const numA = parseInt(a.match(/\d+/)![0], 10)
const numB = parseInt(b.match(/\d+/)![0], 10)
return numA - numB
})
const last = sorted.pop()
return last != null ? pathModule.join(projectsDir, last) : null
}

View File

@ -0,0 +1,113 @@
/** @file A test for basic flow of the application: open project and see if nodes appear. */
import { expect } from '@playwright/test'
import fs from 'node:fs/promises'
import pathModule from 'node:path'
import { CONTROL_KEY, electronTest, loginAsTestUser } from './electronTest'
electronTest('Local Workflow', async ({ page, app, projectsDir }) => {
const PROJECT_PATH = pathModule.join(projectsDir, 'NewProject1')
const OUTPUT_FILE = 'output.txt'
const TEXT_TO_WRITE = 'Some text'
await loginAsTestUser(page)
await expect(page.getByRole('button', { name: 'New Project', exact: true })).toBeVisible()
await page.getByRole('button', { name: 'New Project', exact: true }).click()
await expect(page.locator('.GraphNode')).toHaveCount(1, { timeout: 60000 })
// We see the node type and visualization, so the engine is running the program
await expect(page.locator('.node-type')).toHaveText('Table', { timeout: 30000 })
await expect(page.locator('.TableVisualization')).toBeVisible({ timeout: 30000 })
await expect(page.locator('.TableVisualization')).toContainText('Welcome To Enso!')
// Create node connected to the first node by picking suggestion.
await page.locator('.GraphNode').click()
await page.keyboard.press('Enter')
await expect(page.locator('.ComponentBrowser')).toBeVisible()
const entry = page.locator('.ComponentList .list-variant.selected .component', {
hasText: 'column_count',
})
await expect(entry).toBeVisible()
await entry.click()
await expect(page.locator('.GraphNode'), {}).toHaveCount(2)
const addedNode = page.locator('.GraphNode', { hasText: 'column_count' })
await addedNode.click()
await addedNode.getByRole('button', { name: 'Visualization' }).click()
await expect(addedNode.locator('.TableVisualization')).toBeVisible()
await expect(addedNode.locator('.TableVisualization')).toContainText('1')
// Create new text literal node.
await page.keyboard.press('Escape') // deselect.
await page.locator('.PlusButton').click()
await expect(page.locator('.ComponentBrowser')).toBeVisible()
const input = page.locator('.ComponentBrowser input')
await input.fill(`'${TEXT_TO_WRITE}'`)
await page.keyboard.press('Enter')
await expect(page.locator('.GraphNode'), {}).toHaveCount(3)
// Create write node
await page.keyboard.press('Enter')
await expect(page.locator('.ComponentBrowser')).toBeVisible()
const code = `write (enso_project.root / '${OUTPUT_FILE}') on_existing_file=..Append`
await input.fill(code)
await page.keyboard.press('Enter')
await expect(page.locator('.GraphNode'), {}).toHaveCount(4)
// Check that the output file is not created yet.
const writeNode = page.locator('.GraphNode', { hasText: 'write' })
await writeNode.click()
await writeNode.getByRole('button', { name: 'Visualization' }).click()
await expect(writeNode.locator('.TableVisualization')).toContainText('output_ensodryrun')
expect(await fs.readdir(PROJECT_PATH)).not.toContain(OUTPUT_FILE)
// Press `Write once` button.
await writeNode.locator('.More').click()
await writeNode.getByTestId('recompute').click()
// Check that the output file is created and contains expected text.
try {
await expect(writeNode.locator('.TableVisualization')).toContainText(OUTPUT_FILE)
} catch {
// TODO[ao]
// The above check is flaky, because sometimes the additional engine run overrides node output back to "dry run".
// To confirm if this should be expected.
console.error(
'Didn\'t see the visualization update after "Write once" action; assuming it\'s already done',
)
}
let projectFiles = await fs.readdir(PROJECT_PATH)
expect(projectFiles).toContain(OUTPUT_FILE)
if (projectFiles.includes(OUTPUT_FILE)) {
const content = await readFile(PROJECT_PATH, OUTPUT_FILE)
expect(content).toStrictEqual(TEXT_TO_WRITE)
}
// Put an image to clipboard.
await app.evaluate(({ nativeImage, clipboard }) => {
const image = nativeImage.createFromDataURL(
'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAMAAAADBAMAAACkBqiMAAAAFVBMVEX+MjP+Kyv6bGz7X1/9Ojr+Li7+JyfwctYrAAAAEUlEQVQI12NwVWBgE2BgNAAAA6EArVWjc3wAAAAASUVORK5CYII=',
)
clipboard.writeImage(image)
})
// Open docpanel and paste an image.
await page.getByRole('button', { name: 'Documentation Panel' }).click()
await page.locator('.DocumentationEditor').click()
await page.keyboard.press(`${CONTROL_KEY}+V`)
const docImageElement = page.locator('.DocumentationEditor').getByAltText('Image')
await expect(docImageElement).toBeVisible()
await expect(docImageElement).toHaveJSProperty('width', 3)
// Image is properly uploaded.
// Wait for upload; while uploading, the image is a bit transparent.
await expect(docImageElement).not.toHaveClass(/[$ ]uploading[^ ]/, { timeout: 10000 })
projectFiles = await fs.readdir(PROJECT_PATH)
expect(projectFiles).toContain('images')
const images = await fs.readdir(pathModule.join(PROJECT_PATH, 'images'))
expect(images).toContain('image.png')
})
async function readFile(projectDir: string, fileName: string): Promise<string> {
return await fs.readFile(pathModule.join(projectDir, fileName), 'utf8')
}

View File

@ -1,73 +0,0 @@
/**
* @file A test for `Write` button in the node menu check that nodes do not write
* to files unless specifically asked for.
*/
import { expect } from '@playwright/test'
import assert from 'node:assert'
import fs from 'node:fs/promises'
import pathModule from 'node:path'
import { electronTest, findMostRecentlyCreatedProject, loginAsTestUser } from './electronTest'
electronTest('Recompute', async ({ page, projectsDir }) => {
await loginAsTestUser(page)
await expect(page.getByRole('button', { name: 'New Project', exact: true })).toBeVisible()
await page.getByRole('button', { name: 'New Project', exact: true }).click()
await expect(page.locator('.GraphNode')).toHaveCount(1, { timeout: 60000 })
// We see the node type and visualization, so the engine is running the program
await expect(page.locator('.node-type')).toHaveText('Table', { timeout: 30000 })
await expect(page.locator('.TableVisualization')).toBeVisible({ timeout: 30000 })
await expect(page.locator('.TableVisualization')).toContainText('Welcome To Enso!')
const OUTPUT_FILE = 'output.txt'
const EXPECTED_OUTPUT = 'Some text'
// Create first node (text literal)
await page.locator('.PlusButton').click()
await expect(page.locator('.ComponentBrowser')).toBeVisible()
const input = page.locator('.ComponentBrowser input')
await input.fill(`'${EXPECTED_OUTPUT}'`)
await page.keyboard.press('Enter')
await expect(page.locator('.GraphNode'), {}).toHaveCount(2)
// Create second node (write)
await page.keyboard.press('Enter')
await expect(page.locator('.ComponentBrowser')).toBeVisible()
const code = `write (enso_project.root / '${OUTPUT_FILE}') on_existing_file=..Append`
await input.fill(code)
await page.keyboard.press('Enter')
await expect(page.locator('.GraphNode'), {}).toHaveCount(3)
// Check that the output file is not created yet.
const writeNode = page.locator('.GraphNode', { hasText: 'write' })
await writeNode.click()
await writeNode.getByRole('button', { name: 'Visualization' }).click()
await expect(writeNode.locator('.TableVisualization')).toContainText('output_ensodryrun')
const ourProject = await findMostRecentlyCreatedProject(projectsDir)
expect(ourProject).not.toBeNull()
assert(ourProject)
expect(await listFiles(ourProject)).not.toContain(OUTPUT_FILE)
// Press `Write once` button.
await writeNode.locator('.More').click()
await writeNode.getByTestId('recompute').click()
// Check that the output file is created and contains expected text.
await expect(writeNode.locator('.TableVisualization')).toContainText(OUTPUT_FILE)
const projectFiles = await listFiles(ourProject)
expect(projectFiles).toContain(OUTPUT_FILE)
if (projectFiles.includes(OUTPUT_FILE)) {
const content = await readFile(ourProject, OUTPUT_FILE)
expect(content).toStrictEqual(EXPECTED_OUTPUT)
}
})
async function listFiles(projectDir: string): Promise<string[]> {
return await fs.readdir(projectDir)
}
async function readFile(projectDir: string, fileName: string): Promise<string> {
return await fs.readFile(pathModule.join(projectDir, fileName), 'utf8')
}

View File

@ -18,6 +18,7 @@ use core::panic;
use ide_ci::actions::workflow::definition::cancel_workflow_action;
use ide_ci::actions::workflow::definition::get_input_expression;
use ide_ci::actions::workflow::definition::shell;
use ide_ci::actions::workflow::definition::step::Argument;
use ide_ci::actions::workflow::definition::Access;
use ide_ci::actions::workflow::definition::Job;
use ide_ci::actions::workflow::definition::JobArchetype;
@ -587,20 +588,28 @@ impl JobArchetype for PackageIde {
} else {
shell(TEST_COMMAND)
};
let mut test_step = test_step
let test_step = test_step
.with_env("DEBUG", "pw:browser log:")
.with_secret_exposed_as(secret::ENSO_CLOUD_TEST_ACCOUNT_USERNAME, "ENSO_TEST_USER")
.with_secret_exposed_as(
secret::ENSO_CLOUD_TEST_ACCOUNT_PASSWORD,
"ENSO_TEST_USER_PASSWORD",
);
// Make E2E tests optional on Windows, as we have an ongoing issue with the runner.
// TODO[ib]: remove once the issue is resolved.
if target.0 == OS::Windows {
test_step.continue_on_error = Some(true);
}
steps.push(test_step);
let upload_test_traces_step = Step {
r#if: Some("failure()".into()),
name: Some("Upload Test Traces".into()),
uses: Some("actions/upload-artifact@v4".into()),
with: Some(Argument::Other(BTreeMap::from_iter([
("name".into(), format!("test-traces-{}-{}", target.0, target.1).into()),
("path".into(), "app/ide-desktop/client/test-traces".into()),
("compression-level".into(), 0.into()), // The traces are in zip already.
]))),
..Default::default()
};
steps.push(upload_test_traces_step);
// After the E2E tests run, they create a credentials file in user home directory.
// If that file is not cleaned up, future runs of our tests may randomly get
// authenticated into Enso Cloud. We want to run tests as an authenticated

Some files were not shown because too many files have changed in this diff Show More