From ad69eeb4ad1eb2104eaeabcd070ea940d2db1a9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Wawrzyniec=20Urba=C5=84czyk?= Date: Mon, 10 Oct 2022 23:38:48 +0200 Subject: [PATCH] Build script merge (#3743) Merged the build script into main repository. Some related cleanups. --- .cargo/config | 11 - .cargo/config.toml | 11 + .github/CODEOWNERS | 7 +- .github/workflows/benchmark.yml | 40 +- .github/workflows/changelog.yml | 68 + .github/workflows/gui.yml | 447 ++-- .github/workflows/nightly.yml | 344 ++- .github/workflows/scala-new.yml | 204 +- Cargo.lock | 1842 ++++++++--------- Cargo.toml | 19 +- app/gui/Cargo.toml | 2 +- app/gui/analytics/src/lib.rs | 1 + app/gui/config/src/lib.rs | 1 + .../double-representation/Cargo.toml | 2 +- .../src/alias_analysis.rs | 10 +- .../double-representation/src/connection.rs | 6 +- .../double-representation/src/definition.rs | 28 +- .../double-representation/src/graph.rs | 22 +- .../double-representation/src/lib.rs | 7 +- .../double-representation/src/module.rs | 20 +- .../src/refactorings/collapse.rs | 2 +- .../double-representation/src/text.rs | 5 +- app/gui/controller/engine-model/src/lib.rs | 1 + app/gui/controller/engine-protocol/Cargo.toml | 12 +- .../engine-protocol/src/binary/client.rs | 2 +- .../engine-protocol/src/binary/message.rs | 8 +- .../src/language_server/types.rs | 28 +- app/gui/controller/engine-protocol/src/lib.rs | 1 + .../engine-protocol/src/project_manager.rs | 10 +- app/gui/controller/src/lib.rs | 1 + .../src/bin/api_events_to_profile.rs | 1 + .../src/bin/message_beanpoles.rs | 1 + app/gui/enso-profiler-enso-data/src/lib.rs | 1 + app/gui/language/ast/impl/src/assoc.rs | 2 +- app/gui/language/ast/impl/src/known.rs | 2 +- app/gui/language/ast/impl/src/lib.rs | 6 +- app/gui/language/ast/macros/src/lib.rs | 1 + app/gui/language/parser/Cargo.toml | 10 +- app/gui/language/parser/build.rs | 72 +- .../src/{main.rs => bin/run-scala-parser.rs} | 7 +- app/gui/language/parser/src/lib.rs | 1 + 
app/gui/language/parser/tests/ast.rs | 10 +- app/gui/language/parser/tests/bugs.rs | 4 +- app/gui/language/parser/tests/crumbs.rs | 2 +- app/gui/language/parser/tests/doc-gen.rs | 2 +- app/gui/language/parser/tests/id_map.rs | 4 +- app/gui/language/parser/tests/macros.rs | 4 +- app/gui/language/parser/tests/parsing.rs | 20 +- app/gui/language/parser/tests/web.rs | 4 +- app/gui/language/span-tree/Cargo.toml | 2 +- app/gui/language/span-tree/example/src/lib.rs | 1 + app/gui/language/span-tree/src/action.rs | 2 +- app/gui/language/span-tree/src/generate.rs | 2 +- app/gui/language/span-tree/src/lib.rs | 1 + app/gui/language/span-tree/src/node.rs | 2 +- app/gui/src/controller/graph.rs | 10 +- app/gui/src/controller/graph/executed.rs | 4 +- app/gui/src/controller/ide.rs | 2 +- app/gui/src/controller/ide/desktop.rs | 2 +- app/gui/src/controller/ide/plain.rs | 2 +- app/gui/src/controller/module.rs | 4 +- app/gui/src/controller/project.rs | 4 +- app/gui/src/controller/searcher.rs | 4 +- .../controller/searcher/component/builder.rs | 2 +- .../controller/searcher/component/group.rs | 4 +- .../searcher/component/hardcoded.rs | 2 +- app/gui/src/controller/text.rs | 6 +- app/gui/src/controller/upload.rs | 2 +- app/gui/src/lib.rs | 5 +- app/gui/src/model/execution_context.rs | 4 +- app/gui/src/model/module.rs | 15 +- app/gui/src/model/module/plain.rs | 6 +- app/gui/src/model/module/synchronized.rs | 4 +- app/gui/src/model/project.rs | 6 +- app/gui/src/model/project/synchronized.rs | 4 +- app/gui/src/model/suggestion_database.rs | 2 +- .../src/model/suggestion_database/entry.rs | 8 +- .../src/model/suggestion_database/example.rs | 4 +- app/gui/src/model/undo_redo.rs | 5 +- app/gui/src/presenter/graph.rs | 2 +- app/gui/src/presenter/graph/state.rs | 2 +- .../presenter/graph/visualization/manager.rs | 4 +- app/gui/src/presenter/searcher/provider.rs | 2 +- app/gui/src/test.rs | 4 +- app/gui/src/transport/web.rs | 2 +- app/gui/tests/language_server.rs | 4 +- app/gui/view/Cargo.toml | 
2 +- .../component-browser/breadcrumbs/src/lib.rs | 5 +- .../component-group/src/lib.rs | 4 +- .../component-group/src/wide.rs | 2 +- .../searcher-list-panel/src/lib.rs | 3 +- app/gui/view/component-browser/src/lib.rs | 1 + .../debug_scene/component-group/src/lib.rs | 7 +- .../component-list-panel-view/src/lib.rs | 6 +- app/gui/view/debug_scene/icons/src/lib.rs | 1 + app/gui/view/debug_scene/interface/Cargo.toml | 2 +- app/gui/view/debug_scene/interface/src/lib.rs | 3 +- .../new-component-list-panel-view/src/lib.rs | 2 +- app/gui/view/debug_scene/src/lib.rs | 1 + .../view/debug_scene/visualization/src/lib.rs | 1 + .../src/component/add_node_button.rs | 2 +- .../src/component/breadcrumbs/breadcrumb.rs | 2 +- .../src/component/breadcrumbs/project_name.rs | 10 +- .../graph-editor/src/component/node/error.rs | 2 +- .../graph-editor/src/component/profiling.rs | 2 +- app/gui/view/graph-editor/src/lib.rs | 27 +- .../graph-editor/src/new_node_position.rs | 2 +- app/gui/view/graph-editor/src/profiling.rs | 4 +- app/gui/view/src/code_editor.rs | 2 +- app/gui/view/src/lib.rs | 1 + app/gui/view/src/project.rs | 10 +- app/gui/view/src/searcher.rs | 2 +- .../view/src/window_control_buttons/close.rs | 2 +- app/gui/view/welcome-screen/src/lib.rs | 1 + build/.gitignore | 5 + build/Cargo.toml | 10 - build/README.md | 97 + build/build-utils/src/lib.rs | 117 -- build/build/Cargo.toml | 85 + build/build/build.rs | 19 + build/build/examples/artifact.rs | 73 + build/build/examples/experiments.rs | 20 + build/build/examples/s3.rs | 31 + build/build/paths.yaml | 96 + build/build/src/aws.rs | 204 ++ build/build/src/aws/ecr.rs | 58 + build/build/src/aws/ecr/runtime.rs | 28 + build/build/src/bump_version.rs | 14 + build/build/src/changelog.rs | 71 + build/build/src/changelog/check.rs | 67 + build/build/src/ci.rs | 11 + build/build/src/config.rs | 128 ++ build/build/src/context.rs | 69 + build/build/src/engine.rs | 319 +++ build/build/src/engine/bundle.rs | 94 + 
build/build/src/engine/context.rs | 537 +++++ build/build/src/engine/env.rs | 21 + build/build/src/engine/sbt.rs | 46 + build/build/src/enso.rs | 156 ++ build/build/src/env.rs | 28 + build/build/src/httpbin.rs | 73 + build/build/src/ide.rs | 8 + build/build/src/ide/web.rs | 410 ++++ build/build/src/lib.rs | 200 ++ build/build/src/paths.rs | 304 +++ build/build/src/postgres.rs | 252 +++ build/build/src/prettier.rs | 27 + build/build/src/programs.rs | 45 + build/build/src/project.rs | 363 ++++ build/build/src/project/backend.rs | 165 ++ build/build/src/project/engine.rs | 73 + build/build/src/project/gui.rs | 176 ++ build/build/src/project/ide.rs | 162 ++ build/build/src/project/project_manager.rs | 72 + build/build/src/project/runtime.rs | 92 + build/build/src/project/wasm.rs | 496 +++++ build/build/src/project/wasm/env.rs | 38 + build/build/src/project/wasm/js_patcher.rs | 75 + build/build/src/project/wasm/test.rs | 149 ++ build/build/src/project_manager.rs | 40 + build/build/src/release.rs | 135 ++ build/build/src/repo.rs | 36 + build/build/src/rust.rs | 8 + build/build/src/rust/parser.rs | 86 + build/build/src/source.rs | 99 + build/build/src/version.rs | 277 +++ build/ci_utils/Cargo.toml | 91 + build/ci_utils/src/actions.rs | 10 + build/ci_utils/src/actions/artifacts.rs | 258 +++ .../src/actions/artifacts/artifact.rs | 1 + .../ci_utils/src/actions/artifacts/context.rs | 83 + .../src/actions/artifacts/download.rs | 107 + .../ci_utils/src/actions/artifacts/models.rs | 141 ++ build/ci_utils/src/actions/artifacts/raw.rs | 258 +++ .../src/actions/artifacts/run_session.rs | 86 + .../ci_utils/src/actions/artifacts/upload.rs | 288 +++ build/ci_utils/src/actions/context.rs | 83 + build/ci_utils/src/actions/env.rs | 168 ++ build/ci_utils/src/actions/workflow.rs | 104 + .../src/actions/workflow/definition.rs | 766 +++++++ build/ci_utils/src/anyhow.rs | 46 + build/ci_utils/src/archive.rs | 205 ++ build/ci_utils/src/archive/tar.rs | 30 + build/ci_utils/src/archive/zip.rs | 
70 + build/ci_utils/src/buffer.rs | 1 + build/ci_utils/src/cache.rs | 199 ++ build/ci_utils/src/cache/archive.rs | 49 + build/ci_utils/src/cache/artifact.rs | 54 + build/ci_utils/src/cache/asset.rs | 40 + build/ci_utils/src/cache/download.rs | 95 + build/ci_utils/src/cache/goodie.rs | 84 + build/ci_utils/src/cache/goodie/binaryen.rs | 77 + build/ci_utils/src/cache/goodie/graalvm.rs | 164 ++ build/ci_utils/src/cache/goodie/sbt.rs | 33 + build/ci_utils/src/ci.rs | 15 + build/ci_utils/src/deploy.rs | 8 + build/ci_utils/src/deploy/runner.rs | 122 ++ build/ci_utils/src/env.rs | 444 ++++ build/ci_utils/src/env/known.rs | 7 + build/ci_utils/src/extensions.rs | 19 + build/ci_utils/src/extensions/child.rs | 13 + build/ci_utils/src/extensions/clap.rs | 46 + build/ci_utils/src/extensions/command.rs | 78 + build/ci_utils/src/extensions/from_string.rs | 36 + build/ci_utils/src/extensions/future.rs | 75 + build/ci_utils/src/extensions/iterator.rs | 54 + build/ci_utils/src/extensions/maps.rs | 24 + build/ci_utils/src/extensions/octocrab.rs | 9 + build/ci_utils/src/extensions/os.rs | 74 + build/ci_utils/src/extensions/output.rs | 48 + build/ci_utils/src/extensions/path.rs | 100 + build/ci_utils/src/extensions/reqwest.rs | 37 + build/ci_utils/src/extensions/result.rs | 65 + build/ci_utils/src/extensions/str.rs | 28 + build/ci_utils/src/extensions/version.rs | 16 + build/ci_utils/src/fmt.rs | 11 + build/ci_utils/src/fs.rs | 309 +++ build/ci_utils/src/fs/tokio.rs | 96 + build/ci_utils/src/fs/wrappers.rs | 82 + build/ci_utils/src/fs/wrappers/tokio.rs | 54 + build/ci_utils/src/future.rs | 94 + build/ci_utils/src/github.rs | 284 +++ build/ci_utils/src/github/model.rs | 52 + build/ci_utils/src/github/release.rs | 115 + build/ci_utils/src/global.rs | 132 ++ build/ci_utils/src/goodie.rs | 58 + build/ci_utils/src/goodies.rs | 2 + build/ci_utils/src/goodies/musl.rs | 133 ++ build/ci_utils/src/goodies/sbt.rs | 37 + build/ci_utils/src/io.rs | 135 ++ build/ci_utils/src/io/web.rs | 138 ++ 
build/ci_utils/src/io/web/client.rs | 79 + build/ci_utils/src/lib.rs | 243 +++ build/ci_utils/src/log.rs | 81 + build/ci_utils/src/models.rs | 9 + build/ci_utils/src/models/compose.rs | 36 + build/ci_utils/src/models/config.rs | 214 ++ build/ci_utils/src/os.rs | 18 + build/ci_utils/src/os/target.rs | 189 ++ build/ci_utils/src/path.rs | 9 + build/ci_utils/src/path/trie.rs | 55 + build/ci_utils/src/paths.rs | 498 +++++ build/ci_utils/src/platform.rs | 13 + build/ci_utils/src/platform/win.rs | 8 + build/ci_utils/src/program.rs | 198 ++ build/ci_utils/src/program/command.rs | 540 +++++ .../ci_utils/src/program/command/provider.rs | 19 + build/ci_utils/src/program/location.rs | 35 + build/ci_utils/src/program/resolver.rs | 44 + build/ci_utils/src/program/shell.rs | 48 + build/ci_utils/src/program/version.rs | 76 + build/ci_utils/src/program/with_cwd.rs | 26 + build/ci_utils/src/programs.rs | 49 + build/ci_utils/src/programs/cargo.rs | 137 ++ .../ci_utils/src/programs/cargo/build_env.rs | 23 + build/ci_utils/src/programs/cargo/clippy.rs | 1 + build/ci_utils/src/programs/cargo/fmt.rs | 17 + build/ci_utils/src/programs/cmd.rs | 167 ++ build/ci_utils/src/programs/conda.rs | 19 + build/ci_utils/src/programs/docker.rs | 554 +++++ build/ci_utils/src/programs/flatc.rs | 12 + build/ci_utils/src/programs/git.rs | 129 ++ build/ci_utils/src/programs/git/clean.rs | 114 + build/ci_utils/src/programs/go.rs | 28 + build/ci_utils/src/programs/graal.rs | 99 + build/ci_utils/src/programs/java.rs | 92 + build/ci_utils/src/programs/javac.rs | 35 + build/ci_utils/src/programs/node.rs | 54 + build/ci_utils/src/programs/npx.rs | 12 + build/ci_utils/src/programs/pwsh.rs | 39 + build/ci_utils/src/programs/robocopy.rs | 36 + build/ci_utils/src/programs/rsync.rs | 43 + build/ci_utils/src/programs/rustc.rs | 32 + build/ci_utils/src/programs/rustup.rs | 27 + build/ci_utils/src/programs/sbt.rs | 75 + build/ci_utils/src/programs/seven_zip.rs | 249 +++ build/ci_utils/src/programs/sh.rs | 39 + 
build/ci_utils/src/programs/tar.rs | 322 +++ build/ci_utils/src/programs/vs.rs | 54 + build/ci_utils/src/programs/vswhere.rs | 272 +++ build/ci_utils/src/programs/wasm_opt.rs | 85 + build/ci_utils/src/programs/wasm_pack.rs | 121 ++ build/ci_utils/src/reqwest.rs | 58 + build/ci_utils/src/serde.rs | 101 + build/cli/Cargo.toml | 31 + build/cli/src/arg.rs | 278 +++ build/cli/src/arg/backend.rs | 64 + build/cli/src/arg/engine.rs | 17 + build/cli/src/arg/git_clean.rs | 13 + build/cli/src/arg/gui.rs | 53 + build/cli/src/arg/ide.rs | 85 + build/cli/src/arg/java_gen.rs | 20 + build/cli/src/arg/project_manager.rs | 18 + build/cli/src/arg/release.rs | 26 + build/cli/src/arg/runtime.rs | 28 + build/cli/src/arg/wasm.rs | 125 ++ build/cli/src/bin/enso-build4/main.rs | 33 + build/cli/src/bin/enso-disable-wasm-opt.rs | 94 + .../cli/src/bin/enso-remove-draft-releases.rs | 35 + build/cli/src/ci_gen.rs | 382 ++++ build/cli/src/ci_gen/job.rs | 211 ++ build/cli/src/ci_gen/step.rs | 39 + build/cli/src/lib.rs | 931 +++++++++ build/{ => cli}/src/main.rs | 5 +- build/{ => deprecated}/build-utils/Cargo.toml | 3 +- build/deprecated/build-utils/src/lib.rs | 48 + .../{ => deprecated}/rust-scripts/Cargo.toml | 0 .../rust-scripts/src/bin/test_all.rs | 1 + build/enso-formatter/Cargo.toml | 3 +- build/enso-formatter/src/lib.rs | 654 ++++++ build/enso-formatter/src/main.rs | 649 +----- build/paths.js | 78 - build/prettier/package.json | 4 +- clippy.toml | 1 + integration-test/src/lib.rs | 1 + lib/rust/automata/src/dfa.rs | 10 +- lib/rust/automata/src/lib.rs | 1 + lib/rust/automata/src/nfa.rs | 19 +- lib/rust/callback/src/lib.rs | 7 +- lib/rust/code-builder/src/lib.rs | 1 + lib/rust/config-reader/src/lib.rs | 15 +- lib/rust/data-structures/src/diet.rs | 12 +- lib/rust/data-structures/src/lib.rs | 1 + lib/rust/debug-api/src/lib.rs | 1 + lib/rust/ensogl/app/theme/derive/src/lib.rs | 2 +- .../ensogl/app/theme/hardcoded/src/lib.rs | 1 + lib/rust/ensogl/component/button/src/lib.rs | 1 + 
.../component/drop-down-menu/src/lib.rs | 2 + .../ensogl/component/drop-manager/src/lib.rs | 1 + .../ensogl/component/file-browser/src/lib.rs | 1 + .../ensogl/component/flame-graph/src/lib.rs | 1 + .../ensogl/component/grid-view/src/lib.rs | 12 +- lib/rust/ensogl/component/gui/src/lib.rs | 1 + lib/rust/ensogl/component/label/src/lib.rs | 1 + .../ensogl/component/list-view/src/lib.rs | 11 +- .../ensogl/component/scroll-area/src/lib.rs | 3 +- .../ensogl/component/scrollbar/src/lib.rs | 1 + lib/rust/ensogl/component/selector/src/lib.rs | 1 + .../ensogl/component/selector/src/shape.rs | 4 +- .../component/sequence-diagram/src/lib.rs | 3 +- lib/rust/ensogl/component/shadow/src/lib.rs | 1 + lib/rust/ensogl/component/src/lib.rs | 1 + .../component/text/src/buffer/formatting.rs | 4 + .../component/text/src/buffer/movement.rs | 2 +- .../component/text/src/component/text.rs | 24 +- lib/rust/ensogl/component/text/src/font.rs | 2 +- .../text/src/font/embedded/Cargo.toml | 7 +- .../component/text/src/font/embedded/build.rs | 130 +- .../text/src/font/embedded/src/lib.rs | 1 + .../component/text/src/font/family/src/lib.rs | 2 +- .../ensogl/component/text/src/font/glyph.rs | 4 +- .../component/text/src/font/msdf/Cargo.toml | 4 +- .../component/text/src/font/msdf/build.rs | 91 +- .../component/text/src/font/msdf/src/lib.rs | 1 + lib/rust/ensogl/component/text/src/lib.rs | 2 +- .../ensogl/component/toggle-button/src/lib.rs | 1 + lib/rust/ensogl/component/tooltip/src/lib.rs | 1 + lib/rust/ensogl/core/src/animation/easing.rs | 3 +- .../core/src/animation/frp/animation.rs | 3 +- .../ensogl/core/src/animation/frp/easing.rs | 38 +- .../core/src/animation/physics/inertia.rs | 10 +- lib/rust/ensogl/core/src/data/color/data.rs | 2 +- lib/rust/ensogl/core/src/data/dirty.rs | 4 +- .../ensogl/core/src/display/object/class.rs | 2 +- .../core/src/display/render/passes/symbols.rs | 2 +- .../ensogl/core/src/display/style/sheet.rs | 10 +- .../ensogl/core/src/display/style/theme.rs | 2 +- 
.../ensogl/core/src/display/symbol/gpu.rs | 2 +- .../symbol/gpu/geometry/primitive/mesh.rs | 2 +- lib/rust/ensogl/core/src/display/world.rs | 2 +- lib/rust/ensogl/core/src/gui/cursor.rs | 12 +- lib/rust/ensogl/core/src/gui/style.rs | 6 +- lib/rust/ensogl/core/src/lib.rs | 3 +- .../core/src/system/gpu/context/native.rs | 2 +- .../core/src/system/gpu/data/texture/class.rs | 6 +- .../core/src/system/gpu/shader/compiler.rs | 8 +- lib/rust/ensogl/example/animation/src/lib.rs | 1 + .../example/complex-shape-system/src/lib.rs | 1 + .../example/custom-shape-system/src/lib.rs | 1 + .../ensogl/example/dom-symbols/src/lib.rs | 1 + .../ensogl/example/drop-manager/src/lib.rs | 1 + .../ensogl/example/easing-animator/src/lib.rs | 1 + lib/rust/ensogl/example/grid-view/src/lib.rs | 7 +- lib/rust/ensogl/example/list-view/src/lib.rs | 7 +- .../ensogl/example/mouse-events/src/lib.rs | 1 + .../example/profiling-run-graph/src/lib.rs | 1 + .../render-profile-flamegraph/src/lib.rs | 1 + .../ensogl/example/scroll-area/src/lib.rs | 7 +- .../ensogl/example/shape-system/src/lib.rs | 1 + lib/rust/ensogl/example/slider/src/lib.rs | 7 +- .../sprite-system-benchmark/src/lib.rs | 1 + .../ensogl/example/sprite-system/src/lib.rs | 1 + lib/rust/ensogl/example/src/lib.rs | 1 + lib/rust/ensogl/example/text-area/src/lib.rs | 1 + lib/rust/ensogl/src/lib.rs | 1 + lib/rust/frp/src/lib.rs | 1 + lib/rust/fuzzly/src/lib.rs | 1 + lib/rust/generics/src/lib.rs | 1 + lib/rust/json-rpc/src/lib.rs | 1 + lib/rust/json-rpc/src/macros.rs | 2 +- lib/rust/json-rpc/src/messages.rs | 22 +- lib/rust/json-rpc/tests/test.rs | 6 +- .../launcher-shims/src/bin/launcher_000.rs | 1 + .../launcher-shims/src/bin/launcher_001.rs | 1 + .../launcher-shims/src/bin/launcher_002.rs | 1 + .../launcher-shims/src/bin/launcher_003.rs | 1 + .../launcher-shims/src/bin/launcher_004.rs | 1 + lib/rust/launcher-shims/src/lib.rs | 1 + lib/rust/logger/src/lib.rs | 1 + lib/rust/macro-utils/src/lib.rs | 1 + lib/rust/metamodel/lexpr/src/lib.rs | 1 + 
lib/rust/metamodel/src/lib.rs | 1 + lib/rust/metamodel/src/meta/serialization.rs | 8 +- lib/rust/optics/src/lib.rs | 1 + .../generate-java/src/bin/graph-java.rs | 1 + .../generate-java/src/bin/graph-meta.rs | 1 + .../generate-java/src/bin/graph-rust.rs | 1 + .../generate-java/src/bin/java-tests.rs | 1 + lib/rust/parser/generate-java/src/lib.rs | 1 + lib/rust/parser/generate-java/src/main.rs | 1 + lib/rust/parser/jni/src/lib.rs | 1 + lib/rust/parser/src/lexer.rs | 2 +- lib/rust/parser/src/lib.rs | 5 +- lib/rust/parser/src/macros/expand.rs | 2 +- lib/rust/parser/src/main.rs | 2 +- lib/rust/parser/src/syntax/tree.rs | 28 +- .../parser/src/syntax/tree/visitor/src/lib.rs | 1 + lib/rust/parser/tests/parse.rs | 2 - lib/rust/prelude/src/lib.rs | 1 + lib/rust/prelude/src/serde.rs | 6 +- lib/rust/prelude/src/tp.rs | 2 +- lib/rust/profiler/data/src/bin/devtools.rs | 7 +- lib/rust/profiler/data/src/bin/intervals.rs | 1 + .../profiler/data/src/bin/measurements.rs | 1 + lib/rust/profiler/data/src/bin/processes.rs | 1 + lib/rust/profiler/data/src/lib.rs | 1 + lib/rust/profiler/demo-data/src/lib.rs | 1 + lib/rust/profiler/flame-graph/src/lib.rs | 1 + lib/rust/profiler/macros/src/lib.rs | 1 + lib/rust/profiler/src/lib.rs | 1 + lib/rust/profiler/src/log.rs | 2 +- lib/rust/reflect/macros/src/lib.rs | 1 + lib/rust/reflect/src/lib.rs | 1 + .../shapely/macros/src/derive_clone_ref.rs | 2 +- .../shapely/macros/src/derive_iterator.rs | 4 +- lib/rust/shapely/macros/src/lib.rs | 2 +- lib/rust/shapely/src/lib.rs | 1 + lib/rust/shortcuts/example/src/lib.rs | 1 + lib/rust/shortcuts/src/lib.rs | 1 + lib/rust/text/src/lib.rs | 1 + lib/rust/types/src/lib.rs | 1 + lib/rust/web/src/lib.rs | 1 + run | 2 +- run.cmd | 4 +- rust-toolchain.toml | 4 +- rustfmt.toml | 3 - tools/language-server/logstat/Cargo.toml | 2 +- tools/language-server/logstat/src/main.rs | 1 + tools/language-server/wstest/Cargo.toml | 2 +- tools/language-server/wstest/src/main.rs | 1 + 460 files changed, 23659 insertions(+), 2992 
deletions(-) delete mode 100644 .cargo/config create mode 100644 .cargo/config.toml create mode 100644 .github/workflows/changelog.yml rename app/gui/language/parser/src/{main.rs => bin/run-scala-parser.rs} (88%) create mode 100644 build/.gitignore delete mode 100644 build/Cargo.toml create mode 100644 build/README.md delete mode 100644 build/build-utils/src/lib.rs create mode 100644 build/build/Cargo.toml create mode 100644 build/build/build.rs create mode 100644 build/build/examples/artifact.rs create mode 100644 build/build/examples/experiments.rs create mode 100644 build/build/examples/s3.rs create mode 100644 build/build/paths.yaml create mode 100644 build/build/src/aws.rs create mode 100644 build/build/src/aws/ecr.rs create mode 100644 build/build/src/aws/ecr/runtime.rs create mode 100644 build/build/src/bump_version.rs create mode 100644 build/build/src/changelog.rs create mode 100644 build/build/src/changelog/check.rs create mode 100644 build/build/src/ci.rs create mode 100644 build/build/src/config.rs create mode 100644 build/build/src/context.rs create mode 100644 build/build/src/engine.rs create mode 100644 build/build/src/engine/bundle.rs create mode 100644 build/build/src/engine/context.rs create mode 100644 build/build/src/engine/env.rs create mode 100644 build/build/src/engine/sbt.rs create mode 100644 build/build/src/enso.rs create mode 100644 build/build/src/env.rs create mode 100644 build/build/src/httpbin.rs create mode 100644 build/build/src/ide.rs create mode 100644 build/build/src/ide/web.rs create mode 100644 build/build/src/lib.rs create mode 100644 build/build/src/paths.rs create mode 100644 build/build/src/postgres.rs create mode 100644 build/build/src/prettier.rs create mode 100644 build/build/src/programs.rs create mode 100644 build/build/src/project.rs create mode 100644 build/build/src/project/backend.rs create mode 100644 build/build/src/project/engine.rs create mode 100644 build/build/src/project/gui.rs create mode 100644 
build/build/src/project/ide.rs create mode 100644 build/build/src/project/project_manager.rs create mode 100644 build/build/src/project/runtime.rs create mode 100644 build/build/src/project/wasm.rs create mode 100644 build/build/src/project/wasm/env.rs create mode 100644 build/build/src/project/wasm/js_patcher.rs create mode 100644 build/build/src/project/wasm/test.rs create mode 100644 build/build/src/project_manager.rs create mode 100644 build/build/src/release.rs create mode 100644 build/build/src/repo.rs create mode 100644 build/build/src/rust.rs create mode 100644 build/build/src/rust/parser.rs create mode 100644 build/build/src/source.rs create mode 100644 build/build/src/version.rs create mode 100644 build/ci_utils/Cargo.toml create mode 100644 build/ci_utils/src/actions.rs create mode 100644 build/ci_utils/src/actions/artifacts.rs create mode 100644 build/ci_utils/src/actions/artifacts/artifact.rs create mode 100644 build/ci_utils/src/actions/artifacts/context.rs create mode 100644 build/ci_utils/src/actions/artifacts/download.rs create mode 100644 build/ci_utils/src/actions/artifacts/models.rs create mode 100644 build/ci_utils/src/actions/artifacts/raw.rs create mode 100644 build/ci_utils/src/actions/artifacts/run_session.rs create mode 100644 build/ci_utils/src/actions/artifacts/upload.rs create mode 100644 build/ci_utils/src/actions/context.rs create mode 100644 build/ci_utils/src/actions/env.rs create mode 100644 build/ci_utils/src/actions/workflow.rs create mode 100644 build/ci_utils/src/actions/workflow/definition.rs create mode 100644 build/ci_utils/src/anyhow.rs create mode 100644 build/ci_utils/src/archive.rs create mode 100644 build/ci_utils/src/archive/tar.rs create mode 100644 build/ci_utils/src/archive/zip.rs create mode 100644 build/ci_utils/src/buffer.rs create mode 100644 build/ci_utils/src/cache.rs create mode 100644 build/ci_utils/src/cache/archive.rs create mode 100644 build/ci_utils/src/cache/artifact.rs create mode 100644 
build/ci_utils/src/cache/asset.rs create mode 100644 build/ci_utils/src/cache/download.rs create mode 100644 build/ci_utils/src/cache/goodie.rs create mode 100644 build/ci_utils/src/cache/goodie/binaryen.rs create mode 100644 build/ci_utils/src/cache/goodie/graalvm.rs create mode 100644 build/ci_utils/src/cache/goodie/sbt.rs create mode 100644 build/ci_utils/src/ci.rs create mode 100644 build/ci_utils/src/deploy.rs create mode 100644 build/ci_utils/src/deploy/runner.rs create mode 100644 build/ci_utils/src/env.rs create mode 100644 build/ci_utils/src/env/known.rs create mode 100644 build/ci_utils/src/extensions.rs create mode 100644 build/ci_utils/src/extensions/child.rs create mode 100644 build/ci_utils/src/extensions/clap.rs create mode 100644 build/ci_utils/src/extensions/command.rs create mode 100644 build/ci_utils/src/extensions/from_string.rs create mode 100644 build/ci_utils/src/extensions/future.rs create mode 100644 build/ci_utils/src/extensions/iterator.rs create mode 100644 build/ci_utils/src/extensions/maps.rs create mode 100644 build/ci_utils/src/extensions/octocrab.rs create mode 100644 build/ci_utils/src/extensions/os.rs create mode 100644 build/ci_utils/src/extensions/output.rs create mode 100644 build/ci_utils/src/extensions/path.rs create mode 100644 build/ci_utils/src/extensions/reqwest.rs create mode 100644 build/ci_utils/src/extensions/result.rs create mode 100644 build/ci_utils/src/extensions/str.rs create mode 100644 build/ci_utils/src/extensions/version.rs create mode 100644 build/ci_utils/src/fmt.rs create mode 100644 build/ci_utils/src/fs.rs create mode 100644 build/ci_utils/src/fs/tokio.rs create mode 100644 build/ci_utils/src/fs/wrappers.rs create mode 100644 build/ci_utils/src/fs/wrappers/tokio.rs create mode 100644 build/ci_utils/src/future.rs create mode 100644 build/ci_utils/src/github.rs create mode 100644 build/ci_utils/src/github/model.rs create mode 100644 build/ci_utils/src/github/release.rs create mode 100644 
build/ci_utils/src/global.rs create mode 100644 build/ci_utils/src/goodie.rs create mode 100644 build/ci_utils/src/goodies.rs create mode 100644 build/ci_utils/src/goodies/musl.rs create mode 100644 build/ci_utils/src/goodies/sbt.rs create mode 100644 build/ci_utils/src/io.rs create mode 100644 build/ci_utils/src/io/web.rs create mode 100644 build/ci_utils/src/io/web/client.rs create mode 100644 build/ci_utils/src/lib.rs create mode 100644 build/ci_utils/src/log.rs create mode 100644 build/ci_utils/src/models.rs create mode 100644 build/ci_utils/src/models/compose.rs create mode 100644 build/ci_utils/src/models/config.rs create mode 100644 build/ci_utils/src/os.rs create mode 100644 build/ci_utils/src/os/target.rs create mode 100644 build/ci_utils/src/path.rs create mode 100644 build/ci_utils/src/path/trie.rs create mode 100644 build/ci_utils/src/paths.rs create mode 100644 build/ci_utils/src/platform.rs create mode 100644 build/ci_utils/src/platform/win.rs create mode 100644 build/ci_utils/src/program.rs create mode 100644 build/ci_utils/src/program/command.rs create mode 100644 build/ci_utils/src/program/command/provider.rs create mode 100644 build/ci_utils/src/program/location.rs create mode 100644 build/ci_utils/src/program/resolver.rs create mode 100644 build/ci_utils/src/program/shell.rs create mode 100644 build/ci_utils/src/program/version.rs create mode 100644 build/ci_utils/src/program/with_cwd.rs create mode 100644 build/ci_utils/src/programs.rs create mode 100644 build/ci_utils/src/programs/cargo.rs create mode 100644 build/ci_utils/src/programs/cargo/build_env.rs create mode 100644 build/ci_utils/src/programs/cargo/clippy.rs create mode 100644 build/ci_utils/src/programs/cargo/fmt.rs create mode 100644 build/ci_utils/src/programs/cmd.rs create mode 100644 build/ci_utils/src/programs/conda.rs create mode 100644 build/ci_utils/src/programs/docker.rs create mode 100644 build/ci_utils/src/programs/flatc.rs create mode 100644 
build/ci_utils/src/programs/git.rs create mode 100644 build/ci_utils/src/programs/git/clean.rs create mode 100644 build/ci_utils/src/programs/go.rs create mode 100644 build/ci_utils/src/programs/graal.rs create mode 100644 build/ci_utils/src/programs/java.rs create mode 100644 build/ci_utils/src/programs/javac.rs create mode 100644 build/ci_utils/src/programs/node.rs create mode 100644 build/ci_utils/src/programs/npx.rs create mode 100644 build/ci_utils/src/programs/pwsh.rs create mode 100644 build/ci_utils/src/programs/robocopy.rs create mode 100644 build/ci_utils/src/programs/rsync.rs create mode 100644 build/ci_utils/src/programs/rustc.rs create mode 100644 build/ci_utils/src/programs/rustup.rs create mode 100644 build/ci_utils/src/programs/sbt.rs create mode 100644 build/ci_utils/src/programs/seven_zip.rs create mode 100644 build/ci_utils/src/programs/sh.rs create mode 100644 build/ci_utils/src/programs/tar.rs create mode 100644 build/ci_utils/src/programs/vs.rs create mode 100644 build/ci_utils/src/programs/vswhere.rs create mode 100644 build/ci_utils/src/programs/wasm_opt.rs create mode 100644 build/ci_utils/src/programs/wasm_pack.rs create mode 100644 build/ci_utils/src/reqwest.rs create mode 100644 build/ci_utils/src/serde.rs create mode 100644 build/cli/Cargo.toml create mode 100644 build/cli/src/arg.rs create mode 100644 build/cli/src/arg/backend.rs create mode 100644 build/cli/src/arg/engine.rs create mode 100644 build/cli/src/arg/git_clean.rs create mode 100644 build/cli/src/arg/gui.rs create mode 100644 build/cli/src/arg/ide.rs create mode 100644 build/cli/src/arg/java_gen.rs create mode 100644 build/cli/src/arg/project_manager.rs create mode 100644 build/cli/src/arg/release.rs create mode 100644 build/cli/src/arg/runtime.rs create mode 100644 build/cli/src/arg/wasm.rs create mode 100644 build/cli/src/bin/enso-build4/main.rs create mode 100644 build/cli/src/bin/enso-disable-wasm-opt.rs create mode 100644 build/cli/src/bin/enso-remove-draft-releases.rs 
create mode 100644 build/cli/src/ci_gen.rs create mode 100644 build/cli/src/ci_gen/job.rs create mode 100644 build/cli/src/ci_gen/step.rs create mode 100644 build/cli/src/lib.rs rename build/{ => cli}/src/main.rs (50%) rename build/{ => deprecated}/build-utils/Cargo.toml (82%) create mode 100644 build/deprecated/build-utils/src/lib.rs rename build/{ => deprecated}/rust-scripts/Cargo.toml (100%) rename build/{ => deprecated}/rust-scripts/src/bin/test_all.rs (99%) create mode 100644 build/enso-formatter/src/lib.rs delete mode 100644 build/paths.js create mode 100644 clippy.toml diff --git a/.cargo/config b/.cargo/config deleted file mode 100644 index 35fea45a69..0000000000 --- a/.cargo/config +++ /dev/null @@ -1,11 +0,0 @@ -[build] -target-dir = "target/rust/" -rustflags = ["--cfg", "tokio_unstable"] - -[target.wasm32-unknown-unknown] -rustflags = [ - # Increas the stack size from 1MB to 2MB. This is required to avoid running out of stack space - # in debug builds. The error is reported as `RuntimeError: memory access out of bounds`. - "-C", - "link-args=-z stack-size=2097152", -] diff --git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 0000000000..87dbf1c0db --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,11 @@ +[build] +target-dir = "target/rust/" +rustflags = ["--cfg", "tokio_unstable"] + +[target.wasm32-unknown-unknown] +rustflags = [ + # Increas the stack size from 1MB to 2MB. This is required to avoid running out of stack space + # in debug builds. The error is reported as `RuntimeError: memory access out of bounds`. 
+ "-C", + "link-args=-z stack-size=2097152", +] diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 97d7a96a6b..977c95766a 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,6 +1,11 @@ # Change log CHANGELOG.md +# Build script & utilities +/run @mwu-tow +/run.bat @mwu-tow +/build @mwu-tow + # Rust Libraries and Related Files rust-toolchain.toml @MichaelMauderer @mwu-tow @farmaazon rustfmt.toml @MichaelMauderer @mwu-tow @farmaazon @@ -20,8 +25,6 @@ Cargo.toml /lib/scala/interpreter-dsl/ @4e6 @jaroslavtulach # GUI -/run @MichaelMauderer @wdanilo -/build/ @MichaelMauderer @wdanilo /app/gui/ @MichaelMauderer @wdanilo @farmaazon @mwu-tow /app/gui/view/ @MichaelMauderer @wdanilo @farmaazon /app/ide-desktop/ @MichaelMauderer @wdanilo diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index 8240387156..3e22e2274e 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -8,9 +8,7 @@ on: workflow_dispatch: inputs: just-check: - description: - If set, benchmarks will be only checked to run correctly, not - to measure actual performance. + description: If set, benchmarks will be only checked to run correctly, not to measure actual performance. required: true type: boolean default: false @@ -22,37 +20,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -64,6 +50,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run backend benchmark runtime env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -73,6 +64,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + 
run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} env: - ENSO_BUILD_MINIMAL_RUN: ${{ inputs.just-check }} + ENSO_BUILD_MINIMAL_RUN: ${{ true == inputs.just-check }} ENSO_BUILD_SKIP_VERSION_CHECK: "true" diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml new file mode 100644 index 0000000000..80e30c3568 --- /dev/null +++ b/.github/workflows/changelog.yml @@ -0,0 +1,68 @@ +name: Changelog +on: + pull_request: + types: + - labeled + - unlabeled + - synchronize + - opened + - reopened +jobs: + changelog: + name: Changelog + runs-on: + - X64 + steps: + - name: Setup conda (GH runners only) + uses: s-weigand/setup-conda@v1.0.5 + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + with: + update-conda: false + conda-channels: anaconda, conda-forge + - name: Installing wasm-pack + uses: jetli/wasm-pack-action@v0.3.0 + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + with: + version: v0.10.2 + - name: Expose Artifact API and context information. 
+ uses: actions/github-script@v6 + with: + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " + - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' + if: runner.os == 'Windows' + shell: cmd + - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" + if: runner.os != 'Windows' + shell: bash + - name: Checking out the repository + uses: actions/checkout@v2 + with: + clean: false + submodules: recursive + - name: Build Script Setup + run: ./run --help + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - run: ./run changelog-check + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: List files if failed (Windows) + run: Get-ChildItem -Force -Recurse + if: failure() && runner.os == 'Windows' + - name: List files if failed (non-Windows) + run: ls -lAR + if: failure() && runner.os != 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +env: + ENSO_BUILD_SKIP_VERSION_CHECK: "true" diff --git a/.github/workflows/gui.yml b/.github/workflows/gui.yml index 36211fa106..37931de108 100644 --- a/.github/workflows/gui.yml +++ 
b/.github/workflows/gui.yml @@ -6,45 +6,6 @@ on: pull_request: {} workflow_dispatch: {} jobs: - enso-build-cli-ci-gen-job-assert-changelog-linux: - name: Assert if CHANGELOG.md was updated (on pull request) - runs-on: - - self-hosted - - Linux - - engine - steps: - - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' - if: runner.os == 'Windows' - shell: cmd - - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" - if: runner.os != 'Windows' - shell: bash - - name: Checking out the repository - uses: actions/checkout@v2 - with: - clean: false - submodules: recursive - - id: changed_files - run: |- - git fetch - list=`git diff --name-only origin/develop HEAD | tr '\n' ' '` - echo $list - echo "::set-output name=list::'$list'" - - run: - if [[ ${{ contains(steps.changed_files.outputs.list,'CHANGELOG.md') || - contains(github.event.head_commit.message,'[ci no changelog needed]') || contains(github.event.pull_request.body,'[ci - no changelog needed]') || github.event.pull_request.user.login == 'dependabot' - }} == false ]]; then exit 1; fi - if: - github.base_ref == 'develop' || github.base_ref == 'unstable' || github.base_ref - == 'stable' enso-build-cli-ci-gen-job-build-backend-linux: name: Build Backend (linux) runs-on: @@ -54,37 +15,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: 
jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -96,6 +45,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: 
"contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run backend get env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -105,6 +59,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-build-backend-macos: name: Build Backend (macos) runs-on: @@ -112,37 +71,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -154,6 +101,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run backend get env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -163,6 +115,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: 
./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-build-backend-windows: name: Build Backend (windows) runs-on: @@ -172,37 +129,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree 
/dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -214,6 +159,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run backend get env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -223,6 +173,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-build-wasm-linux: name: Build GUI (WASM) (linux) runs-on: @@ -232,37 +187,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -274,6 +217,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -283,6 +231,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os 
!= 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-build-wasm-macos: name: Build GUI (WASM) (macos) runs-on: @@ -290,37 +243,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -332,6 +273,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -341,6 +287,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os 
!= 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-build-wasm-windows: name: Build GUI (WASM) (windows) runs-on: @@ -350,37 +301,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -392,6 +331,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -401,6 +345,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os 
!= 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-cancel-workflow-linux: name: Cancel Previous Runs runs-on: @@ -419,37 +368,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -461,6 +398,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run lint env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -470,6 +412,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: ./run 
git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-native-test-linux: name: Native GUI tests (linux) runs-on: @@ -479,37 +426,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree 
/dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -521,6 +456,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run wasm test --no-wasm env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -530,6 +470,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-package-ide-linux: name: Package IDE (linux) needs: @@ -542,37 +487,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -584,6 +517,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run ide build --wasm-source current-ci-run --backend-source current-ci-run env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -593,6 +531,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() 
&& runner.os != 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-package-ide-macos: name: Package IDE (macos) needs: @@ -603,37 +546,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -645,6 +576,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run ide build --wasm-source current-ci-run --backend-source current-ci-run env: APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} @@ -659,6 +595,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: 
failure() && runner.os != 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-package-ide-windows: name: Package IDE (windows) needs: @@ -671,37 +612,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -713,6 +642,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run ide build --wasm-source current-ci-run --backend-source current-ci-run env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -724,6 +658,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() 
&& runner.os != 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-wasm-test-linux: name: WASM GUI tests (linux) runs-on: @@ -733,37 +672,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -775,6 +702,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run wasm test --no-native env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -784,5 +716,10 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + 
run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} env: ENSO_BUILD_SKIP_VERSION_CHECK: "true" diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index e29783a4bc..9a4b05f4fd 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -13,37 +13,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -71,37 +59,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 
'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -113,6 +89,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN 
}} - run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -122,6 +103,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-upload-backend-linux: name: Upload Backend (linux) needs: @@ -133,37 +119,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -175,6 +149,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run backend upload env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -184,6 +163,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: 
./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} env: ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} @@ -196,37 +180,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -238,6 +210,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run backend upload env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -247,6 +224,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: 
./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} env: ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} @@ -261,37 +243,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -303,6 +273,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run backend upload env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -312,6 +287,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: 
./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} env: ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} @@ -327,37 +307,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -369,6 +337,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run release deploy-to-ecr env: AWS_ACCESS_KEY_ID: ${{ secrets.ECR_PUSH_RUNTIME_ACCESS_KEY_ID }} @@ -382,6 +355,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' 
+ - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} env: ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} @@ -400,37 +378,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -442,6 +408,11 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run release publish env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -451,10 +422,15 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: 
./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} env: AWS_ACCESS_KEY_ID: ${{ secrets.ARTEFACT_S3_ACCESS_KEY_ID }} AWS_REGION: us-west-1 - AWS_SECRET_ACCESS_KEY: ${{ secrets.ARTEFACT_S3_SECRET_ACCESS_KEY }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.ARTEFACT_S3_SECRET_ACCESS_KEY }} ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} enso-build-cli-ci-gen-upload-ide-linux: @@ -470,37 +446,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -512,9 +476,12 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - run: - ./run ide upload --wasm-source current-ci-run --backend-source release - --backend-release ${{env.ENSO_RELEASE_ID}} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release 
${{env.ENSO_RELEASE_ID}} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: List files if failed (Windows) @@ -523,6 +490,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} env: ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} @@ -537,37 +509,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -579,9 +539,12 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - run: - ./run ide upload --wasm-source current-ci-run --backend-source release - --backend-release ${{env.ENSO_RELEASE_ID}} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release 
${{env.ENSO_RELEASE_ID}} env: APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} APPLEIDPASS: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }} @@ -595,6 +558,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} env: ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} @@ -611,37 +579,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -653,9 +609,12 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - run: - ./run ide upload --wasm-source current-ci-run --backend-source release - --backend-release ${{env.ENSO_RELEASE_ID}} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release 
${{env.ENSO_RELEASE_ID}} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} WIN_CSC_KEY_PASSWORD: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT_PASSWORD }} @@ -666,6 +625,11 @@ jobs: - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} env: ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} diff --git a/.github/workflows/scala-new.yml b/.github/workflows/scala-new.yml index 6aa364c8ae..b6934c4651 100644 --- a/.github/workflows/scala-new.yml +++ b/.github/workflows/scala-new.yml @@ -24,37 +24,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. 
uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -66,31 +54,43 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run backend ci-check env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Engine Test Reporter + uses: dorny/test-reporter@v1 + if: success() || failure() + with: + max-annotations: 50 + name: Engine Tests (linux) + 
path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml + path-replace-backslashes: true + reporter: java-junit + - name: Standard Library Test Reporter + uses: dorny/test-reporter@v1 + if: success() || failure() + with: + max-annotations: 50 + name: Standard Library Tests (linux) + path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml + path-replace-backslashes: true + reporter: java-junit - name: List files if failed (Windows) run: Get-ChildItem -Force -Recurse if: failure() && runner.os == 'Windows' - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' - - name: Stdlib test report - uses: dorny/test-reporter@v1 - if: success() || failure() - with: - name: Enso Standard Library Tests (linux) - path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml - path-replace-backslashes: "true" - reporter: java-junit - - name: Engine test report - uses: dorny/test-reporter@v1 - if: success() || failure() - with: - name: Engine Tests (linux) - path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml - path-replace-backslashes: "true" - reporter: java-junit + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-ci-check-backend-macos: name: Engine (macos) runs-on: @@ -98,37 +98,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the 
Artifact API environment + - name: Expose Artifact API and context information. uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -140,31 +128,43 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run backend ci-check env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Engine Test Reporter + uses: dorny/test-reporter@v1 + if: 
success() || failure() + with: + max-annotations: 50 + name: Engine Tests (macos) + path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml + path-replace-backslashes: true + reporter: java-junit + - name: Standard Library Test Reporter + uses: dorny/test-reporter@v1 + if: success() || failure() + with: + max-annotations: 50 + name: Standard Library Tests (macos) + path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml + path-replace-backslashes: true + reporter: java-junit - name: List files if failed (Windows) run: Get-ChildItem -Force -Recurse if: failure() && runner.os == 'Windows' - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' - - name: Stdlib test report - uses: dorny/test-reporter@v1 - if: success() || failure() - with: - name: Enso Standard Library Tests (macos) - path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml - path-replace-backslashes: "true" - reporter: java-junit - - name: Engine test report - uses: dorny/test-reporter@v1 - if: success() || failure() - with: - name: Engine Tests (macos) - path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml - path-replace-backslashes: "true" - reporter: java-junit + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} enso-build-cli-ci-gen-job-ci-check-backend-windows: name: Engine (windows) runs-on: @@ -174,37 +174,25 @@ jobs: steps: - name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: - startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted - Agent') + if: startsWith(runner.name, 'GitHub Actions') || 
startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - - name: Setup the Artifact API environment + - name: Expose Artifact API and context information. uses: actions/github-script@v6 with: - script: |- - core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) - core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) - core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) - run: - '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x - -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) - || :"' + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' if: runner.os == 'Windows' shell: cmd - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) - run: - "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git - hash-object -t tree /dev/null) < /dev/null) || :" + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" if: runner.os != 'Windows' shell: bash - name: Checking out the repository @@ -216,30 +204,42 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean before + run: ./run git-clean + if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: ./run backend ci-check env: GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} + - name: Engine Test Reporter + uses: dorny/test-reporter@v1 + if: success() || failure() + with: + max-annotations: 50 + name: Engine Tests (windows) + path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml + path-replace-backslashes: true + reporter: java-junit + - name: Standard Library Test Reporter + uses: dorny/test-reporter@v1 + if: success() || failure() + with: + max-annotations: 50 + name: Standard Library Tests (windows) + path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml + path-replace-backslashes: true + reporter: java-junit - name: List files if failed (Windows) run: Get-ChildItem -Force -Recurse if: failure() && runner.os == 'Windows' - name: List files if failed (non-Windows) run: ls -lAR if: failure() && runner.os != 'Windows' - - name: Stdlib test report - uses: dorny/test-reporter@v1 - if: success() || failure() - with: - name: Enso Standard Library Tests (windows) - path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml - path-replace-backslashes: "true" - reporter: java-junit - - name: Engine test report - uses: dorny/test-reporter@v1 - if: success() || failure() - with: - name: Engine Tests (windows) - path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml - path-replace-backslashes: "true" - reporter: java-junit + - name: Clean after + run: ./run git-clean + if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} env: ENSO_BUILD_SKIP_VERSION_CHECK: "true" diff --git a/Cargo.lock b/Cargo.lock index 68f4567678..e893250742 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -40,10 +40,21 @@ dependencies = [ ] [[package]] -name = "aho-corasick" -version = "0.7.18" +name = "ahash" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom 0.2.7", + 
"once_cell", + "version_check 0.9.4", +] + +[[package]] +name = "aho-corasick" +version = "0.7.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e" dependencies = [ "memchr", ] @@ -62,6 +73,15 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "ansi_term" version = "0.12.1" @@ -73,9 +93,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.57" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc" +checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602" [[package]] name = "approx" @@ -106,9 +126,9 @@ dependencies = [ [[package]] name = "arc-swap" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5d78ce20460b82d3fa150275ed9d55e21064fc7951177baacf86a145c4a4b1f" +checksum = "983cd8b9d4b02a6dc6ffa557262eb5858a27a0038ffffe21a0f133eaa819a164" [[package]] name = "ascii" @@ -116,6 +136,16 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eab1c04a571841102f5345a8fc0f6bb3d31c315dec879b5c6e42e40ce7ffa34e" +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "assert_approx_eq" version = "1.1.0" @@ -156,9 +186,9 @@ dependencies = [ [[package]] name = "async-channel" -version = "1.6.1" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "2114d64672151c0c5eaa5e131ec84a74f06e1e559830dabba01ca30605d66319" +checksum = "e14485364214912d3b19cc3435dde4df66065127f05fa0d75c712f36f12c2f28" dependencies = [ "concurrent-queue", "event-listener", @@ -174,8 +204,8 @@ dependencies = [ "flate2", "futures-core", "memchr", - "pin-project-lite 0.2.9", - "tokio 1.19.2", + "pin-project-lite", + "tokio", ] [[package]] @@ -194,26 +224,26 @@ dependencies = [ [[package]] name = "async-global-executor" -version = "2.0.4" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c290043c9a95b05d45e952fb6383c67bcb61471f60cfa21e890dba6654234f43" +checksum = "0da5b41ee986eed3f524c380e6d64965aea573882a8907682ad100f7859305ca" dependencies = [ "async-channel", "async-executor", "async-io", - "async-mutex", + "async-lock", "blocking", "futures-lite", - "num_cpus", "once_cell", ] [[package]] name = "async-io" -version = "1.7.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5e18f61464ae81cde0a23e713ae8fd299580c54d697a35820cfd0625b8b0e07" +checksum = "83e21f3a490c72b3b0cf44962180e60045de2925d8dff97918f7ee43c8f637c7" dependencies = [ + "autocfg 1.1.0", "concurrent-queue", "futures-lite", "libc", @@ -222,7 +252,7 @@ dependencies = [ "parking", "polling", "slab", - "socket2 0.4.4", + "socket2", "waker-fn", "winapi 0.3.9", ] @@ -236,26 +266,17 @@ dependencies = [ "event-listener", ] -[[package]] -name = "async-mutex" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479db852db25d9dbf6204e6cb6253698f175c15726470f78af0d918e99d6156e" -dependencies = [ - "event-listener", -] - [[package]] name = "async-std" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52580991739c5cdb36cde8b2a516371c0a3b70dda36d916cc08b82372916808c" +checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" dependencies = [ 
"async-channel", "async-global-executor", "async-io", "async-lock", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.11", "futures-channel", "futures-core", "futures-io", @@ -264,9 +285,8 @@ dependencies = [ "kv-log-macro", "log 0.4.17", "memchr", - "num_cpus", "once_cell", - "pin-project-lite 0.2.9", + "pin-project-lite", "pin-utils", "slab", "wasm-bindgen-futures", @@ -295,15 +315,15 @@ dependencies = [ [[package]] name = "async-task" -version = "4.2.0" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30696a84d817107fc028e049980e09d5e140e8da8f1caeb17e8e950658a3cea9" +checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" [[package]] name = "async-trait" -version = "0.1.53" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6aa3524a2dfcf9fe180c51eae2b58738348d819517ceadf95789c51fff7600" +checksum = "76464446b8bc32758d7e88ee1a804d9914cd9b1cb264c029899680b0be29826f" dependencies = [ "proc-macro2", "quote", @@ -350,9 +370,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "aws-config" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2a3ad9e793335d75b2d2faad583487efcc0df9154aff06f299a5c1fc8795698" +checksum = "b309b2154d224728d845a958c580834f24213037ed61b195da80c0b0fc7469fa" dependencies = [ "aws-http", "aws-sdk-sso", @@ -367,10 +387,10 @@ dependencies = [ "bytes 1.1.0", "hex", "http", - "hyper 0.14.18", + "hyper 0.14.20", "ring", - "time 0.3.9", - "tokio 1.19.2", + "time 0.3.14", + "tokio", "tower", "tracing", "zeroize", @@ -378,11 +398,12 @@ dependencies = [ [[package]] name = "aws-endpoint" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bd4e9dad553017821ee529f186e033700e8d61dd5c4b60066b4d8fe805b8cfc" +checksum = 
"76f35c8f5877ad60db4f0d9dcdfbcb2233a8cc539f9e568df39ee0581ec62e89" dependencies = [ "aws-smithy-http", + "aws-smithy-types", "aws-types", "http", "regex", @@ -391,27 +412,27 @@ dependencies = [ [[package]] name = "aws-http" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ef5a579a51d352b628b76f4855ba716be686305e5e59970c476d1ae2214e90d" +checksum = "2f5422c9632d887968ccb66e2871a6d190d6104e276034912bee72ef58a5d890" dependencies = [ "aws-smithy-http", "aws-smithy-types", "aws-types", "bytes 1.1.0", "http", - "http-body 0.4.5", + "http-body", "lazy_static", "percent-encoding 2.1.0", - "pin-project-lite 0.2.9", + "pin-project-lite", "tracing", ] [[package]] name = "aws-sdk-ecr" -version = "0.17.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84bf0237b7c85f440ba6e19d23696751200d33c0b481a246eb7df869aa231cea" +checksum = "785628f1dccf6a0ee1e1c9a705c7438111c1cc1850d1c49630bc1faacd130e6b" dependencies = [ "aws-endpoint", "aws-http", @@ -431,9 +452,9 @@ dependencies = [ [[package]] name = "aws-sdk-s3" -version = "0.17.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d2c19b69297f16b3f18936e363f954e7504c23a4a0dc3f2833712313c09c2aa" +checksum = "a4d31765abb258c501d5572ebce43dee524b4b3b6256cb8b4c78534898dc205b" dependencies = [ "aws-endpoint", "aws-http", @@ -451,7 +472,7 @@ dependencies = [ "bytes 1.1.0", "bytes-utils", "http", - "http-body 0.4.5", + "http-body", "tokio-stream", "tower", "tracing", @@ -459,9 +480,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "0.17.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f014b8ad3178b414bf732b36741325ef659fc40752f8c292400fb7c4ecb7fdd0" +checksum = "e2cc8b50281e1350d0b5c7207c2ce53c6721186ad196472caff4f20fa4b42e96" dependencies = [ "aws-endpoint", "aws-http", @@ -481,9 +502,9 @@ dependencies = [ 
[[package]] name = "aws-sdk-sts" -version = "0.17.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d37e45fdce84327c69fb924b9188fd889056c6afafbd494e8dd0daa400f9c082" +checksum = "d6179f13c9fbab3226860f377354dece860e34ff129b69c7c1b0fa828d1e9c76" dependencies = [ "aws-endpoint", "aws-http", @@ -503,9 +524,9 @@ dependencies = [ [[package]] name = "aws-sig-auth" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6530e72945c11439e9b3c423c95a656a233d73c3a7d4acaf9789048e1bdf7da7" +checksum = "b16f4d70c9c865af392eb40cacfe2bec3fa18f651fbdf49919cfc1dda13b189e" dependencies = [ "aws-sigv4", "aws-smithy-eventstream", @@ -517,9 +538,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6351c3ba468b04bd819f64ea53538f5f53e3d6b366b27deabee41e73c9edb3af" +checksum = "8d33790cecae42b999d197074c8a19e9b96b9e346284a6f93989e7489c9fa0f5" dependencies = [ "aws-smithy-eventstream", "aws-smithy-http", @@ -531,27 +552,27 @@ dependencies = [ "percent-encoding 2.1.0", "regex", "ring", - "time 0.3.9", + "time 0.3.14", "tracing", ] [[package]] name = "aws-smithy-async" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86fc23ad8d050c241bdbfa74ae360be94a844ace8e218f64a2b2de77bfa9a707" +checksum = "bc604f278bae64bbd15854baa9c46ed69a56dfb0669d04aab80974749f2d6599" dependencies = [ "futures-util", - "pin-project-lite 0.2.9", - "tokio 1.19.2", + "pin-project-lite", + "tokio", "tokio-stream", ] [[package]] name = "aws-smithy-checksums" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dd674df030b337a84eb67539db048676c691d9c88f0c54cf7748da11836cfd8" +checksum = "4b402da39bc5aae618b70a9b8d828acad21fe4a3a73b82c0205b89db55d71ce8" 
dependencies = [ "aws-smithy-http", "aws-smithy-types", @@ -560,19 +581,19 @@ dependencies = [ "crc32fast", "hex", "http", - "http-body 0.4.5", + "http-body", "md-5", - "pin-project-lite 0.2.9", - "sha1 0.10.1", + "pin-project-lite", + "sha1 0.10.5", "sha2", "tracing", ] [[package]] name = "aws-smithy-client" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e147b157f49ce77f2a86ec693a14c84b2441fa28be58ffb2febb77d5726c934" +checksum = "ec39585f8274fa543ad5c63cc09cbd435666be16b2cf99e4e07be5cf798bc050" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -581,21 +602,21 @@ dependencies = [ "bytes 1.1.0", "fastrand", "http", - "http-body 0.4.5", - "hyper 0.14.18", + "http-body", + "hyper 0.14.20", "hyper-rustls 0.22.1", "lazy_static", - "pin-project-lite 0.2.9", - "tokio 1.19.2", + "pin-project-lite", + "tokio", "tower", "tracing", ] [[package]] name = "aws-smithy-eventstream" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da29e67a0b90a2bc5f2bd0a06fd43e728de62e02048879c15f646a3edf8db012" +checksum = "98c2a7b9490fd2bc7af3a1c486ae921102d7234d1fa5e7d91039068e7af48a01" dependencies = [ "aws-smithy-types", "bytes 1.1.0", @@ -604,9 +625,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc1af50eac644ab6f58e5bae29328ba3092851fc2ce648ad139134699b2b66f" +checksum = "014a0ef5c4508fc2f6a9d3925c214725af19f020ea388db48e20196cc4cc9d6d" dependencies = [ "aws-smithy-eventstream", "aws-smithy-types", @@ -614,45 +635,45 @@ dependencies = [ "bytes-utils", "futures-core", "http", - "http-body 0.4.5", - "hyper 0.14.18", + "http-body", + "hyper 0.14.20", "once_cell", "percent-encoding 2.1.0", - "pin-project-lite 0.2.9", - "tokio 1.19.2", - "tokio-util 0.7.2", + "pin-project-lite", + "tokio", + "tokio-util", "tracing", ] 
[[package]] name = "aws-smithy-http-tower" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1bf4c4664dff2febf91f8796505c5bc8f38a0bff0d1397d1d3fdda17bd5c5d1" +checksum = "deecb478dc3cc40203e0e97ac0fb92947e0719754bbafd0026bdc49318e2fd03" dependencies = [ "aws-smithy-http", "bytes 1.1.0", "http", - "http-body 0.4.5", - "pin-project-lite 0.2.9", + "http-body", + "pin-project-lite", "tower", "tracing", ] [[package]] name = "aws-smithy-json" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e6ebc76c3c108dd2a96506bf47dc31f75420811a19f1a09907524d1451789d2" +checksum = "6593456af93c4a39724f7dc9d239833102ab96c1d1e94c35ea79f0e55f9fd54c" dependencies = [ "aws-smithy-types", ] [[package]] name = "aws-smithy-query" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2956f1385c4daa883907a2c81d32256af8f95834c9de1bc0613fa68db63b88c4" +checksum = "b803460b71645dfa9f6be47c4f00f91632f01e5bb01f9dc43890cd6cba983f08" dependencies = [ "aws-smithy-types", "urlencoding", @@ -660,30 +681,30 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "352fb335ec1d57160a17a13e87aaa0a172ab780ddf58bfc85caedd3b7e47caed" +checksum = "e93b0c93a3b963da946a0b8ef3853a7252298eb75cdbfb21dad60f5ed0ded861" dependencies = [ - "itoa 1.0.2", + "itoa 1.0.3", "num-integer", "ryu", - "time 0.3.9", + "time 0.3.14", ] [[package]] name = "aws-smithy-xml" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cf2807fa715a5a3296feffb06ce45252bd0dfd48f52838128c48fb339ddbf5c" +checksum = "36b9efb4855b4acb29961a776d45680f3cbdd7c4783cbbae078da54c342575dd" dependencies = [ "xmlparser", ] [[package]] name = "aws-types" -version = "0.47.0" 
+version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8140b89d76f67be2c136d7393e7e6d8edd65424eb58214839efbf4a2e4f7e8a3" +checksum = "93f3f349b39781849261db1c727369923bb97007cf7bd0deb3a6e9e461c8d38f" dependencies = [ "aws-smithy-async", "aws-smithy-client", @@ -697,9 +718,9 @@ dependencies = [ [[package]] name = "axum" -version = "0.5.6" +version = "0.5.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab2504b827a8bef941ba3dd64bdffe9cf56ca182908a147edd6189c95fbcae7d" +checksum = "c9e3356844c4d6a6d6467b8da2cffb4a2820be256f50a3a386c9d152bab31043" dependencies = [ "async-trait", "axum-core", @@ -707,17 +728,17 @@ dependencies = [ "bytes 1.1.0", "futures-util", "http", - "http-body 0.4.5", - "hyper 0.14.18", - "itoa 1.0.2", + "http-body", + "hyper 0.14.20", + "itoa 1.0.3", "matchit", "memchr", "mime 0.3.16", "percent-encoding 2.1.0", - "pin-project-lite 0.2.9", + "pin-project-lite", "serde", "sync_wrapper", - "tokio 1.19.2", + "tokio", "tower", "tower-http", "tower-layer", @@ -726,30 +747,32 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.2.4" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da31c0ed7b4690e2c78fe4b880d21cd7db04a346ebc658b4270251b695437f17" +checksum = "d9f0c0a60006f2a293d82d571f635042a72edf927539b7685bd62d361963839b" dependencies = [ "async-trait", "bytes 1.1.0", "futures-util", "http", - "http-body 0.4.5", + "http-body", "mime 0.3.16", + "tower-layer", + "tower-service", ] [[package]] name = "backtrace" -version = "0.3.65" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61" +checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7" dependencies = [ "addr2line", "cc", "cfg-if 1.0.0", "libc", "miniz_oxide", - "object 0.28.4", + "object 0.29.0", "rustc-demangle", ] @@ -772,12 +795,6 @@ 
dependencies = [ "byteorder", ] -[[package]] -name = "base64" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" - [[package]] name = "base64" version = "0.13.0" @@ -852,11 +869,11 @@ dependencies = [ [[package]] name = "block-buffer" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" +checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", ] [[package]] @@ -900,6 +917,16 @@ dependencies = [ "serde", ] +[[package]] +name = "buf_redux" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b953a6887648bb07a535631f2bc00fbdb2a2216f135552cb3f534ed136b9c07f" +dependencies = [ + "memchr", + "safemem", +] + [[package]] name = "build-scripts" version = "0.1.0" @@ -910,9 +937,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.9.1" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" +checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d" [[package]] name = "byte-tools" @@ -958,12 +985,6 @@ dependencies = [ "iovec", ] -[[package]] -name = "bytes" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" - [[package]] name = "bytes" version = "1.1.0" @@ -972,9 +993,9 @@ checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" [[package]] name = "bytes-utils" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1934a3ef9cac8efde4966a92781e77713e1ba329f1d42e446c7d7eba340d8ef1" +checksum = "e47d3a8076e283f3acd27400535992edb3ba4b5bb72f8891ad8fbe7932a7d4b9" dependencies = [ "bytes 1.1.0", "either", @@ -1009,51 +1030,21 @@ checksum = "c1db59621ec70f09c5e9b597b220c7a2b43611f4710dc03ceb8748637775692c" [[package]] name = "cached" -version = "0.34.0" +version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aadf76ddea74bab35ebeb8f1eb115b9bc04eaee42d8acc0d5f477dee6b176c9a" +checksum = "f3e27085975166ffaacbd04527132e1cf5906fa612991f9b4fea08e787da2961" dependencies = [ "async-trait", "async_once", - "cached_proc_macro 0.12.0", + "cached_proc_macro", "cached_proc_macro_types", - "futures 0.3.21", - "hashbrown", - "lazy_static", - "once_cell", - "thiserror", - "tokio 1.19.2", -] - -[[package]] -name = "cached" -version = "0.38.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27e6092f8c7ba6e65a46f6f26d7d7997201d3a6f0e69ff5d2440b930d7c0513a" -dependencies = [ - "async-trait", - "async_once", - "cached_proc_macro 0.15.0", - "cached_proc_macro_types", - "futures 0.3.21", + "futures 0.3.24", "hashbrown", "instant", "lazy_static", "once_cell", "thiserror", - "tokio 1.19.2", -] - -[[package]] -name = "cached_proc_macro" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bce0f37f9b77c6b93cdf3f060c89adca303d2ab052cacb3c3d1ab543e8cecd2f" -dependencies = [ - "cached_proc_macro_types", - "darling", - "quote", - "syn", + "tokio", ] [[package]] @@ -1076,12 +1067,9 @@ checksum = "3a4f925191b4367301851c6d99b09890311d74b0d43f274c0b34c86d308a3663" [[package]] name = "cast" -version = "0.2.7" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c24dab4283a142afa2fdca129b80ad2c6284e073930f964c3a1293c225ee39a" -dependencies = [ - "rustc_version 0.4.0", -] +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" 
[[package]] name = "cc" @@ -1112,15 +1100,17 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.19" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" +checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1" dependencies = [ - "libc", + "iana-time-zone", + "js-sys", "num-integer", "num-traits", "serde", "time 0.1.44", + "wasm-bindgen", "winapi 0.3.9", ] @@ -1130,7 +1120,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", ] [[package]] @@ -1146,9 +1136,9 @@ dependencies = [ [[package]] name = "clap" -version = "3.1.18" +version = "3.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2dbdf4bdacb33466e854ce889eee8dfd5729abf7ccd7664d0a2d60cd384440b" +checksum = "85a35a599b11c089a7f49105658d089b8f2cf0882993c17daf6de15285c2c35d" dependencies = [ "atty", "bitflags", @@ -1164,11 +1154,11 @@ dependencies = [ [[package]] name = "clap_derive" -version = "3.1.18" +version = "3.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25320346e922cffe59c0bbc5410c8d8784509efb321488971081313cb1e1a33c" +checksum = "a3aab4734e083b809aaf5794e14e756d1c798d2c69c7f7de7a09a2f5214993c1" dependencies = [ - "heck 0.4.0", + "heck", "proc-macro-error", "proc-macro2", "quote", @@ -1177,9 +1167,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.2.0" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a37c35f1112dad5e6e0b1adaff798507497a18fceeb30cceb3bae7d1427b9213" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" dependencies = [ "os_str_bytes", ] @@ -1226,9 
+1216,9 @@ dependencies = [ [[package]] name = "combine" -version = "4.6.4" +version = "4.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a604e93b79d1808327a6fca85a6f2d69de66461e7620f5a4cbf5fb4d1d7c948" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" dependencies = [ "bytes 1.1.0", "memchr", @@ -1236,9 +1226,9 @@ dependencies = [ [[package]] name = "concurrent-queue" -version = "1.2.2" +version = "1.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ed07550be01594c6026cff2a1d7fe9c8f683caa798e12b68694ac9e88286a3" +checksum = "af4780a44ab5696ea9e28294517f1fffb421a83a25af521333c838635509db9c" dependencies = [ "cache-padded", ] @@ -1249,19 +1239,18 @@ version = "0.1.0" dependencies = [ "Inflector", "serde", - "serde_yaml 0.8.24", + "serde_yaml 0.8.26", ] [[package]] name = "console" -version = "0.15.0" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28b32d32ca44b70c3e4acd7db1babf555fa026e385fb95f18028f88848b3c31" +checksum = "89eab4d20ce20cea182308bca13088fecea9c05f6776cf287205d41a0ed3c847" dependencies = [ "encode_unicode", "libc", "once_cell", - "regex", "terminal_size", "unicode-width", "winapi 0.3.9", @@ -1269,9 +1258,9 @@ dependencies = [ [[package]] name = "console-api" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06c5fd425783d81668ed68ec98408a80498fb4ae2fd607797539e1a9dfa3618f" +checksum = "e57ff02e8ad8e06ab9731d5dc72dc23bef9200778eae1a89d555d8c42e5d4a86" dependencies = [ "prost", "prost-types", @@ -1281,21 +1270,21 @@ dependencies = [ [[package]] name = "console-subscriber" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31432bc31ff8883bf6a693a79371862f73087822470c82d6a1ec778781ee3978" +checksum = "e933c43a5db3779b3600cdab18856af2411ca2237e33ba8ab476d5d5b1a6c1e7" dependencies 
= [ "console-api", "crossbeam-channel", - "crossbeam-utils 0.8.8", - "futures 0.3.21", + "crossbeam-utils 0.8.11", + "futures 0.3.24", "hdrhistogram", "humantime 2.1.0", "prost-types", "serde", "serde_json", "thread_local", - "tokio 1.19.2", + "tokio", "tokio-stream", "tonic", "tracing", @@ -1314,9 +1303,9 @@ dependencies = [ [[package]] name = "const_format" -version = "0.2.23" +version = "0.2.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0936ffe6d0c8d6a51b3b0a73b2acbe925d786f346cf45bfddc8341d79fb7dc8a" +checksum = "939dc9e2eb9077e0679d2ce32de1ded8531779360b003b4a972a7a39ec263495" dependencies = [ "const_format_proc_macros", ] @@ -1350,6 +1339,15 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb4a24b1aaf0fd0ce8b45161144d6f42cd91677fd5940fd431183eb023b3a2b8" +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "core-foundation" version = "0.9.3" @@ -1368,9 +1366,9 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "cpufeatures" -version = "0.2.2" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b" +checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" dependencies = [ "libc", ] @@ -1395,16 +1393,16 @@ dependencies = [ [[package]] name = "criterion" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1604dafd25fba2fe2d5895a9da139f8dc9b319a5fe5354ca137cbbce4e178d10" +checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" dependencies = [ "atty", "cast", "clap 2.34.0", "criterion-plot", "csv", - "itertools 
0.10.3", + "itertools 0.10.5", "lazy_static", "num-traits", "oorandom", @@ -1421,19 +1419,19 @@ dependencies = [ [[package]] name = "criterion-plot" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d00996de9f2f7559f7f4dc286073197f83e92256a59ed395f9aac01fe717da57" +checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" dependencies = [ "cast", - "itertools 0.10.3", + "itertools 0.10.5", ] [[package]] name = "cron" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76219e9243e100d5a37676005f08379297f8addfebc247613299600625c734d" +checksum = "1ff76b51e4c068c52bfd2866e1567bee7c567ae8f24ada09fd4307019e25eab7" dependencies = [ "chrono", "nom", @@ -1442,36 +1440,36 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53" +checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" dependencies = [ "cfg-if 1.0.0", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.11", ] [[package]] name = "crossbeam-deque" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" dependencies = [ "cfg-if 1.0.0", "crossbeam-epoch", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.11", ] [[package]] name = "crossbeam-epoch" -version = "0.9.8" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c" +checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1" dependencies = [ "autocfg 1.1.0", "cfg-if 1.0.0", - 
"crossbeam-utils 0.8.8", - "lazy_static", + "crossbeam-utils 0.8.11", "memoffset", + "once_cell", "scopeguard", ] @@ -1488,21 +1486,21 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.8" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" +checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc" dependencies = [ "cfg-if 1.0.0", - "lazy_static", + "once_cell", ] [[package]] name = "crypto-common" -version = "0.1.3" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", "typenum", ] @@ -1539,9 +1537,9 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f877be4f7c9f246b183111634f75baa039715e3f46ce860677d3b19a69fb229c" +checksum = "cdffe87e1d521a10f9696f833fe502293ea446d7f256c06128293a4119bdf4cb" dependencies = [ "quote", "syn", @@ -1588,6 +1586,25 @@ version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57" +[[package]] +name = "deadpool" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "421fe0f90f2ab22016f32a9881be5134fdd71c65298917084b0c7477cbc3856e" +dependencies = [ + "async-trait", + "deadpool-runtime", + "num_cpus", + "retain_mut", + "tokio", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" + [[package]] name = "debug-scene-component-group" 
version = "0.1.0" @@ -1641,7 +1658,7 @@ dependencies = [ "ensogl-hardcoded-theme", "ensogl-text-msdf", "ide-view", - "parser", + "parser-scala", "span-tree", "uuid 0.8.2", "wasm-bindgen", @@ -1721,11 +1738,11 @@ dependencies = [ [[package]] name = "digest" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" +checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c" dependencies = [ - "block-buffer 0.10.2", + "block-buffer 0.10.3", "crypto-common", "subtle", ] @@ -1769,8 +1786,8 @@ dependencies = [ "enso-profiler", "enso-text", "failure", - "itertools 0.10.3", - "parser", + "itertools 0.10.5", + "parser-scala", "regex", "serde", "uuid 0.8.2", @@ -1785,9 +1802,9 @@ checksum = "4bb454f0228b18c7f4c3b0ebbee346ed9c52e7443b0999cd543ff3571205701d" [[package]] name = "either" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be" +checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" [[package]] name = "enclose" @@ -1814,7 +1831,7 @@ dependencies = [ name = "engine-protocol" version = "0.1.0" dependencies = [ - "bytes 0.5.6", + "bytes 1.1.0", "chrono", "enso-build-utilities", "enso-data-structures", @@ -1826,20 +1843,20 @@ dependencies = [ "failure", "flatbuffers", "flatc-rust", - "futures 0.3.21", + "futures 0.3.24", "hex", "json-rpc", "mockall", - "reqwest 0.10.10", + "reqwest", "serde", "serde_json", "sha3", "strum", "strum_macros", - "tokio 0.2.25", + "tokio", "uuid 0.8.2", "wasm-bindgen-test", - "zip 0.5.13", + "zip 0.6.2", "zip-extensions", ] @@ -1853,7 +1870,6 @@ dependencies = [ [[package]] name = "enso-build" version = "0.1.0" -source = "git+https://github.com/enso-org/ci-build?branch=develop#e283a55fba4b43bb3eeec4ce2d3982d20c8748d2" dependencies = [ "anyhow", 
"async-compression", @@ -1864,10 +1880,10 @@ dependencies = [ "base64 0.13.0", "byte-unit", "bytes 1.1.0", - "cached 0.38.0", + "cached", "cfg-if 1.0.0", "chrono", - "clap 3.1.18", + "clap 3.1.15", "console-subscriber", "derivative", "derive_more", @@ -1876,23 +1892,22 @@ dependencies = [ "flate2", "flume", "fs_extra", - "futures 0.3.21", + "futures 0.3.24", "futures-util", "glob", - "heck 0.4.0", + "heck", "humantime 2.1.0", "ide-ci", - "ifmt", "indexmap", "indicatif", - "itertools 0.10.3", + "itertools 0.10.5", "lazy_static", "log 0.4.17", "mime 0.3.16", "nix", "octocrab", "ouroboros", - "paste 1.0.7", + "paste 1.0.9", "path-absolutize", "pin-project", "platforms", @@ -1901,24 +1916,23 @@ dependencies = [ "pulldown-cmark", "rand 0.8.5", "regex", - "reqwest 0.11.10", + "reqwest", "scopeguard", - "semver 1.0.9", + "semver 1.0.14", "serde", "serde_json", - "serde_yaml 0.9.10", + "serde_yaml 0.9.13", "shrinkwraprs 0.3.0", - "snafu", "strum", - "sysinfo 0.25.3", + "sysinfo", "tar", "tempfile", - "tokio 1.19.2", + "tokio", "toml", "tracing", "tracing-subscriber", "unicase 2.6.0", - "url 2.2.2", + "url 2.3.0", "uuid 1.1.2", "walkdir", "which", @@ -1929,25 +1943,27 @@ dependencies = [ [[package]] name = "enso-build-cli" version = "0.1.0" -source = "git+https://github.com/enso-org/ci-build?branch=develop#e283a55fba4b43bb3eeec4ce2d3982d20c8748d2" dependencies = [ "anyhow", "byte-unit", "chrono", - "clap 3.1.18", + "clap 3.1.15", "derivative", "enso-build", - "futures 0.3.21", + "enso-formatter", + "futures 0.3.24", "futures-util", + "glob", "humantime 2.1.0", "ide-ci", "octocrab", "serde", "serde_json", - "serde_yaml 0.9.10", + "serde_yaml 0.9.13", "strum", "tempfile", - "tokio 1.19.2", + "tokio", + "toml", "tracing", "tracing-subscriber", ] @@ -1956,18 +1972,10 @@ dependencies = [ name = "enso-build-utilities" version = "0.1.0" dependencies = [ - "path-clean", - "reqwest 0.10.10", - "serde", -] - -[[package]] -name = "enso-build3" -version = "0.1.0" -dependencies = [ - 
"enso-build", - "enso-build-cli", "ide-ci", + "path-clean", + "reqwest", + "serde", ] [[package]] @@ -1985,7 +1993,7 @@ dependencies = [ "enso-logger", "enso-prelude", "ensogl", - "semver 1.0.9", + "semver 1.0.14", ] [[package]] @@ -2006,7 +2014,7 @@ name = "enso-debug-api" version = "0.1.0" dependencies = [ "derivative", - "futures 0.3.21", + "futures 0.3.24", "js-sys", "wasm-bindgen", "web-sys", @@ -2028,8 +2036,9 @@ dependencies = [ name = "enso-formatter" version = "0.1.0" dependencies = [ - "lazy_static", + "ide-ci", "regex", + "tokio", ] [[package]] @@ -2091,18 +2100,18 @@ dependencies = [ "ensogl-text-msdf", "failure", "flo_stream", - "futures 0.3.21", + "futures 0.3.24", "fuzzly", "ide-view", "ide-view-component-group", - "itertools 0.10.3", + "itertools 0.10.5", "js-sys", "json-rpc", "mockall", "nalgebra 0.26.2", - "parser", + "parser-scala", "regex", - "semver 1.0.9", + "semver 1.0.14", "serde", "serde_json", "sha3", @@ -2251,19 +2260,19 @@ dependencies = [ "enso-shapely", "enso-web", "failure", - "futures 0.3.21", + "futures 0.3.24", "gen-iter", "ifmt", - "itertools 0.10.3", + "itertools 0.10.5", "lazy_static", "nalgebra 0.26.2", "num", "object 0.24.0", - "paste 1.0.7", + "paste 1.0.9", "serde", "serde_json", "shrinkwraprs 0.3.0", - "smallvec 1.8.0", + "smallvec 1.9.0", "tracing", "tracing-subscriber", "tracing-wasm", @@ -2279,7 +2288,7 @@ version = "0.1.0" dependencies = [ "enso-profiler-macros", "enso-web", - "futures 0.3.21", + "futures 0.3.24", "serde", "serde_json", "wasm-bindgen", @@ -2292,7 +2301,7 @@ dependencies = [ "derivative", "enso-prelude", "enso-profiler", - "futures 0.3.21", + "futures 0.3.24", "serde", "serde_json", ] @@ -2302,7 +2311,7 @@ name = "enso-profiler-demo-data" version = "0.1.0" dependencies = [ "enso-profiler", - "futures 0.3.21", + "futures 0.3.24", ] [[package]] @@ -2324,7 +2333,7 @@ version = "0.1.0" dependencies = [ "enso-profiler", "enso-profiler-data", - "futures 0.3.21", + "futures 0.3.24", ] [[package]] @@ -2418,7 
+2427,7 @@ dependencies = [ "enso-reflect", "nalgebra 0.26.2", "num-traits", - "paste 1.0.7", + "paste 1.0.9", "serde", ] @@ -2502,16 +2511,16 @@ dependencies = [ "ensogl-text-embedded-fonts", "enum_dispatch", "failure", - "itertools 0.10.3", + "itertools 0.10.5", "js-sys", "nalgebra 0.26.2", "num-traits", "num_enum", "rustc-hash", - "semver 1.0.9", + "semver 1.0.14", "serde", "shrinkwraprs 0.3.0", - "smallvec 1.8.0", + "smallvec 1.9.0", "typenum", "wasm-bindgen", "wasm-bindgen-test", @@ -2624,7 +2633,7 @@ dependencies = [ "ensogl-grid-view", "ensogl-hardcoded-theme", "ensogl-text-msdf", - "itertools 0.10.3", + "itertools 0.10.5", "wasm-bindgen", ] @@ -2669,10 +2678,10 @@ dependencies = [ "ensogl-text", "ensogl-text-msdf", "ensogl-tooltip", - "futures 0.3.21", + "futures 0.3.24", "qstring", "serde", - "url 2.2.2", + "url 2.3.0", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -2693,7 +2702,7 @@ dependencies = [ "ensogl-text", "ensogl-text-msdf", "ensogl-tooltip", - "futures 0.3.21", + "futures 0.3.24", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -2816,7 +2825,7 @@ dependencies = [ "ensogl-scroll-area", "ensogl-shadow", "ensogl-text", - "itertools 0.10.3", + "itertools 0.10.5", "segment-tree", ] @@ -2950,8 +2959,9 @@ dependencies = [ "enso-build-utilities", "enso-prelude", "ensogl-text-font-family", + "ide-ci", "owned_ttf_parser", - "tokio 1.19.2", + "tokio", "zip 0.5.13", ] @@ -2974,11 +2984,13 @@ dependencies = [ "ensogl-text-embedded-fonts", "ensogl-text-font-family", "failure", - "futures 0.3.21", + "futures 0.3.24", + "ide-ci", "js-sys", "nalgebra 0.26.2", "owned_ttf_parser", "serde", + "tokio", "wasm-bindgen", "wasm-bindgen-test", ] @@ -3028,9 +3040,9 @@ dependencies = [ [[package]] name = "event-listener" -version = "2.5.2" +version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77f3309417938f28bf8228fcff79a4a37103981e3e186d2ccd19c74b38f4eb71" +checksum = 
"0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "failure" @@ -3056,23 +3068,23 @@ dependencies = [ [[package]] name = "fastrand" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" +checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" dependencies = [ "instant", ] [[package]] name = "filetime" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0408e2626025178a6a7f7ffc05a25bc47103229f19c113755de7bf63816290c" +checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.2.13", - "winapi 0.3.9", + "redox_syscall 0.2.16", + "windows-sys", ] [[package]] @@ -3095,13 +3107,11 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.23" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39522e96686d38f4bc984b9198e3a0613264abaebaff2c5c918bfa6b6da09af" +checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" dependencies = [ - "cfg-if 1.0.0", "crc32fast", - "libc", "miniz_oxide", ] @@ -3111,8 +3121,8 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b02e0d3667b27514149c1ac9b372d700f3e6df4bbaf6b7c5df12915de2996049" dependencies = [ - "futures 0.3.21", - "smallvec 1.8.0", + "futures 0.3.24", + "smallvec 1.9.0", ] [[package]] @@ -3132,15 +3142,15 @@ checksum = "fb23b6902f3cdc0544f9916b4c092f46f4ff984e219d5a0c538b6b3539885af3" [[package]] name = "flume" -version = "0.10.12" +version = "0.10.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843c03199d0c0ca54bc1ea90ac0d507274c28abcc4f691ae8b4eaa375087c76a" +checksum = "1657b4441c3403d9f7b3409e47575237dac27b1b5726df654a6ecbf92f0f7577" 
dependencies = [ "futures-core", "futures-sink", "nanorand", "pin-project", - "spin 0.9.3", + "spin 0.9.4", ] [[package]] @@ -3187,9 +3197,9 @@ dependencies = [ [[package]] name = "fragile" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9d758e60b45e8d749c89c1b389ad8aee550f86aa12e2b9298b546dda7a82ab1" +checksum = "85dcb89d2b10c5f6133de2efd8c11959ce9dbb46a2f7a4cab208c4eeda6ce1ab" [[package]] name = "fs_extra" @@ -3227,9 +3237,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" +checksum = "7f21eda599937fba36daeb58a22e8f5cee2d14c4a17b5b7739c7c8e5e3b8230c" dependencies = [ "futures-channel", "futures-core", @@ -3242,9 +3252,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" +checksum = "30bdd20c28fadd505d0fd6712cdfcb0d4b5648baf45faef7f852afb2399bb050" dependencies = [ "futures-core", "futures-sink", @@ -3252,15 +3262,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" +checksum = "4e5aa3de05362c3fb88de6531e6296e85cde7739cccad4b9dfeeb7f6ebce56bf" [[package]] name = "futures-executor" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6" +checksum = "9ff63c23854bee61b6e9cd331d523909f238fc7636290b96826e9cfa5faa00ab" dependencies = [ "futures-core", 
"futures-task", @@ -3269,9 +3279,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" +checksum = "bbf4d2a7a308fd4578637c0b17c7e1c7ba127b8f6ba00b29f717e9655d85eb68" [[package]] name = "futures-lite" @@ -3284,15 +3294,15 @@ dependencies = [ "futures-io", "memchr", "parking", - "pin-project-lite 0.2.9", + "pin-project-lite", "waker-fn", ] [[package]] name = "futures-macro" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" +checksum = "42cd15d1c7456c04dbdf7e88bcd69760d74f3a798d6444e16974b505b0e62f17" dependencies = [ "proc-macro2", "quote", @@ -3301,21 +3311,27 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" +checksum = "21b20ba5a92e727ba30e72834706623d94ac93a725410b6a6b6fbc1b07f7ba56" [[package]] name = "futures-task" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" +checksum = "a6508c467c73851293f390476d4491cf4d227dbabcd4170f3bb6044959b294f1" + +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" [[package]] name = "futures-util" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" +checksum = "44fb6cb1be61cc1d2e43b262516aafcf63b241cffdb1d3fa115f91d9c7b09c90" 
dependencies = [ "futures-channel", "futures-core", @@ -3324,7 +3340,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite 0.2.9", + "pin-project-lite", "pin-utils", "slab", ] @@ -3362,9 +3378,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.5" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" dependencies = [ "typenum", "version_check 0.9.4", @@ -3405,9 +3421,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.26.1" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" +checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d" [[package]] name = "glob" @@ -3438,19 +3454,19 @@ dependencies = [ [[package]] name = "graphql-parser" -version = "0.2.3" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5613c31f18676f164112732202124f373bb2103ff017b3b85ca954ea6a66ada" +checksum = "d2ebc8013b4426d5b81a4364c419a95ed0b404af2b82e2457de52d9348f0e474" dependencies = [ "combine 3.8.1", - "failure", + "thiserror", ] [[package]] name = "graphql_client" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9b58571cfc3cc42c3e8ff44fc6cfbb6c0dea17ed22d20f9d8f1efc4e8209a3f" +checksum = "7fc16d75d169fddb720d8f1c7aed6413e329e1584079b9734ff07266a193f5bc" dependencies = [ "graphql_query_derive", "serde", @@ -3459,13 +3475,13 @@ dependencies = [ [[package]] name = "graphql_client_codegen" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4bf9cd823359d74ad3d3ecf1afd4a975f4ff2f891cdf9a66744606daf52de8c" +checksum = 
"f290ecfa3bea3e8a157899dc8a1d96ee7dd6405c18c8ddd213fc58939d18a0e9" dependencies = [ "graphql-introspection-query", "graphql-parser", - "heck 0.3.3", + "heck", "lazy_static", "proc-macro2", "quote", @@ -3476,9 +3492,9 @@ dependencies = [ [[package]] name = "graphql_query_derive" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e56b093bfda71de1da99758b036f4cc811fd2511c8a76f75680e9ffbd2bb4251" +checksum = "a755cc59cda2641ea3037b4f9f7ef40471c329f55c1fa2db6fa0bb7ae6c1f7ce" dependencies = [ "graphql_client_codegen", "proc-macro2", @@ -3487,29 +3503,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.2.7" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e4728fd124914ad25e99e3d15a9361a879f6620f63cb56bbb08f95abb97a535" -dependencies = [ - "bytes 0.5.6", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http", - "indexmap", - "slab", - "tokio 0.2.25", - "tokio-util 0.3.1", - "tracing", - "tracing-futures", -] - -[[package]] -name = "h2" -version = "0.3.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37a82c6d637fc9515a4694bbf1cb2457b79d81ce52b3108bdeea58b07dd34a57" +checksum = "5ca32592cf21ac7ccab1825cd87f6c9b3d9022c44d086172ed0966bec8af30be" dependencies = [ "bytes 1.1.0", "fnv", @@ -3519,8 +3515,8 @@ dependencies = [ "http", "indexmap", "slab", - "tokio 1.19.2", - "tokio-util 0.7.2", + "tokio", + "tokio-util", "tracing", ] @@ -3532,15 +3528,18 @@ checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" [[package]] name = "hashbrown" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash", +] [[package]] name = "hdrhistogram" -version = "7.5.0" 
+version = "7.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31672b7011be2c4f7456c4ddbcb40e7e9a4a9fad8efe49a6ebaf5f307d0109c0" +checksum = "7f19b9f54f7c7f55e31401bb647626ce0cf0f67b0004982ce815b3ee72a02aa8" dependencies = [ "base64 0.13.0", "byteorder", @@ -3551,18 +3550,18 @@ dependencies = [ [[package]] name = "headers" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cff78e5788be1e0ab65b04d306b2ed5092c815ec97ec70f4ebd5aee158aa55d" +checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" dependencies = [ "base64 0.13.0", "bitflags", "bytes 1.1.0", "headers-core", "http", - "httpdate 1.0.2", + "httpdate", "mime 0.3.16", - "sha-1", + "sha1 0.10.5", ] [[package]] @@ -3574,15 +3573,6 @@ dependencies = [ "http", ] -[[package]] -name = "heck" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" -dependencies = [ - "unicode-segmentation", -] - [[package]] name = "heck" version = "0.4.0" @@ -3610,7 +3600,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -3621,17 +3611,7 @@ checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" dependencies = [ "bytes 1.1.0", "fnv", - "itoa 1.0.2", -] - -[[package]] -name = "http-body" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13d5ff830006f7646652e057693569bfe0d51760c0085a071769d142a205111b" -dependencies = [ - "bytes 0.5.6", - "http", + "itoa 1.0.3", ] [[package]] @@ -3642,7 +3622,7 @@ checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ "bytes 1.1.0", "http", - "pin-project-lite 0.2.9", + 
"pin-project-lite", ] [[package]] @@ -3653,25 +3633,40 @@ checksum = "0bfe8eed0a9285ef776bb792479ea3834e8b94e13d615c2f66d03dd50a435a29" [[package]] name = "http-serde" -version = "1.1.0" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d98b3d9662de70952b14c4840ee0f37e23973542a363e2275f4b9d024ff6cca" +checksum = "0e272971f774ba29341db2f686255ff8a979365a26fb9e4277f6b6d9ec0cdd5e" dependencies = [ "http", "serde", ] [[package]] -name = "httparse" -version = "1.7.1" +name = "http-types" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c" +checksum = "6e9b187a72d63adbfba487f48095306ac823049cb504ee195541e91c7775f5ad" +dependencies = [ + "anyhow", + "async-channel", + "base64 0.13.0", + "futures-lite", + "http", + "infer", + "pin-project-lite", + "rand 0.7.3", + "serde", + "serde_json", + "serde_qs", + "serde_urlencoded", + "url 2.3.0", +] [[package]] -name = "httpdate" -version = "0.3.2" +name = "httparse" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" @@ -3715,47 +3710,23 @@ dependencies = [ [[package]] name = "hyper" -version = "0.13.10" +version = "0.14.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a6f157065790a3ed2f88679250419b5cdd96e714a0d65f7797fd337186e96bb" -dependencies = [ - "bytes 0.5.6", - "futures-channel", - "futures-core", - "futures-util", - "h2 0.2.7", - "http", - "http-body 0.3.1", - "httparse", - "httpdate 0.3.2", - "itoa 0.4.8", - "pin-project", - "socket2 0.3.19", - "tokio 0.2.25", - "tower-service", - "tracing", - "want", -] - -[[package]] -name = "hyper" -version = "0.14.18" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b26ae0a80afebe130861d90abf98e3814a4f28a4c6ffeb5ab8ebb2be311e0ef2" +checksum = "02c929dc5c39e335a03c405292728118860721b10190d98c2a0f0efd5baafbac" dependencies = [ "bytes 1.1.0", "futures-channel", "futures-core", "futures-util", - "h2 0.3.13", + "h2", "http", - "http-body 0.4.5", + "http-body", "httparse", - "httpdate 1.0.2", - "itoa 1.0.2", - "pin-project-lite 0.2.9", - "socket2 0.4.4", - "tokio 1.19.2", + "httpdate", + "itoa 1.0.3", + "pin-project-lite", + "socket2", + "tokio", "tower-service", "tracing", "want", @@ -3769,11 +3740,11 @@ checksum = "5f9f7a97316d44c0af9b0301e65010573a853a9fc97046d7331d7f6bc0fd5a64" dependencies = [ "ct-logs", "futures-util", - "hyper 0.14.18", + "hyper 0.14.20", "log 0.4.17", "rustls 0.19.1", "rustls-native-certs", - "tokio 1.19.2", + "tokio", "tokio-rustls 0.22.0", "webpki 0.21.4", ] @@ -3785,9 +3756,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d87c48c02e0dc5e3b849a2041db3029fd066650f8f717c07bf8ed78ccb895cac" dependencies = [ "http", - "hyper 0.14.18", + "hyper 0.14.20", "rustls 0.20.6", - "tokio 1.19.2", + "tokio", "tokio-rustls 0.23.4", ] @@ -3797,23 +3768,23 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "hyper 0.14.18", - "pin-project-lite 0.2.9", - "tokio 1.19.2", + "hyper 0.14.20", + "pin-project-lite", + "tokio", "tokio-io-timeout", ] [[package]] name = "hyper-tls" -version = "0.4.3" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d979acc56dcb5b8dddba3917601745e877576475aa046df3226eabdecef78eed" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 0.5.6", - "hyper 0.13.10", + "bytes 1.1.0", + "hyper 0.14.20", "native-tls", - "tokio 0.2.25", - "tokio-tls 0.3.1", + "tokio", + 
"tokio-native-tls", ] [[package]] @@ -3825,17 +3796,29 @@ dependencies = [ "base64 0.13.0", "bytes 1.1.0", "http", - "httpdate 1.0.2", + "httpdate", "language-tags 0.3.2", "mime 0.3.16", "percent-encoding 2.1.0", "unicase 2.6.0", ] +[[package]] +name = "iana-time-zone" +version = "0.1.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd911b35d940d2bd0bea0f9100068e5b97b51a1cbe13d13382f132e0365257a0" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "js-sys", + "wasm-bindgen", + "winapi 0.3.9", +] + [[package]] name = "ide-ci" version = "0.1.0" -source = "git+https://github.com/enso-org/ci-build?branch=develop#e283a55fba4b43bb3eeec4ce2d3982d20c8748d2" dependencies = [ "anyhow", "async-compression", @@ -3843,11 +3826,11 @@ dependencies = [ "bincode 1.3.3", "byte-unit", "bytes 1.1.0", - "cached 0.34.0", + "cached", "cfg-if 1.0.0", "chrono", - "clap 3.1.18", - "convert_case 0.5.0", + "clap 3.1.15", + "convert_case 0.6.0", "cron", "data-encoding", "derivative", @@ -3858,24 +3841,24 @@ dependencies = [ "flume", "fn-error-context", "fs_extra", - "futures 0.3.21", + "futures 0.3.24", "futures-util", "glob", "graphql_client", "headers", - "heck 0.4.0", + "heck", "http-serde", "ifmt", "indexmap", "indicatif", - "itertools 0.10.3", + "itertools 0.10.5", "lazy_static", "log 0.4.17", "mime 0.3.16", "new_mime_guess", "nix", "octocrab", - "paste 1.0.7", + "paste 1.0.9", "path-absolutize", "path-slash", "pathdiff", @@ -3887,31 +3870,33 @@ dependencies = [ "quote", "rand 0.8.5", "regex", - "reqwest 0.11.10", + "reqwest", "scopeguard", - "semver 1.0.9", + "semver 1.0.14", "serde", "serde_json", - "serde_yaml 0.9.10", + "serde_yaml 0.9.13", "sha2", "shrinkwraprs 0.3.0", "snafu", "strum", "symlink", "syn", - "sysinfo 0.23.13", + "sysinfo", "tar", "tempfile", - "tokio 1.19.2", - "tokio-util 0.7.2", + "tokio", + "tokio-util", "tracing", "tracing-subscriber", "unicase 2.6.0", - "url 2.2.2", + "url 2.3.0", "uuid 1.1.2", "walkdir", 
+ "warp", "which", "whoami", + "wiremock", "zip 0.6.2", ] @@ -3938,7 +3923,7 @@ dependencies = [ "multi-map", "nalgebra 0.26.2", "ordered-float", - "parser", + "parser-scala", "serde", "serde_json", "span-tree", @@ -4112,15 +4097,21 @@ dependencies = [ [[package]] name = "indicatif" -version = "0.17.0-rc.11" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4017d0ce94b8e91e29d2c78ed891e57e5ec3dc4371820a9d96abab4af09eb8ad" +checksum = "bfddc9561e8baf264e0e45e197fd7696320026eb10a8180340debc27b18f535b" dependencies = [ "console", "number_prefix", "unicode-width", ] +[[package]] +name = "infer" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac" + [[package]] name = "instant" version = "0.1.12" @@ -4165,9 +4156,9 @@ dependencies = [ [[package]] name = "itertools" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] @@ -4180,9 +4171,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" [[package]] name = "itoa" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" +checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754" [[package]] name = "jni" @@ -4191,7 +4182,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" dependencies = [ "cesu8", - "combine 4.6.4", + "combine 4.6.6", "jni-sys", "log 0.4.17", "thiserror", @@ -4206,9 +4197,9 @@ checksum = 
"8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "jobserver" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa" +checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b" dependencies = [ "libc", ] @@ -4232,7 +4223,7 @@ dependencies = [ "enso-shapely", "enso-web", "failure", - "futures 0.3.21", + "futures 0.3.24", "serde", "serde_json", "shrinkwraprs 0.3.0", @@ -4240,9 +4231,9 @@ dependencies = [ [[package]] name = "jsonwebtoken" -version = "8.1.0" +version = "8.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc9051c17f81bae79440afa041b3a278e1de71bfb96d32454b477fd4703ccb6f" +checksum = "1aa4b4af834c6cfd35d8763d359661b90f2e45d8f750a0849156c7f4671af09c" dependencies = [ "base64 0.13.0", "pem", @@ -4341,15 +4332,15 @@ checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" [[package]] name = "libm" -version = "0.2.2" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33a33a362ce288760ec6a508b94caaec573ae7d3bbbd91b87aa0bad4456839db" +checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565" [[package]] name = "linked-hash-map" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "lock_api" @@ -4362,9 +4353,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" +checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" dependencies = [ "autocfg 
1.1.0", "scopeguard", @@ -4393,12 +4384,12 @@ dependencies = [ name = "logstat" version = "0.1.0" dependencies = [ - "clap 3.1.18", + "clap 3.1.15", "enso-prelude", "lazy_static", "regex", - "time 0.3.9", - "tokio 1.19.2", + "time 0.3.14", + "tokio", "tokio-stream", ] @@ -4449,11 +4440,11 @@ checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" [[package]] name = "md-5" -version = "0.10.1" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658646b21e0b72f7866c7038ab086d3d5e1cd6271f060fd37defb241949d0582" +checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -4504,9 +4495,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.5.1" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082" +checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34" dependencies = [ "adler", ] @@ -4532,9 +4523,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713d550d9b44d89174e066b7a6217ae06234c10cb47819a88290d2b353c31799" +checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf" dependencies = [ "libc", "log 0.4.17", @@ -4587,6 +4578,24 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bba551d6d795f74a01767577ea8339560bf0a65354e0417b7e915ed608443d46" +[[package]] +name = "multipart" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00dec633863867f29cb39df64a397cdf4a6354708ddd7759f70c7fb51c5f9182" +dependencies = [ + "buf_redux", + "httparse", + "log 0.4.17", + "mime 0.3.16", + "mime_guess", + 
"quick-error", + "rand 0.8.5", + "safemem", + "tempfile", + "twoway", +] + [[package]] name = "nalgebra" version = "0.21.1" @@ -4701,9 +4710,9 @@ checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" [[package]] name = "ntapi" -version = "0.3.7" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28774a7fd2fbb4f0babd8237ce554b73af68021b5f695a3cebd6c59bac0980f" +checksum = "bc51db7b362b205941f71232e56c625156eb9a929f8cf74a428fd5bc094a4afc" dependencies = [ "winapi 0.3.9", ] @@ -4715,10 +4724,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" dependencies = [ "num-bigint", - "num-complex 0.4.1", + "num-complex 0.4.2", "num-integer", "num-iter", - "num-rational 0.4.0", + "num-rational 0.4.1", "num-traits", ] @@ -4756,9 +4765,9 @@ dependencies = [ [[package]] name = "num-complex" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fbc387afefefd5e9e39493299f3069e14a140dd34dc19b4c1c1a8fddb6a790" +checksum = "7ae39348c8bc5fbd7f40c727a9925f03517afd2ab27d46702108b6a7e5414c19" dependencies = [ "num-traits", ] @@ -4808,9 +4817,9 @@ dependencies = [ [[package]] name = "num-rational" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d41702bd167c2df5520b384281bc111a4b5efcf7fbc4c9c222c815b07e0a6a6a" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" dependencies = [ "autocfg 1.1.0", "num-bigint", @@ -4885,40 +4894,41 @@ dependencies = [ [[package]] name = "object" -version = "0.28.4" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424" +checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" dependencies = [ "memchr", ] 
[[package]] name = "octocrab" -version = "0.16.0" -source = "git+https://github.com/enso-org/octocrab#2a104c9673f45d48ae0438d0b05bcd905eb1334b" +version = "0.17.0" +source = "git+https://github.com/enso-org/octocrab#6bbc04b927cab3880bddd697911fb56bfa78b69e" dependencies = [ "arc-swap", "async-trait", "base64 0.13.0", "bytes 1.1.0", + "cfg-if 1.0.0", "chrono", "hyperx", "jsonwebtoken", "once_cell", - "reqwest 0.11.10", + "reqwest", "secrecy", "serde", "serde_json", "serde_path_to_error", "snafu", - "url 2.2.2", + "url 2.3.0", ] [[package]] name = "once_cell" -version = "1.12.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" +checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" [[package]] name = "oorandom" @@ -4940,9 +4950,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = "0.10.40" +version = "0.10.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb81a6430ac911acb25fe5ac8f1d2af1b4ea8a4fdfda0f1ee4292af2e2d8eb0e" +checksum = "12fc0523e3bd51a692c8850d075d74dc062ccf251c0110668cbd921917118a13" dependencies = [ "bitflags", "cfg-if 1.0.0", @@ -4972,9 +4982,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.73" +version = "0.9.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5fd19fb3e0a8191c1e34935718976a3e70c112ab9a24af6d7cadccd9d90bc0" +checksum = "5230151e44c0f05157effb743e8d517472843121cf9243e8b81393edb5acd9ce" dependencies = [ "autocfg 1.1.0", "cc", @@ -4985,35 +4995,34 @@ dependencies = [ [[package]] name = "ordered-float" -version = "3.0.0" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96bcbab4bfea7a59c2c0fe47211a1ac4e3e96bea6eb446d704f310bc5c732ae2" +checksum 
= "98ffdb14730ed2ef599c65810c15b000896e21e8776b512de0db0c3d7335cc2a" dependencies = [ "num-traits", ] [[package]] name = "os_str_bytes" -version = "6.1.0" +version = "6.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21326818e99cfe6ce1e524c2a805c189a99b5ae555a35d19f9a284b427d86afa" +checksum = "9ff7415e9ae3fff1225851df9e0d9e4e5479f947619774677a63572e55e80eff" [[package]] name = "ouroboros" -version = "0.15.0" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f31a3b678685b150cba82b702dcdc5e155893f63610cf388d30cd988d4ca2bf" +checksum = "dfbb50b356159620db6ac971c6d5c9ab788c9cc38a6f49619fca2a27acb062ca" dependencies = [ "aliasable", "ouroboros_macro", - "stable_deref_trait", ] [[package]] name = "ouroboros_macro" -version = "0.15.0" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "084fd65d5dd8b3772edccb5ffd1e4b7eba43897ecd0f9401e330e8c542959408" +checksum = "4a0d9d1a6191c4f391f87219d1ea42b23f09ee84d64763cd05ee6ea88d9f384d" dependencies = [ "Inflector", "proc-macro-error", @@ -5024,9 +5033,9 @@ dependencies = [ [[package]] name = "owned_ttf_parser" -version = "0.15.1" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07ef1a404ae479dd6906f4fa2c88b3c94028f1284beb42a47c183a7c27ee9a3e" +checksum = "05e6affeb1632d6ff6a23d2cd40ffed138e82f1532571a26f527c8a284bb2fbb" dependencies = [ "ttf-parser", ] @@ -5050,11 +5059,11 @@ dependencies = [ [[package]] name = "parking_lot" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ - "lock_api 0.4.7", + "lock_api 0.4.9", "parking_lot_core 0.9.3", ] @@ -5081,32 +5090,32 @@ checksum = 
"09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.2.13", - "smallvec 1.8.0", + "redox_syscall 0.2.16", + "smallvec 1.9.0", "windows-sys", ] [[package]] -name = "parser" +name = "parser-scala" version = "0.1.0" dependencies = [ "ast", - "bytes 0.5.6", + "bytes 1.1.0", "console_error_panic_hook", - "enso-build-utilities", "enso-data-structures", "enso-prelude", "enso-profiler", "enso-text", "failure", - "futures 0.3.21", + "futures 0.3.24", + "ide-ci", "js-sys", "matches", - "reqwest 0.10.10", + "reqwest", "serde", "serde_json", "shrinkwraprs 0.2.3", - "tokio 0.2.25", + "tokio", "uuid 0.8.2", "wasm-bindgen", "wasm-bindgen-test", @@ -5120,7 +5129,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d791538a6dcc1e7cb7fe6f6b58aca40e7f79403c45b2bc274008b5e647af1d8" dependencies = [ "base64ct", - "rand_core 0.6.3", + "rand_core 0.6.4", "subtle", ] @@ -5136,9 +5145,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.7" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc" +checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" [[package]] name = "paste-impl" @@ -5175,9 +5184,9 @@ dependencies = [ [[package]] name = "path-slash" -version = "0.1.4" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cacbb3c4ff353b534a67fb8d7524d00229da4cb1dc8c79f4db96e375ab5b619" +checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42" [[package]] name = "pathdiff" @@ -5191,7 +5200,7 @@ version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271779f35b581956db91a3e55737327a03aa051e90b1c47aeb189508533adfd7" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", "hmac", "password-hash", "sha2", @@ -5199,9 +5208,9 @@ dependencies = [ 
[[package]] name = "pem" -version = "1.0.2" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9a3b09a20e374558580a4914d3b7d89bd61b954a5a5e1dcbea98753addb1947" +checksum = "03c64931a1a212348ec4f3b4362585eca7159d0d09cbdf4a7f74f02173596fd4" dependencies = [ "base64 0.13.0", ] @@ -5220,30 +5229,24 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "pin-project" -version = "1.0.10" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.0.10" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" dependencies = [ "proc-macro2", "quote", "syn", ] -[[package]] -name = "pin-project-lite" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" - [[package]] name = "pin-project-lite" version = "0.2.9" @@ -5264,18 +5267,18 @@ checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" [[package]] name = "platforms" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86d1db500905601725f5c3629a5815a2ce7611fe063de279964b451f3edb3532" +checksum = "d8ec293fd25f7fcfeb7c70129241419a62c6200a26a725f680aff07c91d0ed05" dependencies = [ "serde", ] [[package]] name = "plotters" -version = "0.3.1" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"32a3fd9ec30b9749ce28cd91f255d569591cdf937fe280c312143e3c4bad6f2a" +checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" dependencies = [ "num-traits", "plotters-backend", @@ -5286,25 +5289,26 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d88417318da0eaf0fdcdb51a0ee6c3bed624333bff8f946733049380be67ac1c" +checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" [[package]] name = "plotters-svg" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "521fa9638fa597e1dc53e9412a4f9cefb01187ee1f7413076f9e6749e2885ba9" +checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" dependencies = [ "plotters-backend", ] [[package]] name = "polling" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "685404d509889fade3e86fe3a5803bca2ec09b0c0778d5ada6ec8bf7a8de5259" +checksum = "899b00b9c8ab553c743b3e11e87c5c7d423b2a2de229ba95b24a756344748011" dependencies = [ + "autocfg 1.1.0", "cfg-if 1.0.0", "libc", "log 0.4.17", @@ -5365,10 +5369,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "1.1.3" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e17d47ce914bf4de440332250b0edd23ce48c005f59fab39d3335866b114f11a" +checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" dependencies = [ + "once_cell", "thiserror", "toml", ] @@ -5405,18 +5410,18 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" [[package]] name = "proc-macro2" -version = "1.0.40" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7" +checksum = 
"7bd7356a8122b6c4a24a82b278680c73357984ca2fc79a0f9fa6dea7dced7c58" dependencies = [ "unicode-ident", ] [[package]] name = "prost" -version = "0.10.4" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71adf41db68aa0daaefc69bb30bcd68ded9b9abaad5d1fbb6304c4fb390e083e" +checksum = "399c3c31cdec40583bb68f0b18403400d01ec4289c383aa047560439952c4dd7" dependencies = [ "bytes 1.1.0", "prost-derive", @@ -5424,12 +5429,12 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.10.1" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b670f45da57fb8542ebdbb6105a925fe571b67f9e7ed9f47a06a84e72b4e7cc" +checksum = "7345d5f0e08c0536d7ac7229952590239e77abf0a0100a1b1d890add6ea96364" dependencies = [ "anyhow", - "itertools 0.10.3", + "itertools 0.10.5", "proc-macro2", "quote", "syn", @@ -5437,9 +5442,9 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.10.1" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d0a014229361011dc8e69c8a1ec6c2e8d0f2af7c91e3ea3f5b2170298461e68" +checksum = "4dfaa718ad76a44b3415e6c4d53b17c8f99160dcb3a99b10470fce8ad43f6e3e" dependencies = [ "bytes 1.1.0", "prost", @@ -5447,9 +5452,9 @@ dependencies = [ [[package]] name = "pulldown-cmark" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34f197a544b0c9ab3ae46c359a7ec9cbbb5c7bf97054266fecb7ead794a181d6" +checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63" dependencies = [ "bitflags", "getopts", @@ -5472,20 +5477,11 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" -[[package]] -name = "quickcheck" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" -dependencies = [ - "rand 0.8.5", -] - [[package]] name = "quote" -version = "1.0.20" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804" +checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" dependencies = [ "proc-macro2", ] @@ -5530,7 +5526,7 @@ checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha 0.3.1", - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -5560,7 +5556,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -5589,9 +5585,9 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom 0.2.7", ] @@ -5712,7 +5708,7 @@ checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" dependencies = [ "crossbeam-channel", "crossbeam-deque", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.11", "num_cpus", ] @@ -5733,9 +5729,9 @@ checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" [[package]] name = "redox_syscall" -version = "0.2.13" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ "bitflags", ] @@ -5747,7 +5743,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ "getrandom 0.2.7", - "redox_syscall 0.2.13", + "redox_syscall 0.2.16", "thiserror", ] @@ -5788,79 +5784,53 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.10.10" +version = "0.11.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0718f81a8e14c4dbb3b34cf23dc6aaf9ab8a0dfec160c534b3dbca1aaa21f47c" -dependencies = [ - "base64 0.13.0", - "bytes 0.5.6", - "encoding_rs", - "futures-core", - "futures-util", - "http", - "http-body 0.3.1", - "hyper 0.13.10", - "hyper-tls", - "ipnet", - "js-sys", - "lazy_static", - "log 0.4.17", - "mime 0.3.16", - "mime_guess", - "native-tls", - "percent-encoding 2.1.0", - "pin-project-lite 0.2.9", - "serde", - "serde_json", - "serde_urlencoded", - "tokio 0.2.25", - "tokio-tls 0.3.1", - "url 2.2.2", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "winreg 0.7.0", -] - -[[package]] -name = "reqwest" -version = "0.11.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46a1f7aa4f35e5e8b4160449f51afc758f0ce6454315a9fa7d0d113e958c41eb" +checksum = "431949c384f4e2ae07605ccaa56d1d9d2ecdb5cadd4f9577ccfab29f2e5149fc" dependencies = [ "base64 0.13.0", "bytes 1.1.0", "encoding_rs", "futures-core", "futures-util", - "h2 0.3.13", + "h2", "http", - "http-body 0.4.5", - "hyper 0.14.18", + "http-body", + "hyper 0.14.20", "hyper-rustls 0.23.0", + "hyper-tls", "ipnet", "js-sys", - "lazy_static", "log 0.4.17", "mime 0.3.16", + "native-tls", + "once_cell", "percent-encoding 2.1.0", - "pin-project-lite 0.2.9", + "pin-project-lite", "rustls 0.20.6", - "rustls-pemfile", + "rustls-pemfile 1.0.1", "serde", "serde_json", "serde_urlencoded", - "tokio 1.19.2", + "tokio", + "tokio-native-tls", "tokio-rustls 0.23.4", - "tokio-util 0.6.10", - "url 2.2.2", + "tokio-util", + "tower-service", + "url 2.3.0", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", "webpki-roots", - "winreg 0.10.1", + 
"winreg", ] +[[package]] +name = "retain_mut" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0" + [[package]] name = "ring" version = "0.16.20" @@ -5903,7 +5873,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.9", + "semver 1.0.14", ] [[package]] @@ -5945,18 +5915,27 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "0.3.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee86d63972a7c661d1536fefe8c3c8407321c3df668891286de28abcd087360" +checksum = "5eebeaeb360c87bfb72e84abdb3447159c0eaececf1bef2aecd65a8be949d1c9" +dependencies = [ + "base64 0.13.0", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0864aeff53f8c05aa08d86e5ef839d3dfcf07aeba2db32f12db0ef716e87bd55" dependencies = [ "base64 0.13.0", ] [[package]] name = "rustversion" -version = "1.0.6" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" +checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" [[package]] name = "rustybuzz" @@ -5966,7 +5945,7 @@ checksum = "a617c811f5c9a7060fe511d35d13bf5b9f0463ce36d63ce666d05779df2b4eba" dependencies = [ "bitflags", "bytemuck", - "smallvec 1.8.0", + "smallvec 1.9.0", "ttf-parser", "unicode-bidi-mirroring", "unicode-ccc", @@ -5976,9 +5955,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" +checksum = 
"4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" [[package]] name = "safemem" @@ -6048,9 +6027,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.6.1" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dc14f172faf8a0194a3aded622712b0de276821addc574fa54fc0a1167e10dc" +checksum = "2bc1bb97804af6631813c55739f771071e0f2ed33ee20b68c86ec505d906356c" dependencies = [ "bitflags", "core-foundation", @@ -6086,9 +6065,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.9" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cb243bdfdb5936c8dc3c45762a19d12ab4550cdc753bc247637d4ec35a040fd" +checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4" dependencies = [ "serde", ] @@ -6101,9 +6080,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.144" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f747710de3dcd43b88c9168773254e809d8ddbdf9653b84e2554ab219f17860" +checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b" dependencies = [ "serde_derive", ] @@ -6120,9 +6099,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.144" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94ed3a816fb1d101812f83e789f888322c34e291f894f19590dc310963e87a00" +checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c" dependencies = [ "proc-macro2", "quote", @@ -6131,24 +6110,35 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.81" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c" +checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074" dependencies = [ 
- "itoa 1.0.2", + "itoa 1.0.3", "ryu", "serde", ] [[package]] name = "serde_path_to_error" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7868ad3b8196a8a0aea99a8220b124278ee5320a55e4fde97794b6f85b1a377" +checksum = "184c643044780f7ceb59104cef98a5a6f12cb2288a7bc701ab93a362b49fd47d" dependencies = [ "serde", ] +[[package]] +name = "serde_qs" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6" +dependencies = [ + "percent-encoding 2.1.0", + "serde", + "thiserror", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -6156,16 +6146,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 1.0.2", + "itoa 1.0.3", "ryu", "serde", ] [[package]] name = "serde_yaml" -version = "0.8.24" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707d15895415db6628332b737c838b88c598522e4dc70647e59b72312924aebc" +checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" dependencies = [ "indexmap", "ryu", @@ -6175,12 +6165,12 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.10" +version = "0.9.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a09f551ccc8210268ef848f0bab37b306e87b85b2e017b899e7fb815f5aed62" +checksum = "8613d593412a0deb7bbd8de9d908efff5a0cb9ccd8f62c641e7b2ed2f57291d1" dependencies = [ "indexmap", - "itoa 1.0.2", + "itoa 1.0.3", "ryu", "serde", "unsafe-libyaml", @@ -6194,7 +6184,7 @@ checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -6208,13 +6198,13 @@ dependencies = [ [[package]] name = "sha1" -version 
= "0.10.1" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c77f4e7f65455545c2153c1253d25056825e77ee2533f0e41deb65a93a34852f" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -6225,13 +6215,13 @@ checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" [[package]] name = "sha2" -version = "0.10.2" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -6312,26 +6302,29 @@ dependencies = [ "approx 0.4.0", "num-complex 0.3.1", "num-traits", - "paste 1.0.7", + "paste 1.0.9", ] [[package]] name = "simple_asn1" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a762b1c38b9b990c694b9c2f8abe3372ce6a9ceaae6bca39cfc46e054f45745" +checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" dependencies = [ "num-bigint", "num-traits", "thiserror", - "time 0.3.9", + "time 0.3.14", ] [[package]] name = "slab" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32" +checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +dependencies = [ + "autocfg 1.1.0", +] [[package]] name = "smallvec" @@ -6344,9 +6337,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" +checksum = 
"2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1" [[package]] name = "snafu" @@ -6365,7 +6358,7 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "410b26ed97440d90ced3e2488c868d56a86e2064f5d7d6f417909b286afe25e5" dependencies = [ - "heck 0.4.0", + "heck", "proc-macro2", "quote", "syn", @@ -6373,20 +6366,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.3.19" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e" -dependencies = [ - "cfg-if 1.0.0", - "libc", - "winapi 0.3.9", -] - -[[package]] -name = "socket2" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" +checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" dependencies = [ "libc", "winapi 0.3.9", @@ -6394,18 +6376,18 @@ dependencies = [ [[package]] name = "sourcemap" -version = "6.0.2" +version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2ca89636b276071e7276488131f531dbf43ad1c19bc4bd5a04f6a0ce1ddc138" +checksum = "58ad6f449ac2dc2eaa01e766408b76b55fc0a20c842b63aa11a8448caa72f50b" dependencies = [ - "base64 0.11.0", + "base64 0.13.0", "if_chain", "lazy_static", "regex", "rustc_version 0.2.3", "serde", "serde_json", - "url 2.2.2", + "url 2.3.0", ] [[package]] @@ -6418,7 +6400,7 @@ dependencies = [ "enso-profiler", "enso-text", "failure", - "parser", + "parser-scala", "wasm-bindgen-test", ] @@ -6430,19 +6412,13 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" [[package]] name = "spin" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c530c2b0d0bf8b69304b39fe2001993e267461948b890cd037d8ad4293fa1a0d" +checksum = 
"7f6002a767bff9e83f8eeecf883ecb8011875a21ae8da43bffb817a57e78cc09" dependencies = [ - "lock_api 0.4.7", + "lock_api 0.4.9", ] -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - [[package]] name = "strsim" version = "0.10.0" @@ -6451,20 +6427,20 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "strum" -version = "0.24.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e96acfc1b70604b8b2f1ffa4c57e59176c7dbb05d556c71ecd2f5498a1dee7f8" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.24.0" +version = "0.24.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6878079b17446e4d3eba6192bb0a2950d5b14f0ed8424b852310e5a94345d0ef" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ - "heck 0.4.0", + "heck", "proc-macro2", "quote", "rustversion", @@ -6485,9 +6461,9 @@ checksum = "a7973cce6668464ea31f176d85b13c7ab3bba2cb3b77a2ed26abd7801688010a" [[package]] name = "syn" -version = "1.0.98" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd" +checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2" dependencies = [ "proc-macro2", "quote", @@ -6514,24 +6490,9 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.23.13" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3977ec2e0520829be45c8a2df70db2bf364714d8a748316a10c3c35d4d2b01c9" -dependencies = [ - "cfg-if 1.0.0", - "core-foundation-sys", - "libc", - "ntapi", - "once_cell", - "rayon", - "winapi 0.3.9", -] 
- -[[package]] -name = "sysinfo" -version = "0.25.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71eb43e528fdc239f08717ec2a378fdb017dddbc3412de15fff527554591a66c" +checksum = "7890fff842b8db56f2033ebee8f6efe1921475c3830c115995552914fb967580" dependencies = [ "cfg-if 1.0.0", "core-foundation-sys", @@ -6562,7 +6523,7 @@ dependencies = [ "cfg-if 1.0.0", "fastrand", "libc", - "redox_syscall 0.2.13", + "redox_syscall 0.2.16", "remove_dir_all", "winapi 0.3.9", ] @@ -6612,18 +6573,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.31" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" +checksum = "0a99cb8c4b9a8ef0e7907cd3b617cc8dc04d571c4e73c8ae403d80ac160bb122" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.31" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" +checksum = "3a891860d3c8d66fec8e73ddb3765f90082374dbaaa833407b904a94f1a7eb43" dependencies = [ "proc-macro2", "quote", @@ -6652,14 +6613,13 @@ dependencies = [ [[package]] name = "time" -version = "0.3.9" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd" +checksum = "3c3f9a28b618c3a6b9251b6908e9c99e04b9e5c02e6581ccbb67d59c34ef7f9b" dependencies = [ - "itoa 1.0.2", + "itoa 1.0.3", "libc", "num_threads", - "quickcheck", "time-macros", ] @@ -6696,40 +6656,22 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "0.2.25" +version = "1.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6703a273949a90131b290be1fe7b039d0fc884aa1935860dfcbe056f28cd8092" -dependencies = [ - "bytes 0.5.6", - "fnv", 
- "futures-core", - "iovec", - "lazy_static", - "memchr", - "mio 0.6.23", - "num_cpus", - "pin-project-lite 0.1.12", - "slab", - "tokio-macros 0.2.6", -] - -[[package]] -name = "tokio" -version = "1.19.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c51a52ed6686dd62c320f9b89299e9dfb46f730c7a48e635c19f21d116cb1439" +checksum = "7a8325f63a7d4774dd041e363b2409ed1c5cbbd0f867795e661df066b2b0a581" dependencies = [ + "autocfg 1.1.0", "bytes 1.1.0", "libc", "memchr", - "mio 0.8.3", + "mio 0.8.4", "num_cpus", "once_cell", - "parking_lot 0.12.0", - "pin-project-lite 0.2.9", + "parking_lot 0.12.1", + "pin-project-lite", "signal-hook-registry", - "socket2 0.4.4", - "tokio-macros 1.7.0", + "socket2", + "tokio-macros", "tracing", "winapi 0.3.9", ] @@ -6772,26 +6714,15 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" dependencies = [ - "pin-project-lite 0.2.9", - "tokio 1.19.2", + "pin-project-lite", + "tokio", ] [[package]] name = "tokio-macros" -version = "0.2.6" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e44da00bfc73a25f814cd8d7e57a68a5c31b74b3152a0a1d1f590c97ed06265a" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tokio-macros" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7" +checksum = "9724f9a975fb987ef7a3cd9be0350edcbe130698af5b8f7a631e23d42d052484" dependencies = [ "proc-macro2", "quote", @@ -6805,7 +6736,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b" dependencies = [ "native-tls", - "tokio 1.19.2", + "tokio", ] [[package]] @@ -6834,7 +6765,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6" dependencies = [ "rustls 0.19.1", - "tokio 1.19.2", + "tokio", "webpki 0.21.4", ] @@ -6845,19 +6776,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ "rustls 0.20.6", - "tokio 1.19.2", + "tokio", "webpki 0.22.0", ] [[package]] name = "tokio-stream" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df54d54117d6fdc4e4fea40fe1e4e566b3505700e148a6827e59b34b0d2600d9" +checksum = "f6edf2d6bc038a43d31353570e27270603f4648d18f5ed10c0e179abe43255af" dependencies = [ "futures-core", - "pin-project-lite 0.2.9", - "tokio 1.19.2", + "pin-project-lite", + "tokio", ] [[package]] @@ -6896,57 +6827,32 @@ dependencies = [ ] [[package]] -name = "tokio-tls" -version = "0.3.1" +name = "tokio-tungstenite" +version = "0.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a70f4fcd7b3b24fb194f837560168208f669ca8cb70d0c4b862944452396343" +checksum = "f714dd15bead90401d77e04243611caec13726c2408afd5b31901dfcdcb3b181" dependencies = [ - "native-tls", - "tokio 0.2.25", -] - -[[package]] -name = "tokio-util" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be8242891f2b6cbef26a2d7e8605133c2c554cd35b3e4948ea892d6d68436499" -dependencies = [ - "bytes 0.5.6", - "futures-core", - "futures-sink", + "futures-util", "log 0.4.17", - "pin-project-lite 0.1.12", - "tokio 0.2.25", + "tokio", + "tungstenite", ] [[package]] name = "tokio-util" -version = "0.6.10" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" -dependencies = [ - "bytes 1.1.0", - "futures-core", - "futures-sink", - "log 0.4.17", - "pin-project-lite 0.2.9", - "tokio 1.19.2", -] - -[[package]] -name = 
"tokio-util" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f988a1a1adc2fb21f9c12aa96441da33a1728193ae0b95d2be22dbd17fcb4e5c" +checksum = "cc463cd8deddc3770d20f9852143d50bf6094e640b485cb2e189a2099085ff45" dependencies = [ "bytes 1.1.0", "futures-core", "futures-io", "futures-sink", "futures-util", - "pin-project-lite 0.2.9", + "hashbrown", + "pin-project-lite", "slab", - "tokio 1.19.2", + "tokio", "tracing", ] @@ -6961,9 +6867,9 @@ dependencies = [ [[package]] name = "tonic" -version = "0.7.2" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5be9d60db39854b30b835107500cf0aca0b0d14d6e1c3de124217c23a29c2ddb" +checksum = "11cd56bdb54ef93935a6a79dbd1d91f1ebd4c64150fd61654031fd6b8b775c91" dependencies = [ "async-stream", "async-trait", @@ -6972,18 +6878,18 @@ dependencies = [ "bytes 1.1.0", "futures-core", "futures-util", - "h2 0.3.13", + "h2", "http", - "http-body 0.4.5", - "hyper 0.14.18", + "http-body", + "hyper 0.14.20", "hyper-timeout", "percent-encoding 2.1.0", "pin-project", "prost", "prost-derive", - "tokio 1.19.2", + "tokio", "tokio-stream", - "tokio-util 0.7.2", + "tokio-util", "tower", "tower-layer", "tower-service", @@ -6993,19 +6899,19 @@ dependencies = [ [[package]] name = "tower" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a89fd63ad6adf737582df5db40d286574513c69a11dac5214dc3b5603d6713e" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", "indexmap", "pin-project", - "pin-project-lite 0.2.9", + "pin-project-lite", "rand 0.8.5", "slab", - "tokio 1.19.2", - "tokio-util 0.7.2", + "tokio", + "tokio-util", "tower-layer", "tower-service", "tracing", @@ -7013,18 +6919,18 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.3.3" +version = "0.3.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d342c6d58709c0a6d48d48dabbb62d4ef955cf5f0f3bbfd845838e7ae88dbae" +checksum = "3c530c8675c1dbf98facee631536fa116b5fb6382d7dd6dc1b118d970eafe3ba" dependencies = [ "bitflags", "bytes 1.1.0", "futures-core", "futures-util", "http", - "http-body 0.4.5", + "http-body", "http-range-header", - "pin-project-lite 0.2.9", + "pin-project-lite", "tower", "tower-layer", "tower-service", @@ -7038,28 +6944,28 @@ checksum = "343bc9466d3fe6b0f960ef45960509f84480bf4fd96f92901afe7ff3df9d3a62" [[package]] name = "tower-service" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.34" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09" +checksum = "2fce9567bd60a67d08a16488756721ba392f24f29006402881e43b19aac64307" dependencies = [ "cfg-if 1.0.0", "log 0.4.17", - "pin-project-lite 0.2.9", + "pin-project-lite", "tracing-attributes", "tracing-core", ] [[package]] name = "tracing-attributes" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c" +checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2" dependencies = [ "proc-macro2", "quote", @@ -7068,11 +6974,11 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.26" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f54c8ca710e81886d498c2fd3331b56c93aa248d49de2222ad2742247c60072f" +checksum = "5aeea4303076558a00714b823f9ad67d58a3bbda1df83d8827d21193156e22f7" dependencies = [ - "lazy_static", + 
"once_cell", "valuable", ] @@ -7099,16 +7005,16 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.11" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bc28f93baff38037f64e6f43d34cfa1605f27a49c34e8a04c5e78b0babf2596" +checksum = "60db860322da191b40952ad9affe65ea23e7dd6a5c442c2c42865810c6ab8e6b" dependencies = [ "ansi_term", - "lazy_static", "matchers", + "once_cell", "regex", "sharded-slab", - "smallvec 1.8.0", + "smallvec 1.9.0", "thread_local", "tracing", "tracing-core", @@ -7144,6 +7050,34 @@ version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b3e06c9b9d80ed6b745c7159c40b311ad2916abb34a49e9be2653b90db0d8dd" +[[package]] +name = "tungstenite" +version = "0.17.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0" +dependencies = [ + "base64 0.13.0", + "byteorder", + "bytes 1.1.0", + "http", + "httparse", + "log 0.4.17", + "rand 0.8.5", + "sha-1", + "thiserror", + "url 2.3.0", + "utf-8", +] + +[[package]] +name = "twoway" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59b11b2b5241ba34be09c3cc85a36e56e48f9888862e19cedf23336d35316ed1" +dependencies = [ + "memchr", +] + [[package]] name = "typeable" version = "0.1.2" @@ -7200,42 +7134,42 @@ checksum = "07547e3ee45e28326cc23faac56d44f58f16ab23e413db526debce3b0bfd2742" [[package]] name = "unicode-ident" -version = "1.0.0" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d22af068fba1eb5edcb4aea19d382b2a3deb4c8f9d475c589b6ada9e0fd493ee" +checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd" [[package]] name = "unicode-normalization" -version = "0.1.19" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" dependencies = [ "tinyvec", ] [[package]] name = "unicode-script" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58dd944fd05f2f0b5c674917aea8a4df6af84f2d8de3fe8d988b95d28fb8fb09" +checksum = "7d817255e1bed6dfd4ca47258685d14d2bdcfbc64fdc9e3819bd5848057b8ecc" [[package]] name = "unicode-segmentation" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" +checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" [[package]] name = "unicode-width" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "unicode-xid" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "unreachable" @@ -7248,9 +7182,9 @@ dependencies = [ [[package]] name = "unsafe-libyaml" -version = "0.2.2" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "931179334a56395bcf64ba5e0ff56781381c1a5832178280c7d7f91d1679aeb0" +checksum = "c1e5fa573d8ac5f1a856f8d7be41d390ee973daf97c806b2c1a465e4e1406e68" [[package]] name = "untrusted" @@ -7271,22 +7205,27 @@ dependencies = [ [[package]] name = "url" -version = "2.2.2" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +checksum = "22fe195a4f217c25b25cb5058ced57059824a678474874038dc88d211bf508d3" dependencies = [ "form_urlencoded", "idna 0.2.3", - "matches", "percent-encoding 2.1.0", "serde", ] [[package]] name = "urlencoding" -version = "2.1.0" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b90931029ab9b034b300b797048cf23723400aa757e8a2bfb9d748102f9821" +checksum = "e8db7427f936968176eaa7cdf81b7f98b980b18495ec28f1b5791ac3bfe3eea9" + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "utf8-width" @@ -7388,6 +7327,37 @@ dependencies = [ "try-lock", ] +[[package]] +name = "warp" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed7b8be92646fc3d18b06147664ebc5f48d222686cb11a8755e561a735aacc6d" +dependencies = [ + "bytes 1.1.0", + "futures-channel", + "futures-util", + "headers", + "http", + "hyper 0.14.20", + "log 0.4.17", + "mime 0.3.16", + "mime_guess", + "multipart", + "percent-encoding 2.1.0", + "pin-project", + "rustls-pemfile 0.2.1", + "scoped-tls", + "serde", + "serde_json", + "serde_urlencoded", + "tokio", + "tokio-stream", + "tokio-tungstenite", + "tokio-util", + "tower-service", + "tracing", +] + [[package]] name = "wasi" version = "0.9.0+wasi-snapshot-preview1" @@ -7536,9 +7506,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.22.3" +version = "0.22.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d8de8415c823c8abd270ad483c6feeac771fad964890779f9a8cb24fbbc1bf" +checksum = "368bfe657969fb01238bb756d351dcade285e0f6fcbd36dcb23359a5169975be" dependencies = [ "webpki 0.22.0", ] @@ -7562,16 +7532,16 @@ dependencies = [ "tokio-io", "tokio-reactor", "tokio-tcp", - "tokio-tls 0.2.1", + "tokio-tls", 
"unicase 1.4.2", "url 1.7.2", ] [[package]] name = "websocket-codec" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72154d7f42457a99b2832ff093a22a6b303d88c6fe87ca975515cc6c7bc8d21d" +checksum = "2108c9c18a6e746addc085c18cedb66b672e8ffea6a993712decc295b0d8ae55" dependencies = [ "base64 0.13.0", "byteorder", @@ -7579,24 +7549,24 @@ dependencies = [ "httparse", "rand 0.8.5", "sha1 0.6.1", - "tokio-util 0.7.2", + "tokio-util", ] [[package]] name = "websocket-lite" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44a2fea74fd5c7e2720dfd619bf029b46acef012cc619793d6d76d29c0ba8c14" +checksum = "1d6cae39139c6e837afebd915935e7adc8af5c28425935de606d0e8c9d3268f6" dependencies = [ "base64 0.13.0", "bytes 1.1.0", - "futures 0.3.21", + "futures 0.3.24", "native-tls", "rand 0.8.5", - "tokio 1.19.2", + "tokio", "tokio-native-tls", - "tokio-util 0.7.2", - "url 2.2.2", + "tokio-util", + "url 2.3.0", "websocket-codec", ] @@ -7621,21 +7591,22 @@ dependencies = [ [[package]] name = "which" -version = "4.2.5" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c4fb54e6113b6a8772ee41c3404fb0301ac79604489467e0a9ce1f3e97c24ae" +checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b" dependencies = [ "either", - "lazy_static", "libc", + "once_cell", ] [[package]] name = "whoami" -version = "1.2.1" +version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524b58fa5a20a2fb3014dd6358b70e6579692a56ef6fce928834e488f42f65e8" +checksum = "d6631b6a2fd59b1841b622e8f1a7ad241ef0a46f2d580464ce8140ac94cbd571" dependencies = [ + "bumpalo", "wasm-bindgen", "web-sys", ] @@ -7726,15 +7697,6 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" -[[package]] -name 
= "winreg" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69" -dependencies = [ - "winapi 0.3.9", -] - [[package]] name = "winreg" version = "0.10.1" @@ -7744,6 +7706,28 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "wiremock" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc3c7b7557dbfdad6431b5a51196c9110cef9d83f6a9b26699f35cdc0ae113ec" +dependencies = [ + "assert-json-diff", + "async-trait", + "base64 0.13.0", + "deadpool", + "futures 0.3.24", + "futures-timer", + "http-types", + "hyper 0.14.20", + "log 0.4.17", + "once_cell", + "regex", + "serde", + "serde_json", + "tokio", +] + [[package]] name = "ws2_32-sys" version = "0.2.1" @@ -7759,15 +7743,15 @@ name = "wstest" version = "0.1.0" dependencies = [ "base64 0.13.0", - "clap 3.1.18", + "clap 3.1.15", "either", "enso-prelude", - "futures 0.3.21", + "futures 0.3.24", "regex", - "time 0.3.9", - "tokio 1.19.2", + "time 0.3.14", + "tokio", "tokio-stream", - "url 2.2.2", + "url 2.3.0", "websocket-lite", ] @@ -7809,9 +7793,9 @@ dependencies = [ [[package]] name = "zeroize" -version = "1.5.5" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94693807d016b2f2d2e14420eb3bfcca689311ff775dcf113d74ea624b7cdf07" +checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" [[package]] name = "zip" @@ -7838,20 +7822,20 @@ dependencies = [ "bzip2", "constant_time_eq", "crc32fast", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.11", "flate2", "hmac", "pbkdf2", - "sha1 0.10.1", - "time 0.3.9", + "sha1 0.10.5", + "time 0.3.14", "zstd", ] [[package]] name = "zip-extensions" -version = "0.4.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9adcf027b355870f62cabaed021f9028231d2d84cad6e30a7abdaa4dc0390edd" +checksum = 
"a64c3c977bc3434ce2d4bcea8ad3c644672de0f2c402b72b9171ca80a8885d14" dependencies = [ "zip 0.5.13", ] diff --git a/Cargo.toml b/Cargo.toml index f7bad6ec31..867be911a7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,9 +5,9 @@ members = [ "app/gui", "app/gui/enso-profiler-enso-data", - "build", + "build/cli", "build/enso-formatter", - "build/rust-scripts", + "build/deprecated/rust-scripts", "lib/rust/*", "lib/rust/parser/src/syntax/tree/visitor", "lib/rust/parser/jni", @@ -58,12 +58,13 @@ inherits = "test" opt-level = 2 [profile.buildscript] -inherits = "release" -opt-level = 2 +inherits = "dev" +opt-level = 1 lto = false -debug = false +debug = true debug-assertions = true -#[patch."https://github.com/enso-org/ci-build"] -#enso-build = { path = '../ci-build/build' } -#enso-build-cli = { path = '../ci-build/cli' } -#ide-ci = { path = '../ci-build/ci_utils' } + +[workspace.dependencies] +tokio = { version = "=1.20.1", features = ["full", "tracing"] } +console-subscriber = "=0.1.7" +nix = "=0.24.1" # DO NOT BUMP UNTIL NIGHTLY IS UPDATED. Otherwise, it brings too new libc. 
diff --git a/app/gui/Cargo.toml b/app/gui/Cargo.toml index c3569cbc86..a8447a59ee 100644 --- a/app/gui/Cargo.toml +++ b/app/gui/Cargo.toml @@ -34,7 +34,7 @@ ide-view = { path = "view" } ide-view-component-group = { path = "view/component-browser/component-group" } engine-protocol = { path = "controller/engine-protocol" } json-rpc = { path = "../../lib/rust/json-rpc" } -parser = { path = "language/parser" } +parser-scala = { path = "language/parser" } span-tree = { path = "language/span-tree" } bimap = { version = "0.4.0" } console_error_panic_hook = { version = "0.1.6" } diff --git a/app/gui/analytics/src/lib.rs b/app/gui/analytics/src/lib.rs index 226dccb73b..c4afb34bb4 100644 --- a/app/gui/analytics/src/lib.rs +++ b/app/gui/analytics/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/app/gui/config/src/lib.rs b/app/gui/config/src/lib.rs index 63b8e3b539..9e2a834b91 100644 --- a/app/gui/config/src/lib.rs +++ b/app/gui/config/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(trivial_casts)] diff --git a/app/gui/controller/double-representation/Cargo.toml b/app/gui/controller/double-representation/Cargo.toml index 76ac5113cb..f593f02b32 100644 --- a/app/gui/controller/double-representation/Cargo.toml +++ b/app/gui/controller/double-representation/Cargo.toml @@ -10,7 +10,7 @@ crate-type = ["cdylib", "rlib"] [dependencies] ast = { version = "0.1.0", path = "../../language/ast/impl" } engine-protocol = { version = "0.1.0", path = "../engine-protocol" } -parser = { version = "0.1.0", path = "../../language/parser" } +parser-scala = { version = "0.1.0", path = 
"../../language/parser" } enso-data-structures = { path = "../../../../lib/rust/data-structures" } enso-logger = { path = "../../../../lib/rust/logger" } enso-prelude = { path = "../../../../lib/rust/prelude" } diff --git a/app/gui/controller/double-representation/src/alias_analysis.rs b/app/gui/controller/double-representation/src/alias_analysis.rs index 67a7fe6065..da973d4f1a 100644 --- a/app/gui/controller/double-representation/src/alias_analysis.rs +++ b/app/gui/controller/double-representation/src/alias_analysis.rs @@ -54,7 +54,7 @@ impl IdentifierUsage { /// Says whether the identifier occurrence introduces it into scope or uses it from scope. #[allow(missing_docs)] -#[derive(Clone, Copy, Debug, Display, PartialEq)] +#[derive(Clone, Copy, Debug, Display, PartialEq, Eq)] pub enum OccurrenceKind { Used, Introduced, @@ -63,7 +63,7 @@ pub enum OccurrenceKind { /// If the current context in the AST processor is a pattern context. // TODO [mwu] Refer to the specification once it is merged. #[allow(missing_docs)] -#[derive(Clone, Copy, Debug, Display, PartialEq)] +#[derive(Clone, Copy, Debug, Display, PartialEq, Eq)] pub enum Context { NonPattern, Pattern, @@ -388,7 +388,7 @@ mod tests { } /// Runs the test for the given test case description. - fn run_case(parser: &parser::Parser, case: Case) { + fn run_case(parser: &parser_scala::Parser, case: Case) { DEBUG!("\n===========================================================================\n"); DEBUG!("Case: " case.code); let ast = parser.parse_line_ast(&case.code).unwrap(); @@ -399,7 +399,7 @@ mod tests { } /// Runs the test for the test case expressed using markdown notation. See `Case` for details. 
- fn run_markdown_case(parser: &parser::Parser, marked_code: impl AsRef) { + fn run_markdown_case(parser: &parser_scala::Parser, marked_code: impl AsRef) { DEBUG!("Running test case for " marked_code.as_ref()); let case = Case::from_markdown(marked_code.as_ref()); run_case(parser, case) @@ -407,7 +407,7 @@ mod tests { #[wasm_bindgen_test] fn test_alias_analysis() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let test_cases = [ "»foo«", "«five» = 5", diff --git a/app/gui/controller/double-representation/src/connection.rs b/app/gui/controller/double-representation/src/connection.rs index 32cc1cb312..b13c10cf4c 100644 --- a/app/gui/controller/double-representation/src/connection.rs +++ b/app/gui/controller/double-representation/src/connection.rs @@ -19,7 +19,7 @@ use ast::crumbs::Crumbs; // ================ /// A connection endpoint. -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct Endpoint { /// Id of the node where the endpoint is located. pub node: Id, @@ -61,7 +61,7 @@ pub type Destination = Endpoint; /// Describes a connection between two endpoints: from `source` to `destination`. 
#[allow(missing_docs)] -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct Connection { pub source: Source, pub destination: Destination, @@ -159,7 +159,7 @@ mod tests { use ast::crumbs; use ast::crumbs::InfixCrumb; - use parser::Parser; + use parser_scala::Parser; struct TestRun { graph: GraphInfo, diff --git a/app/gui/controller/double-representation/src/definition.rs b/app/gui/controller/double-representation/src/definition.rs index cf1157bb2e..689aecd307 100644 --- a/app/gui/controller/double-representation/src/definition.rs +++ b/app/gui/controller/double-representation/src/definition.rs @@ -11,7 +11,7 @@ use ast::crumbs::InfixCrumb; use ast::crumbs::Located; use ast::known; use ast::opr; -use parser::Parser; +use parser_scala::Parser; @@ -91,7 +91,7 @@ pub struct CannotFindChild(Crumb); // ================= /// Describes the kind of code block (scope) to which definition can belong. -#[derive(Clone, Copy, Debug, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ScopeKind { /// Module scope is a file's top-level block. Root, @@ -594,7 +594,7 @@ mod tests { #[wasm_bindgen_test] fn definition_name_tests() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let ast = parser.parse_line_ast("Foo.Bar.baz").unwrap(); let name = DefinitionName::from_ast(&ast).unwrap(); @@ -609,14 +609,14 @@ mod tests { #[wasm_bindgen_test] fn definition_name_rejecting_incomplete_names() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let ast = parser.parse_line_ast("Foo. 
.baz").unwrap(); assert!(DefinitionName::from_ast(&ast).is_none()); } #[wasm_bindgen_test] fn definition_info_name() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let ast = parser.parse_line_ast("Foo.bar a b c = baz").unwrap(); let definition = DefinitionInfo::from_root_line_ast(&ast).unwrap(); @@ -626,7 +626,7 @@ mod tests { #[wasm_bindgen_test] fn located_definition_args() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let ast = parser.parse_line_ast("foo bar baz = a + b + c").unwrap(); let definition = DefinitionInfo::from_root_line_ast(&ast).unwrap(); let (arg0, arg1) = definition.args.expect_tuple(); @@ -668,7 +668,7 @@ mod tests { #[wasm_bindgen_test] fn list_definition_test() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); // TODO [mwu] // Due to a parser bug, extension methods defining operators cannot be currently @@ -723,7 +723,7 @@ mod tests { ("foo = bar\n\nmain = bar", 2), ]; - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let main_id = Id::new_plain_name("main"); for (program, expected_line_index) in program_to_expected_main_pos { let module = parser.parse_module(program, default()).unwrap(); @@ -749,19 +749,19 @@ main = add foo bar"; - let module = parser::Parser::new_or_panic().parse_module(program, default()).unwrap(); + let module = parser_scala::Parser::new_or_panic().parse_module(program, default()).unwrap(); let check_def = |id, expected_body| { let definition = module::get_definition(&module, &id).unwrap(); assert_eq!(definition.body().repr(), expected_body); }; let check_not_found = |id| assert!(module::get_definition(&module, &id).is_err()); - check_def(Id::new_plain_names(&["main", "add"]), "a + b"); - check_def(Id::new_plain_names(&["main", "baz"]), "\n subbaz arg = 4"); - check_def(Id::new_plain_names(&["main", "baz", 
"subbaz"]), "4"); + check_def(Id::new_plain_names(["main", "add"]), "a + b"); + check_def(Id::new_plain_names(["main", "baz"]), "\n subbaz arg = 4"); + check_def(Id::new_plain_names(["main", "baz", "subbaz"]), "4"); // Node are not definitions - check_not_found(Id::new_plain_names(&["main", "foo"])); - check_not_found(Id::new_plain_names(&["main", "baz2", "subbaz2"])); + check_not_found(Id::new_plain_names(["main", "foo"])); + check_not_found(Id::new_plain_names(["main", "baz2", "subbaz2"])); } } diff --git a/app/gui/controller/double-representation/src/graph.rs b/app/gui/controller/double-representation/src/graph.rs index 838a93d9d3..e30a7b083e 100644 --- a/app/gui/controller/double-representation/src/graph.rs +++ b/app/gui/controller/double-representation/src/graph.rs @@ -214,14 +214,14 @@ mod tests { wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser); /// Takes a program with main definition in root and returns main's graph. - fn main_graph(parser: &parser::Parser, program: impl Str) -> GraphInfo { + fn main_graph(parser: &parser_scala::Parser, program: impl Str) -> GraphInfo { let module = parser.parse_module(program.into(), default()).unwrap(); let name = DefinitionName::new_plain("main"); let main = module.def_iter().find_by_name(&name).unwrap(); GraphInfo::from_definition(main.item) } - fn find_graph(parser: &parser::Parser, program: impl Str, name: impl Str) -> GraphInfo { + fn find_graph(parser: &parser_scala::Parser, program: impl Str, name: impl Str) -> GraphInfo { let module = parser.parse_module(program.into(), default()).unwrap(); let crumbs = name.into().split('.').map(DefinitionName::new_plain).collect(); let id = Id { crumbs }; @@ -231,7 +231,7 @@ mod tests { #[wasm_bindgen_test] fn detect_a_node() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); // Each of these programs should have a `main` definition with a single `2+2` node. 
let programs = vec![ "main = 2+2", @@ -249,7 +249,7 @@ mod tests { } } - fn new_expression_node(parser: &parser::Parser, expression: &str) -> NodeInfo { + fn new_expression_node(parser: &parser_scala::Parser, expression: &str) -> NodeInfo { let node_ast = parser.parse(expression.to_string(), default()).unwrap(); let line_ast = expect_single_line(&node_ast).clone(); NodeInfo::from_main_line_ast(&line_ast).unwrap() @@ -274,7 +274,7 @@ mod tests { #[wasm_bindgen_test] fn add_node_to_graph_with_single_line() { let program = "main = print \"hello\""; - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let mut graph = main_graph(&parser, program); let nodes = graph.nodes(); assert_eq!(nodes.len(), 1); @@ -301,7 +301,7 @@ mod tests { foo = node foo a = not_node print "hello""#; - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let mut graph = main_graph(&parser, program); let node_to_add0 = new_expression_node(&parser, "4 + 4"); @@ -360,7 +360,7 @@ mod tests { node2 foo = 5"; - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let mut graph = main_graph(&parser, program); let id2 = graph.nodes()[0].id(); @@ -388,7 +388,7 @@ foo = 5"; #[wasm_bindgen_test] fn multiple_node_graph() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let program = r" main = ## Faux docstring @@ -419,7 +419,7 @@ main = #[wasm_bindgen_test] fn removing_node_from_graph() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let program = r" main = foo = 2 + 2 @@ -445,7 +445,7 @@ main = #[wasm_bindgen_test] fn removing_last_node_from_graph() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let program = r" main = foo = 2 + 2"; @@ -465,7 +465,7 @@ main = #[wasm_bindgen_test] fn 
editing_nodes_expression_in_graph() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let program = r" main = foo = 2 + 2 diff --git a/app/gui/controller/double-representation/src/lib.rs b/app/gui/controller/double-representation/src/lib.rs index 330bb9f453..fa5f14dc7e 100644 --- a/app/gui/controller/double-representation/src/lib.rs +++ b/app/gui/controller/double-representation/src/lib.rs @@ -9,6 +9,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] @@ -205,7 +206,7 @@ mod tests { use crate::definition::DefinitionProvider; use ast::macros::DocumentationCommentInfo; - use parser::Parser; + use parser_scala::Parser; /// Expect `main` method, where first line is a documentation comment. @@ -230,7 +231,7 @@ mod tests { #[wasm_bindgen_test] fn parse_single_line_comment() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); // Typical single line case. let code = r#" @@ -267,7 +268,7 @@ main = #[wasm_bindgen_test] fn parse_multi_line_comment() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let code = r#" main = ## First line diff --git a/app/gui/controller/double-representation/src/module.rs b/app/gui/controller/double-representation/src/module.rs index c3afb7fd31..55af890d8f 100644 --- a/app/gui/controller/double-representation/src/module.rs +++ b/app/gui/controller/double-representation/src/module.rs @@ -573,7 +573,7 @@ impl Info { // TODO [mwu] // Ideally we should not require parser but should use some sane way of generating AST from // the `ImportInfo` value. 
- pub fn add_import(&mut self, parser: &parser::Parser, to_add: ImportInfo) -> usize { + pub fn add_import(&mut self, parser: &parser_scala::Parser, to_add: ImportInfo) -> usize { // Find last import that is not "after" the added one lexicographically. let previous_import = self.enumerate_imports().take_while(|(_, import)| to_add.target > import.target).last(); @@ -588,7 +588,7 @@ impl Info { pub fn add_module_import( &mut self, here: &QualifiedName, - parser: &parser::Parser, + parser: &parser_scala::Parser, to_add: &QualifiedName, ) { let is_here = to_add == here; @@ -648,7 +648,7 @@ impl Info { &mut self, method: definition::ToAdd, location: Placement, - parser: &parser::Parser, + parser: &parser_scala::Parser, ) -> FallibleResult { let no_indent = 0; let definition_ast = method.ast(no_indent, parser)?; @@ -687,7 +687,7 @@ impl From for Info { // ================= /// Structure describing where to place something being added to the module. -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum Placement { /// Place at the beginning of the module. 
Begin, @@ -878,7 +878,7 @@ mod tests { #[wasm_bindgen_test] fn import_listing() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let expect_imports = |code: &str, expected: &[&[&str]]| { let ast = parser.parse_module(code, default()).unwrap(); let info = Info { ast }; @@ -901,7 +901,7 @@ mod tests { #[wasm_bindgen_test] fn import_adding_and_removing() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let code = "import Foo.Bar.Baz"; let ast = parser.parse_module(code, default()).unwrap(); let mut info = Info { ast }; @@ -930,7 +930,7 @@ mod tests { #[wasm_bindgen_test] fn implicit_method_resolution() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let module_name = QualifiedName::from_all_segments(&["local", "ProjectName", "Main"]).unwrap(); let expect_find = |method: &MethodPointer, code, expected: &definition::Id| { @@ -1002,7 +1002,7 @@ other def = last def = inline expression"; - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let module = parser.parse_module(code, default()).unwrap(); let module = Info { ast: module }; @@ -1014,14 +1014,14 @@ last def = inline expression"; let span = definition_span(&module.ast, &id).unwrap(); assert!(code[span].ends_with("inline expression")); - let id = definition::Id::new_plain_names(&["other", "nested"]); + let id = definition::Id::new_plain_names(["other", "nested"]); let span = definition_span(&module.ast, &id).unwrap(); assert!(code[span].ends_with("nested body")); } #[wasm_bindgen_test] fn add_method() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let module = r#"Main.method1 arg = body main = Main.method1 10"#; diff --git a/app/gui/controller/double-representation/src/refactorings/collapse.rs 
b/app/gui/controller/double-representation/src/refactorings/collapse.rs index 1e67dcb433..f3ec46b45a 100644 --- a/app/gui/controller/double-representation/src/refactorings/collapse.rs +++ b/app/gui/controller/double-representation/src/refactorings/collapse.rs @@ -17,7 +17,7 @@ use crate::node::NodeInfo; use ast::crumbs::Located; use ast::BlockLine; -use parser::Parser; +use parser_scala::Parser; use std::collections::BTreeSet; diff --git a/app/gui/controller/double-representation/src/text.rs b/app/gui/controller/double-representation/src/text.rs index c587c38dd8..f70d54f98f 100644 --- a/app/gui/controller/double-representation/src/text.rs +++ b/app/gui/controller/double-representation/src/text.rs @@ -185,7 +185,7 @@ mod test { use ast::HasIdMap; use enso_prelude::default; - use parser::Parser; + use parser_scala::Parser; use uuid::Uuid; /// A sample text edit used to test "text api" properties. @@ -266,9 +266,10 @@ mod test { /// Pretty prints the code of module with a single function named `main`. The lines should /// contain unindented main function's block lines. 
fn to_main(lines: impl IntoIterator>) -> String { + use std::fmt::Write; let mut ret = "main = ".to_string(); for line in lines { - ret.push_str(&format!("\n {}", line.as_ref())) + write!(ret, "\n {}", line.as_ref()).unwrap(); } ret } diff --git a/app/gui/controller/engine-model/src/lib.rs b/app/gui/controller/engine-model/src/lib.rs index 13a6caaec8..2453da5166 100644 --- a/app/gui/controller/engine-model/src/lib.rs +++ b/app/gui/controller/engine-model/src/lib.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] diff --git a/app/gui/controller/engine-protocol/Cargo.toml b/app/gui/controller/engine-protocol/Cargo.toml index de763da5b8..ca51a5f7b3 100644 --- a/app/gui/controller/engine-protocol/Cargo.toml +++ b/app/gui/controller/engine-protocol/Cargo.toml @@ -35,12 +35,12 @@ wasm-bindgen-test = { version = "0.3.8" } enso-web = { path = "../../../../lib/rust/web" } [build-dependencies] -enso-build-utilities = { path = "../../../../build/build-utils" } -bytes = { version = "0.5.4" } +enso-build-utilities = { path = "../../../../build/deprecated/build-utils" } +bytes = { version = "1.1.0" } flatc-rust = { version = "0.1.2" } futures = { version = "0.3.1" } -reqwest = { version = "0.10.1" } -tokio = { version = "0.2.10", features = ["macros"] } +reqwest = { version = "0.11.12" } +tokio = { workspace = true } # Zip is needed because the build script downloads and extracts artifacts from the Engine. 
-zip = { version = "0.5.0" } -zip-extensions = { version = "0.4.0" } +zip = { version = "0.6.2" } +zip-extensions = { version = "0.6.1" } diff --git a/app/gui/controller/engine-protocol/src/binary/client.rs b/app/gui/controller/engine-protocol/src/binary/client.rs index 2a41b3ea9a..405b75b1a3 100644 --- a/app/gui/controller/engine-protocol/src/binary/client.rs +++ b/app/gui/controller/engine-protocol/src/binary/client.rs @@ -39,7 +39,7 @@ pub type RpcError = json_rpc::error::RpcError; // ==================== /// The notifications that binary protocol client may receive. -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum Notification { /// A new data has been sent for a visualization. VisualizationUpdate { diff --git a/app/gui/controller/engine-protocol/src/binary/message.rs b/app/gui/controller/engine-protocol/src/binary/message.rs index f270aea54f..ff1150b01d 100644 --- a/app/gui/controller/engine-protocol/src/binary/message.rs +++ b/app/gui/controller/engine-protocol/src/binary/message.rs @@ -63,7 +63,7 @@ impl MessageFromServer { /// Identifies the visualization in the update message. 
#[allow(missing_docs)] -#[derive(Clone, Debug, Copy, PartialEq)] +#[derive(Clone, Debug, Copy, PartialEq, Eq)] pub struct VisualisationContext { pub visualization_id: Uuid, pub context_id: Uuid, @@ -77,7 +77,7 @@ pub enum ErrorPayload { } #[allow(missing_docs)] -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct FileSegment { pub path: LSPath, pub byte_offset: u64, @@ -85,7 +85,7 @@ pub struct FileSegment { } #[allow(missing_docs)] -#[derive(Clone, Debug, Default, PartialEq)] +#[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct EnsoDigest { pub bytes: Vec, } @@ -97,7 +97,7 @@ pub struct EnsoDigest { // ================ #[allow(missing_docs)] -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum ToServerPayloadOwned { InitSession { client_id: Uuid }, WriteFile { path: LSPath, contents: Vec }, diff --git a/app/gui/controller/engine-protocol/src/language_server/types.rs b/app/gui/controller/engine-protocol/src/language_server/types.rs index ab8c7d2d97..da197f8f34 100644 --- a/app/gui/controller/engine-protocol/src/language_server/types.rs +++ b/app/gui/controller/engine-protocol/src/language_server/types.rs @@ -108,7 +108,7 @@ impl Path { // ==================== /// Notification generated by the Language Server. -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, IntoStaticStr)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, IntoStaticStr, Eq)] #[serde(tag = "method", content = "params")] pub enum Notification { /// Filesystem event occurred for a watched path. @@ -162,7 +162,7 @@ pub enum Notification { /// Sent from the server to the client to inform about a failure during execution of an execution /// context. 
-#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, Eq)] #[allow(missing_docs)] #[serde(rename_all = "camelCase")] pub struct ExecutionFailed { @@ -171,7 +171,7 @@ pub struct ExecutionFailed { } /// Sent from server to the client to inform about a failure during execution of a visualisation. -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, Eq)] #[allow(missing_docs)] #[serde(rename_all = "camelCase")] pub struct VisualisationEvaluationFailed { @@ -190,7 +190,7 @@ pub struct VisualisationEvaluationFailed { /// Sent from the server to the client to inform about new information for certain expressions /// becoming available. -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, Eq)] #[allow(missing_docs)] #[serde(rename_all = "camelCase")] pub struct ExpressionUpdates { @@ -199,13 +199,13 @@ pub struct ExpressionUpdates { } /// An update about the computed expression. -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, Eq)] #[allow(missing_docs)] #[serde(rename_all = "camelCase")] pub struct ExpressionUpdate { pub expression_id: ExpressionId, #[serde(rename = "type")] // To avoid collision with the `type` keyword. - pub typename: Option, + pub typename: Option, pub method_pointer: Option, pub profiling_info: Vec, pub from_cache: bool, @@ -214,7 +214,7 @@ pub struct ExpressionUpdate { /// Profiling information on an executed expression. It is implemented as a union as additional /// types of information will be added in the future. -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, Eq)] #[allow(missing_docs)] // Not sure what the future variants will be, and implementing Copy is not essential for this. 
#[allow(missing_copy_implementations)] @@ -223,7 +223,7 @@ pub enum ProfilingInfo { ExecutionTime { nano_time: u64 }, } -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, Eq)] #[allow(missing_docs)] #[serde(tag = "type")] pub enum ExpressionUpdatePayload { @@ -251,7 +251,7 @@ pub enum ExpressionUpdatePayload { // ======================= /// Sent from the server to the client to inform about a status of execution. -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, Eq)] #[allow(missing_docs)] #[serde(rename_all = "camelCase")] pub struct ExecutionStatus { @@ -260,7 +260,7 @@ pub struct ExecutionStatus { } /// The type of diagnostic message. -#[derive(Clone, Copy, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Copy, Debug, Deserialize, PartialEq, Serialize, Eq)] #[allow(missing_docs)] pub enum DiagnosticType { Error, @@ -276,7 +276,7 @@ pub enum DiagnosticType { // a builtin node. Then, to locate the error in the code, you can use the stack field with a stack // trace to find the first element with non-empty location (as the head of the stack will point to // the builtin element). -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, Eq)] #[allow(missing_docs)] #[serde(rename_all = "camelCase")] pub struct Diagnostic { @@ -290,7 +290,7 @@ pub struct Diagnostic { /// The frame of the stack trace. If the error refer to a builtin node, the path and location fields /// will be empty. -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, Eq)] #[allow(missing_docs)] #[serde(rename_all = "camelCase")] pub struct StackTraceElement { @@ -308,7 +308,7 @@ pub struct StackTraceElement { // === FileEvent === /// The `file/event` notification parameters. 
-#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Serialize, Deserialize)] #[allow(missing_docs)] pub struct FileEvent { @@ -317,7 +317,7 @@ pub struct FileEvent { } /// Describes kind of filesystem event (was the file created or deleted, etc.) -#[derive(Clone, Copy, Debug, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] #[derive(Serialize, Deserialize)] #[allow(missing_docs)] pub enum FileEventKind { diff --git a/app/gui/controller/engine-protocol/src/lib.rs b/app/gui/controller/engine-protocol/src/lib.rs index c08d39d650..3584fc6fbf 100644 --- a/app/gui/controller/engine-protocol/src/lib.rs +++ b/app/gui/controller/engine-protocol/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/app/gui/controller/engine-protocol/src/project_manager.rs b/app/gui/controller/engine-protocol/src/project_manager.rs index 95e0ed168b..9f1c663e2f 100644 --- a/app/gui/controller/engine-protocol/src/project_manager.rs +++ b/app/gui/controller/engine-protocol/src/project_manager.rs @@ -90,7 +90,7 @@ trait API { // ============= /// Address consisting of host and port. -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] pub struct IpWithSocket { /// Host name. pub host: String, @@ -131,7 +131,7 @@ impl From for String { } /// Project information, such as name, its id and last time it was opened. -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct ProjectMetadata { /// Project's name. @@ -164,14 +164,14 @@ pub mod response { use super::*; /// Response of `list_projects` and `list_samples`. 
- #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] + #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] pub struct ProjectList { /// List of projects. pub projects: Vec, } /// Response of `create_project`. - #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq)] + #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct CreateProject { /// Created project uuid. @@ -179,7 +179,7 @@ pub mod response { } /// Response of `open_project`. - #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] + #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct OpenProject { /// The version of the started language server represented by a semver version string. diff --git a/app/gui/controller/src/lib.rs b/app/gui/controller/src/lib.rs index 13a6caaec8..2453da5166 100644 --- a/app/gui/controller/src/lib.rs +++ b/app/gui/controller/src/lib.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] diff --git a/app/gui/enso-profiler-enso-data/src/bin/api_events_to_profile.rs b/app/gui/enso-profiler-enso-data/src/bin/api_events_to_profile.rs index 39ed731afd..a627ad331d 100644 --- a/app/gui/enso-profiler-enso-data/src/bin/api_events_to_profile.rs +++ b/app/gui/enso-profiler-enso-data/src/bin/api_events_to_profile.rs @@ -17,6 +17,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/app/gui/enso-profiler-enso-data/src/bin/message_beanpoles.rs b/app/gui/enso-profiler-enso-data/src/bin/message_beanpoles.rs index ed9f1009f6..47f4d233fa 100644 --- a/app/gui/enso-profiler-enso-data/src/bin/message_beanpoles.rs +++ 
b/app/gui/enso-profiler-enso-data/src/bin/message_beanpoles.rs @@ -34,6 +34,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/app/gui/enso-profiler-enso-data/src/lib.rs b/app/gui/enso-profiler-enso-data/src/lib.rs index 6f9fced000..b708db08c3 100644 --- a/app/gui/enso-profiler-enso-data/src/lib.rs +++ b/app/gui/enso-profiler-enso-data/src/lib.rs @@ -5,6 +5,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/app/gui/language/ast/impl/src/assoc.rs b/app/gui/language/ast/impl/src/assoc.rs index 81b967cf6d..8318fae2a7 100644 --- a/app/gui/language/ast/impl/src/assoc.rs +++ b/app/gui/language/ast/impl/src/assoc.rs @@ -12,7 +12,7 @@ use regex::Regex; /// Operator associativity. #[allow(missing_docs)] -#[derive(Clone, Copy, Debug, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum Assoc { Left, Right, diff --git a/app/gui/language/ast/impl/src/known.rs b/app/gui/language/ast/impl/src/known.rs index 773313f1c9..e3326f2337 100644 --- a/app/gui/language/ast/impl/src/known.rs +++ b/app/gui/language/ast/impl/src/known.rs @@ -26,7 +26,7 @@ use serde::Serializer; /// Provides `Deref` implementation that allows accessing underlying shape `T` value. 
#[derive(CloneRef, Derivative)] #[derivative(Clone(bound = ""))] -#[derive(Debug, Eq, PartialEq)] +#[derive(Debug, PartialEq, Eq)] pub struct KnownAst { ast: Ast, phantom: PhantomData, diff --git a/app/gui/language/ast/impl/src/lib.rs b/app/gui/language/ast/impl/src/lib.rs index 30013d5b1e..8b2062ea60 100644 --- a/app/gui/language/ast/impl/src/lib.rs +++ b/app/gui/language/ast/impl/src/lib.rs @@ -1,12 +1,12 @@ // === Features === #![feature(associated_type_bounds)] -#![feature(bool_to_option)] #![feature(generators, generator_trait)] #![feature(trivial_bounds)] #![feature(type_alias_impl_trait)] // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use crate::prelude::*; @@ -950,7 +950,7 @@ pub struct Modified { // === Tokenizer === /// An enum of valid Ast tokens. -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum Token<'a> { Off(usize), Chr(char), @@ -1862,7 +1862,7 @@ mod tests { let ab = Ast::prefix(a, b); let abc = Ast::prefix(ab, c); // repr is `a b c` - assert_eq!((&abc).iter().count(), 2); // for App's two children + assert_eq!((abc).iter().count(), 2); // for App's two children assert_eq!(abc.iter_recursive().count(), 5); // for 2 Apps and 3 Vars } diff --git a/app/gui/language/ast/macros/src/lib.rs b/app/gui/language/ast/macros/src/lib.rs index f6ef5b66a7..846d843e03 100644 --- a/app/gui/language/ast/macros/src/lib.rs +++ b/app/gui/language/ast/macros/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/app/gui/language/parser/Cargo.toml b/app/gui/language/parser/Cargo.toml index a598f24f68..3458b005d8 100644 --- a/app/gui/language/parser/Cargo.toml +++ b/app/gui/language/parser/Cargo.toml @@ -1,5 
+1,5 @@ [package] -name = "parser" +name = "parser-scala" version = "0.1.0" authors = ["Enso Team "] edition = "2021" @@ -31,11 +31,11 @@ wasm-bindgen = { version = "0.2.78" } wasm-bindgen-test = { version = "0.3.8" } [build-dependencies] -enso-build-utilities = { path = "../../../../build/build-utils" } -bytes = { version = "0.5.4" } +ide-ci = { path = "../../../../build/ci_utils" } +bytes = { version = "1.1.0" } futures = { version = "0.3.1" } -reqwest = { version = "0.10.1" } -tokio = { version = "0.2.10", features = ["macros"] } +reqwest = { version = "0.11.12" } +tokio = { workspace = true } [target.'cfg(not(target_arch = "wasm32"))'.dependencies] websocket = "0.23.0" diff --git a/app/gui/language/parser/build.rs b/app/gui/language/parser/build.rs index 1eda9eccc1..c8aa3c1f12 100644 --- a/app/gui/language/parser/build.rs +++ b/app/gui/language/parser/build.rs @@ -5,15 +5,7 @@ // === Features === #![feature(option_result_contains)] -use std::io::prelude::*; - -use enso_build_utilities::absolute_path; -use enso_build_utilities::targeting_wasm; -use enso_build_utilities::PathRef; -use std::fs; -use std::fs::create_dir_all; -use std::fs::File; -use std::path::PathBuf; +use ide_ci::prelude::*; @@ -49,7 +41,7 @@ pub fn parser_url(version: &ParserVersion) -> reqwest::Url { // =================== /// Parser version described as commit hash from `enso` repository. -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct ParserVersion { pub commit: String, } @@ -85,60 +77,40 @@ struct ParserProvider { impl ParserProvider { /// Creates a provider that obtains given parser version to a given path. - pub fn new(version: ParserVersion, parser_path: impl PathRef) -> ParserProvider { + pub fn new(version: ParserVersion, parser_path: impl AsRef) -> ParserProvider { let parser_path = PathBuf::from(parser_path.as_ref()); ParserProvider { version, parser_path } } /// Downloads contents of JS parser into memory. 
- pub async fn download(&self) -> bytes::Bytes { + pub async fn download(&self) -> Result { let url = parser_url(&self.version); - let get_error = format!("Failed to get response from {}.", url); - let download_error = format!("Failed to download contents of {}.", url); - let server_error = format!("Server replied with error when getting {}.", url); - let response = reqwest::get(url).await.expect(&get_error); - let response = response.error_for_status().expect(&server_error); - response.bytes().await.expect(&download_error) + ide_ci::io::download_all(url.clone()).await.context("Failed to download the parser.") } /// Stores JS parser into file, after patching with a `PARSER_PREAMBLE`. - pub fn patch_and_store(&self, js_parser: bytes::Bytes) { - let display_path = self.parser_path.display(); - let open_error = format!("Failed to open {}.", display_path); - let write_error = format!("Failed to write {}.", display_path); - let flush_error = format!("Failed to flush {}.", display_path); - - let mut file = File::create(&self.parser_path).expect(&open_error); - file.write_all(PARSER_PREAMBLE.as_bytes()).expect(&write_error); - file.write_all(&js_parser).expect(&write_error); - file.flush().expect(&flush_error); - } - - /// Ensures that target's parent directory exists. - pub fn prepare_target_location(&self) { - let parent_directory = - self.parser_path.parent().expect("Unable to access parent directory."); - let create_dir_error = - format!("Failed to create directory: {}.", parent_directory.display()); - create_dir_all(parent_directory).expect(&create_dir_error); + pub async fn patch_and_store(&self, js_parser: bytes::Bytes) -> Result { + ide_ci::fs::tokio::write_iter(&self.parser_path, [ + PARSER_PREAMBLE.as_bytes(), + js_parser.as_ref(), + ]) + .await } /// Places required parser version in the target location. 
- pub async fn run(&self) { - self.prepare_target_location(); - let parent_directory = - self.parser_path.parent().expect("Unable to access parent directory."); - let fingerprint = parent_directory.join("parser.fingerprint"); - let opt_version = fs::read_to_string(&fingerprint); + pub async fn run(&self) -> Result { + let fingerprint = self.parser_path.with_file_name("parser.fingerprint"); + let opt_version = ide_ci::fs::tokio::read_to_string(&fingerprint).await; let changed = match opt_version { Err(_) => true, Ok(hash) => hash != PARSER_COMMIT, }; if changed { - let parser_js = self.download().await; - self.patch_and_store(parser_js); - fs::write(&fingerprint, PARSER_COMMIT).expect("Unable to write parser fingerprint."); + let parser_js = self.download().await?; + self.patch_and_store(parser_js).await?; + ide_ci::fs::tokio::write(&fingerprint, PARSER_COMMIT).await?; } + Ok(()) } } @@ -149,12 +121,12 @@ impl ParserProvider { // ========== #[tokio::main] -async fn main() -> std::result::Result<(), Box> { - if targeting_wasm() { +async fn main() -> Result { + if ide_ci::programs::cargo::build_env::targeting_wasm() { let required_version = ParserVersion::required(); - let parser_path = absolute_path(PARSER_PATH)?; + let parser_path = Path::new(PARSER_PATH).absolutize()?; let provider = ParserProvider::new(required_version, &parser_path); - provider.run().await; + provider.run().await?; } println!("cargo:rerun-if-changed=build.rs"); println!("cargo:rerun-if-changed={}", PARSER_PATH); diff --git a/app/gui/language/parser/src/main.rs b/app/gui/language/parser/src/bin/run-scala-parser.rs similarity index 88% rename from app/gui/language/parser/src/main.rs rename to app/gui/language/parser/src/bin/run-scala-parser.rs index defe8a85c8..d8f52ed66e 100644 --- a/app/gui/language/parser/src/main.rs +++ b/app/gui/language/parser/src/bin/run-scala-parser.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] 
+#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use enso_prelude::*; @@ -15,7 +16,7 @@ fn main() { let program = std::env::args().nth(1).unwrap_or(default_input); DEBUG!("Will parse: " program); - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let output = parser.parse(program, default()); match output { Ok(result) => DEBUG!("Parser responded with: {result:?}"), @@ -27,7 +28,7 @@ fn main() { let program = std::env::args().nth(1).unwrap_or(default_input); DEBUG!("Will parse: " program); - let parser = parser::DocParser::new_or_panic(); + let parser = parser_scala::DocParser::new_or_panic(); let output = parser.generate_html_docs(program); match output { Ok(result) => DEBUG!("Doc parser responded with: {result:?}"), @@ -39,7 +40,7 @@ fn main() { let program = std::env::args().nth(1).unwrap_or(default_input); DEBUG!("Will parse: " program); - let parser = parser::DocParser::new_or_panic(); + let parser = parser_scala::DocParser::new_or_panic(); let output = parser.generate_html_doc_pure(program); match output { Ok(result) => DEBUG!("Doc parser responded with: {result:?}"), diff --git a/app/gui/language/parser/src/lib.rs b/app/gui/language/parser/src/lib.rs index 9c6fbd1bdf..4fddd3f6be 100644 --- a/app/gui/language/parser/src/lib.rs +++ b/app/gui/language/parser/src/lib.rs @@ -9,6 +9,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/app/gui/language/parser/tests/ast.rs b/app/gui/language/parser/tests/ast.rs index 9f2624244c..0e7731d6c3 100644 --- a/app/gui/language/parser/tests/ast.rs +++ b/app/gui/language/parser/tests/ast.rs @@ -10,7 +10,7 @@ #![deny(non_ascii_idents)] #![warn(unsafe_code)] -use parser::prelude::*; +use parser_scala::prelude::*; use ast::opr; use ast::opr::GeneralizedInfix; @@ 
-25,7 +25,7 @@ wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser); #[wasm_bindgen_test] pub fn to_assignment_test() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let is_assignment = |code: &str| { let ast = parser.parse(code.to_string(), default()).unwrap(); let line = expect_single_line(&ast); @@ -45,7 +45,7 @@ pub fn to_assignment_test() { #[wasm_bindgen_test] pub fn generalized_infix_test() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let make_gen_infix = |code: &str| { let ast = parser.parse(code.to_string(), default()).unwrap(); let line = expect_single_line(&ast); @@ -83,7 +83,7 @@ pub fn flatten_prefix_test() { }) } - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let case = |code: &str, expected_pieces: Vec<&str>| { let ast = parser.parse(code.into(), default()).unwrap(); let ast = ast::test_utils::expect_single_line(&ast); @@ -110,7 +110,7 @@ pub fn flatten_infix_test() { }) } - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let case = |code: &str, target: &str, expected_pieces: Vec<&str>| { let ast = parser.parse(code.into(), default()).unwrap(); let ast = ast::test_utils::expect_single_line(&ast); diff --git a/app/gui/language/parser/tests/bugs.rs b/app/gui/language/parser/tests/bugs.rs index 47b6d34402..bb9abb73a9 100644 --- a/app/gui/language/parser/tests/bugs.rs +++ b/app/gui/language/parser/tests/bugs.rs @@ -16,7 +16,7 @@ wasm_bindgen_test_configure!(run_in_browser); fn no_doc_found() { let input = String::from("type Foo\n type Bar"); let program = std::env::args().nth(1).unwrap_or(input); - let parser = parser::DocParser::new_or_panic(); + let parser = parser_scala::DocParser::new_or_panic(); let gen_code = parser.generate_html_docs(program).unwrap(); // gen_code should be empty. 
assert_eq!(gen_code.len(), 22, "Generated length differs from the expected\"{}\"", gen_code); @@ -24,7 +24,7 @@ fn no_doc_found() { #[wasm_bindgen_test] fn extension_operator_methods() { - let ast = parser::Parser::new_or_panic().parse_line_ast("Int.+").unwrap(); + let ast = parser_scala::Parser::new_or_panic().parse_line_ast("Int.+").unwrap(); use ast::*; if let Shape::Infix(Infix { larg: _larg, loff: _loff, opr, roff: _roff, rarg }, ..) = diff --git a/app/gui/language/parser/tests/crumbs.rs b/app/gui/language/parser/tests/crumbs.rs index 18aa70d215..7ad905912b 100644 --- a/app/gui/language/parser/tests/crumbs.rs +++ b/app/gui/language/parser/tests/crumbs.rs @@ -6,7 +6,7 @@ use enso_prelude::*; use ast::crumbs::Crumbable; use ast::HasRepr; -use parser::Parser; +use parser_scala::Parser; use wasm_bindgen_test::wasm_bindgen_test; use wasm_bindgen_test::wasm_bindgen_test_configure; diff --git a/app/gui/language/parser/tests/doc-gen.rs b/app/gui/language/parser/tests/doc-gen.rs index 3772dd15af..0ed454bc19 100644 --- a/app/gui/language/parser/tests/doc-gen.rs +++ b/app/gui/language/parser/tests/doc-gen.rs @@ -2,7 +2,7 @@ #![deny(non_ascii_idents)] #![warn(unsafe_code)] -use parser::DocParser; +use parser_scala::DocParser; use wasm_bindgen_test::wasm_bindgen_test; use wasm_bindgen_test::wasm_bindgen_test_configure; diff --git a/app/gui/language/parser/tests/id_map.rs b/app/gui/language/parser/tests/id_map.rs index 801475fdcc..42e9de66d5 100644 --- a/app/gui/language/parser/tests/id_map.rs +++ b/app/gui/language/parser/tests/id_map.rs @@ -2,10 +2,10 @@ #![deny(non_ascii_idents)] #![warn(unsafe_code)] -use parser::prelude::*; +use parser_scala::prelude::*; use ast::HasIdMap; -use parser::Parser; +use parser_scala::Parser; use wasm_bindgen_test::wasm_bindgen_test; use wasm_bindgen_test::wasm_bindgen_test_configure; diff --git a/app/gui/language/parser/tests/macros.rs b/app/gui/language/parser/tests/macros.rs index ff30812a7b..91dcc8b381 100644 --- 
a/app/gui/language/parser/tests/macros.rs +++ b/app/gui/language/parser/tests/macros.rs @@ -2,9 +2,9 @@ #![deny(non_ascii_idents)] #![warn(unsafe_code)] -use parser::prelude::*; +use parser_scala::prelude::*; -use parser::Parser; +use parser_scala::Parser; use wasm_bindgen_test::wasm_bindgen_test; use wasm_bindgen_test::wasm_bindgen_test_configure; diff --git a/app/gui/language/parser/tests/parsing.rs b/app/gui/language/parser/tests/parsing.rs index 7fd29c8a56..b41fda5fd7 100644 --- a/app/gui/language/parser/tests/parsing.rs +++ b/app/gui/language/parser/tests/parsing.rs @@ -5,11 +5,11 @@ #![warn(unsafe_code)] use ast::*; -use parser::prelude::*; +use parser_scala::prelude::*; use ast::test_utils::expect_shape; -use parser::api::Metadata; -use parser::api::ParsedSourceFile; +use parser_scala::api::Metadata; +use parser_scala::api::ParsedSourceFile; use serde::de::DeserializeOwned; use serde::Deserialize; use serde::Serialize; @@ -40,13 +40,13 @@ fn assert_opr>(ast: &Ast, name: StringLike) { assert_eq!(*actual, expected); } -fn roundtrip_program_with(parser: &parser::Parser, program: &str) { +fn roundtrip_program_with(parser: &parser_scala::Parser, program: &str) { let ast = parser.parse(program.to_string(), Default::default()).unwrap(); assert_eq!(ast.repr(), program, "{:#?}", ast); } fn roundtrip_program(program: &str) { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); roundtrip_program_with(&parser, program); } @@ -71,7 +71,7 @@ impl Metadata for FauxMetadata {} /// Persists parser (which is expensive to construct, so we want to reuse it /// between tests. Additionally, hosts a number of helper methods. struct Fixture { - parser: parser::Parser, + parser: parser_scala::Parser, } impl Fixture { @@ -79,7 +79,7 @@ impl Fixture { /// Create a new fixture, obtaining a default parser. 
fn new() -> Fixture { - Fixture { parser: parser::Parser::new_or_panic() } + Fixture { parser: parser_scala::Parser::new_or_panic() } } /// Program is expected to be single line module. The line's Shape subtype @@ -151,7 +151,7 @@ impl Fixture { fn deserialize_blank(&mut self) { let expect_blank = |_: &Blank| {}; - let _ast = self.test_shape("_", expect_blank); + self.test_shape("_", expect_blank); } fn deserialize_var(&mut self) { @@ -490,7 +490,7 @@ fn block_roundtrip() { /// Test case for https://github.com/enso-org/ide/issues/296 #[wasm_bindgen_test] fn nested_macros() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); // Generate nested brackets. Stop at 8 because it gets slower and slower. // At 12 the deserialization fails on WASM. @@ -532,7 +532,7 @@ fn dealing_with_invalid_metadata() { let serialized_text_metadata = serde_json::to_string(&metadata).unwrap(); assert!(serde_json::from_str::>(&serialized_text_metadata).is_err()); - let parsed_file = parser::api::ParsedSourceFile { ast, metadata }; + let parsed_file = parser_scala::api::ParsedSourceFile { ast, metadata }; let generated = parsed_file.serialize().unwrap(); let expected_generated = r#"variable1 diff --git a/app/gui/language/parser/tests/web.rs b/app/gui/language/parser/tests/web.rs index 1a11837d07..27683dc1cc 100644 --- a/app/gui/language/parser/tests/web.rs +++ b/app/gui/language/parser/tests/web.rs @@ -5,8 +5,8 @@ use enso_prelude::*; use ast::Ast; -use parser::api::ParsedSourceFile; -use parser::Parser; +use parser_scala::api::ParsedSourceFile; +use parser_scala::Parser; use uuid::Uuid; use wasm_bindgen_test::wasm_bindgen_test; use wasm_bindgen_test::wasm_bindgen_test_configure; diff --git a/app/gui/language/span-tree/Cargo.toml b/app/gui/language/span-tree/Cargo.toml index 19c3ff2c69..6aea518400 100644 --- a/app/gui/language/span-tree/Cargo.toml +++ b/app/gui/language/span-tree/Cargo.toml @@ -13,5 +13,5 @@ enso-profiler = { path = 
"../../../../lib/rust/profiler" } failure = { version = "0.1.6" } [dev-dependencies] -parser = { path = "../parser" } +parser-scala = { path = "../parser" } wasm-bindgen-test = { version = "0.3.8" } diff --git a/app/gui/language/span-tree/example/src/lib.rs b/app/gui/language/span-tree/example/src/lib.rs index facc80a38a..d4c6f51a1f 100644 --- a/app/gui/language/span-tree/example/src/lib.rs +++ b/app/gui/language/span-tree/example/src/lib.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use ast::crumbs::PatternMatchCrumb::*; diff --git a/app/gui/language/span-tree/src/action.rs b/app/gui/language/span-tree/src/action.rs index d23bb0d8c6..0f9f812f81 100644 --- a/app/gui/language/span-tree/src/action.rs +++ b/app/gui/language/span-tree/src/action.rs @@ -239,7 +239,7 @@ mod test { use crate::SpanTree; use ast::HasRepr; - use parser::Parser; + use parser_scala::Parser; use wasm_bindgen_test::wasm_bindgen_test; #[wasm_bindgen_test] diff --git a/app/gui/language/span-tree/src/generate.rs b/app/gui/language/span-tree/src/generate.rs index 5728e0f530..4839589465 100644 --- a/app/gui/language/span-tree/src/generate.rs +++ b/app/gui/language/span-tree/src/generate.rs @@ -626,7 +626,7 @@ mod test { use ast::crumbs::SectionSidesCrumb; use ast::Crumbs; use ast::IdMap; - use parser::Parser; + use parser_scala::Parser; use wasm_bindgen_test::wasm_bindgen_test; use wasm_bindgen_test::wasm_bindgen_test_configure; diff --git a/app/gui/language/span-tree/src/lib.rs b/app/gui/language/span-tree/src/lib.rs index e0eba1d319..ff5bd9d19a 100644 --- a/app/gui/language/span-tree/src/lib.rs +++ b/app/gui/language/span-tree/src/lib.rs @@ -15,6 +15,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === 
#![warn(missing_docs)] diff --git a/app/gui/language/span-tree/src/node.rs b/app/gui/language/span-tree/src/node.rs index 5d1ff47485..3d8625f7de 100644 --- a/app/gui/language/span-tree/src/node.rs +++ b/app/gui/language/span-tree/src/node.rs @@ -383,7 +383,7 @@ impl<'a> IntoIterator for &'a Crumbs { type Item = &'a Crumb; type IntoIter = std::slice::Iter<'a, Crumb>; fn into_iter(self) -> Self::IntoIter { - (&*self.vec).iter() + (*self.vec).iter() } } diff --git a/app/gui/src/controller/graph.rs b/app/gui/src/controller/graph.rs index edd7914ee5..43ec588b74 100644 --- a/app/gui/src/controller/graph.rs +++ b/app/gui/src/controller/graph.rs @@ -23,7 +23,7 @@ use double_representation::node::MainLine; use double_representation::node::NodeInfo; use double_representation::node::NodeLocation; use engine_protocol::language_server; -use parser::Parser; +use parser_scala::Parser; use span_tree::action::Action; use span_tree::action::Actions; use span_tree::generate::context::CalledMethodInfo; @@ -395,7 +395,7 @@ impl EndpointInfo { } /// Iterates over sibling ports located after this endpoint in its chain. 
- pub fn chained_ports_after<'a>(&'a self) -> impl Iterator + 'a { + pub fn chained_ports_after(&self) -> impl Iterator + '_ { let parent_port = self.parent_chain_port(); let ports_after = parent_port.map(move |parent_port| { parent_port @@ -993,7 +993,7 @@ pub mod tests { use double_representation::project; use engine_protocol::language_server::MethodPointer; use enso_text::index::*; - use parser::Parser; + use parser_scala::Parser; use wasm_bindgen_test::wasm_bindgen_test; @@ -1238,7 +1238,7 @@ main = bar b = 5 print foo" .into(); - test.data.graph_id = definition::Id::new_plain_names(&["main", "foo"]); + test.data.graph_id = definition::Id::new_plain_names(["main", "foo"]); test.run(|graph| async move { let expression = "new_node"; graph.add_node(NewNodeInfo::new_pushed_back(expression)).unwrap(); @@ -1346,7 +1346,7 @@ main = // Not using multi-line raw string literals, as we don't want IntelliJ to automatically // strip the trailing whitespace in the lines. test.data.code = "main =\n foo a =\n bar b = 5\n print foo".into(); - test.data.graph_id = definition::Id::new_plain_names(&["main", "foo", "bar"]); + test.data.graph_id = definition::Id::new_plain_names(["main", "foo", "bar"]); test.run(|graph| async move { let expression = "new_node"; graph.add_node(NewNodeInfo::new_pushed_back(expression)).unwrap(); diff --git a/app/gui/src/controller/graph/executed.rs b/app/gui/src/controller/graph/executed.rs index 0b9e53b70f..c2ed135ed5 100644 --- a/app/gui/src/controller/graph/executed.rs +++ b/app/gui/src/controller/graph/executed.rs @@ -52,7 +52,7 @@ pub struct NoResolvedMethod(double_representation::node::Id); /// Notification about change in the executed graph. /// /// It may pertain either the state of the graph itself or the notifications from the execution. -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum Notification { /// The notification passed from the graph controller. 
Graph(controller::graph::Notification), @@ -376,7 +376,7 @@ pub mod tests { impl MockData { pub fn controller(&self) -> Handle { let logger = Logger::new("test"); - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let repository = Rc::new(model::undo_redo::Repository::new(&logger)); let module = self.module.plain(&parser, repository); let method = self.graph.method(); diff --git a/app/gui/src/controller/ide.rs b/app/gui/src/controller/ide.rs index b0503c9b4a..976bfe199a 100644 --- a/app/gui/src/controller/ide.rs +++ b/app/gui/src/controller/ide.rs @@ -8,7 +8,7 @@ use crate::prelude::*; use crate::notification; use mockall::automock; -use parser::Parser; +use parser_scala::Parser; // ============== diff --git a/app/gui/src/controller/ide/desktop.rs b/app/gui/src/controller/ide/desktop.rs index ece1a61a5a..cde6c71d2d 100644 --- a/app/gui/src/controller/ide/desktop.rs +++ b/app/gui/src/controller/ide/desktop.rs @@ -16,7 +16,7 @@ use engine_protocol::project_manager; use engine_protocol::project_manager::MissingComponentAction; use engine_protocol::project_manager::ProjectMetadata; use engine_protocol::project_manager::ProjectName; -use parser::Parser; +use parser_scala::Parser; diff --git a/app/gui/src/controller/ide/plain.rs b/app/gui/src/controller/ide/plain.rs index 2cbc5af60c..563b34ba70 100644 --- a/app/gui/src/controller/ide/plain.rs +++ b/app/gui/src/controller/ide/plain.rs @@ -11,7 +11,7 @@ use crate::model::project::synchronized::Properties; use double_representation::project; use engine_protocol::project_manager::ProjectName; -use parser::Parser; +use parser_scala::Parser; diff --git a/app/gui/src/controller/module.rs b/app/gui/src/controller/module.rs index 89b4c37312..33fb4028a3 100644 --- a/app/gui/src/controller/module.rs +++ b/app/gui/src/controller/module.rs @@ -12,7 +12,7 @@ use double_representation::project; use double_representation::text::apply_code_change_to_id_map; use 
engine_protocol::language_server; use engine_protocol::types::Sha3_224; -use parser::Parser; +use parser_scala::Parser; @@ -212,7 +212,7 @@ mod test { use ast::Ast; use ast::BlockLine; use enso_text::index::*; - use parser::Parser; + use parser_scala::Parser; use uuid::Uuid; use wasm_bindgen_test::wasm_bindgen_test; diff --git a/app/gui/src/controller/project.rs b/app/gui/src/controller/project.rs index 8c86e40d45..338e44c05c 100644 --- a/app/gui/src/controller/project.rs +++ b/app/gui/src/controller/project.rs @@ -11,7 +11,7 @@ use engine_protocol::language_server::MethodPointer; use engine_protocol::language_server::Path; use enso_frp::web::platform; use enso_frp::web::platform::Platform; -use parser::Parser; +use parser_scala::Parser; @@ -260,7 +260,7 @@ mod tests { #[wasm_bindgen_test] fn adding_missing_main() { let _ctx = TestWithLocalPoolExecutor::set_up(); - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); let mut data = crate::test::mock::Unified::new(); let module_name = data.module_path.module_name(); let main_ptr = main_method_ptr(data.project_name.clone(), &data.module_path); diff --git a/app/gui/src/controller/searcher.rs b/app/gui/src/controller/searcher.rs index 60fa9c5683..b0f6831510 100644 --- a/app/gui/src/controller/searcher.rs +++ b/app/gui/src/controller/searcher.rs @@ -26,7 +26,7 @@ use enso_text::Byte; use enso_text::Location; use enso_text::Rope; use flo_stream::Subscriber; -use parser::Parser; +use parser_scala::Parser; // ============== @@ -550,7 +550,7 @@ impl Searcher { Data::new_with_edited_node( project.qualified_name(), &graph.graph(), - &*database, + &database, node_id, )? 
} else { diff --git a/app/gui/src/controller/searcher/component/builder.rs b/app/gui/src/controller/searcher/component/builder.rs index 3747d94554..b6f52dda8a 100644 --- a/app/gui/src/controller/searcher/component/builder.rs +++ b/app/gui/src/controller/searcher/component/builder.rs @@ -228,7 +228,7 @@ impl List { if self.module_groups.contains_key(&module_id) { self.module_groups.get_mut(&module_id) } else { - let groups = ModuleGroups::new(module_id, &*db_entry).ok()?; + let groups = ModuleGroups::new(module_id, &db_entry).ok()?; if let Some(module) = module.parent_module() { if let Some(parent_groups) = self.lookup_module_group(db, &module) { parent_groups.submodules.push(groups.content.clone_ref()) diff --git a/app/gui/src/controller/searcher/component/group.rs b/app/gui/src/controller/searcher/component/group.rs index 4896a6f3bf..d5ce771354 100644 --- a/app/gui/src/controller/searcher/component/group.rs +++ b/app/gui/src/controller/searcher/component/group.rs @@ -76,7 +76,7 @@ pub struct Group { impl Deref for Group { type Target = Data; fn deref(&self) -> &Self::Target { - &*self.data + &self.data } } @@ -206,7 +206,7 @@ impl Group { fn sort_by_match(&self) { let mut entries = self.entries.borrow_mut(); entries.sort_by(|a, b| { - Self::entry_match_ordering(&*a.match_info.borrow(), &*b.match_info.borrow()) + Self::entry_match_ordering(&a.match_info.borrow(), &b.match_info.borrow()) }); } diff --git a/app/gui/src/controller/searcher/component/hardcoded.rs b/app/gui/src/controller/searcher/component/hardcoded.rs index 0906941417..67e0ce3cb4 100644 --- a/app/gui/src/controller/searcher/component/hardcoded.rs +++ b/app/gui/src/controller/searcher/component/hardcoded.rs @@ -106,7 +106,7 @@ impl Snippet { /// documentation parser cannot be created or the argument fails to parse as valid /// documentation. 
fn with_documentation(mut self, documentation: &str) -> Self { - let doc_parser = parser::DocParser::new().unwrap(); + let doc_parser = parser_scala::DocParser::new().unwrap(); let doc_string = documentation.to_string(); let documentation_html = doc_parser.generate_html_doc_pure(doc_string); self.documentation_html = Some(documentation_html.unwrap()); diff --git a/app/gui/src/controller/text.rs b/app/gui/src/controller/text.rs index 14a53ccf64..1722cb8af4 100644 --- a/app/gui/src/controller/text.rs +++ b/app/gui/src/controller/text.rs @@ -78,7 +78,7 @@ impl Handle { /// Get clone of file path handled by this controller. pub fn file_path(&self) -> &FilePath { match &self.file { - FileHandle::PlainText { path, .. } => &*path, + FileHandle::PlainText { path, .. } => path, FileHandle::Module { controller } => controller.model.path().file_path(), } } @@ -177,7 +177,7 @@ mod test { use crate::executor::test_utils::TestWithLocalPoolExecutor; use enso_text::index::*; - use parser::Parser; + use parser_scala::Parser; use wasm_bindgen_test::wasm_bindgen_test; fn setup_mock_project(setup: impl FnOnce(&mut model::project::MockAPI)) -> model::Project { @@ -233,7 +233,7 @@ mod test { #[wasm_bindgen_test] fn obtain_text_controller_for_module() { - let parser = parser::Parser::new_or_panic(); + let parser = parser_scala::Parser::new_or_panic(); TestWithLocalPoolExecutor::set_up().run_task(async move { let code = "2 + 2".to_string(); let undo = default(); diff --git a/app/gui/src/controller/upload.rs b/app/gui/src/controller/upload.rs index 0debb6792d..f33d5d4153 100644 --- a/app/gui/src/controller/upload.rs +++ b/app/gui/src/controller/upload.rs @@ -287,7 +287,7 @@ impl NodeFromDroppedFileHandler { } async fn establish_remote_file_name(&self, original_name: &str) -> FallibleResult { - pick_non_colliding_name(&*self.project.json_rpc(), &self.data_path(), original_name).await + pick_non_colliding_name(&self.project.json_rpc(), &self.data_path(), original_name).await } async fn 
ensure_data_directory_exists(&self) -> FallibleResult { diff --git a/app/gui/src/lib.rs b/app/gui/src/lib.rs index 188bf93b35..63a3cf1782 100644 --- a/app/gui/src/lib.rs +++ b/app/gui/src/lib.rs @@ -32,22 +32,21 @@ #![feature(arc_unwrap_or_clone)] #![feature(async_closure)] #![feature(associated_type_bounds)] -#![feature(bool_to_option)] #![feature(cell_update)] #![feature(drain_filter)] #![feature(exact_size_is_empty)] #![feature(iter_order_by)] #![feature(option_result_contains)] #![feature(trait_alias)] -#![feature(result_into_ok_or_err)] #![feature(result_option_inspect)] #![feature(map_try_insert)] #![feature(assert_matches)] -#![feature(cell_filter_map)] #![feature(hash_drain_filter)] +#![feature(unwrap_infallible)] // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/app/gui/src/model/execution_context.rs b/app/gui/src/model/execution_context.rs index 1bb4f128d3..67d368b1e6 100644 --- a/app/gui/src/model/execution_context.rs +++ b/app/gui/src/model/execution_context.rs @@ -175,7 +175,7 @@ impl ComputedValueInfoRegistry { /// Binary data can be accessed through `Deref` or `AsRef` implementations. /// /// The inner storage is private and users should not make any assumptions about it. -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct VisualizationUpdateData(Vec); impl VisualizationUpdateData { @@ -302,7 +302,7 @@ impl From<&QualifiedMethodPointer> for MethodPointer { pub type VisualizationId = Uuid; /// Description of the visualization setup. -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct Visualization { /// Unique identifier of this visualization. 
pub id: VisualizationId, diff --git a/app/gui/src/model/module.rs b/app/gui/src/model/module.rs index 3e5399d166..29e812e37b 100644 --- a/app/gui/src/model/module.rs +++ b/app/gui/src/model/module.rs @@ -12,9 +12,9 @@ use double_representation::module::ImportId; use double_representation::project; use engine_protocol::language_server::MethodPointer; use flo_stream::Subscriber; -use parser::api::ParsedSourceFile; -use parser::api::SourceFile; -use parser::Parser; +use parser_scala::api::ParsedSourceFile; +use parser_scala::api::SourceFile; +use parser_scala::Parser; use serde::Deserialize; use serde::Serialize; @@ -182,7 +182,7 @@ impl Path { /// Get the file path. pub fn file_path(&self) -> &FilePath { - &*self.file_path + &self.file_path } /// Gives the file name for the given module name. @@ -342,7 +342,7 @@ pub struct Metadata { rest: serde_json::Value, } -impl parser::api::Metadata for Metadata {} +impl parser_scala::api::Metadata for Metadata {} impl Default for Metadata { fn default() -> Self { @@ -356,7 +356,7 @@ impl Default for Metadata { } /// Project-level metadata. It is stored as part of the project's main module's metadata. -#[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Default, Deserialize, PartialEq, Eq, Serialize)] pub struct ProjectMetadata { /// The execution context of the displayed graph editor. 
#[serde(default, deserialize_with = "enso_prelude::deserialize_or_default")] @@ -746,7 +746,8 @@ pub mod test { pub fn plain_from_code(code: impl Into) -> Module { let urm = default(); - MockData { code: code.into(), ..default() }.plain(&parser::Parser::new_or_panic(), urm) + MockData { code: code.into(), ..default() } + .plain(&parser_scala::Parser::new_or_panic(), urm) } #[test] diff --git a/app/gui/src/model/module/plain.rs b/app/gui/src/model/module/plain.rs index 513ca5b313..39d1711042 100644 --- a/app/gui/src/model/module/plain.rs +++ b/app/gui/src/model/module/plain.rs @@ -18,9 +18,9 @@ use crate::notification; use double_representation::definition::DefinitionInfo; use double_representation::module::ImportId; use flo_stream::Subscriber; -use parser::api::ParsedSourceFile; -use parser::api::SourceFile; -use parser::Parser; +use parser_scala::api::ParsedSourceFile; +use parser_scala::api::SourceFile; +use parser_scala::Parser; diff --git a/app/gui/src/model/module/synchronized.rs b/app/gui/src/model/module/synchronized.rs index c6c4ed8050..5031c59f38 100644 --- a/app/gui/src/model/module/synchronized.rs +++ b/app/gui/src/model/module/synchronized.rs @@ -24,8 +24,8 @@ use enso_text::text; use enso_text::Location; use enso_text::Range; use flo_stream::Subscriber; -use parser::api::SourceFile; -use parser::Parser; +use parser_scala::api::SourceFile; +use parser_scala::Parser; diff --git a/app/gui/src/model/project.rs b/app/gui/src/model/project.rs index 5d0b12dee8..bcca309517 100644 --- a/app/gui/src/model/project.rs +++ b/app/gui/src/model/project.rs @@ -14,7 +14,7 @@ use engine_protocol::language_server; use engine_protocol::language_server::ContentRoot; use flo_stream::Subscriber; use mockall::automock; -use parser::Parser; +use parser_scala::Parser; use uuid::Uuid; @@ -168,14 +168,14 @@ pub type Synchronized = synchronized::Project; // ==================== /// Notification emitted by the project model. 
-#[derive(Clone, Copy, Debug, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum Notification { /// One of the backend connections has been lost. ConnectionLost(BackendConnection), } /// Denotes one of backend connections used by a project. -#[derive(Clone, Copy, Debug, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum BackendConnection { /// The text connection used to transfer JSON messages. LanguageServerJson, diff --git a/app/gui/src/model/project/synchronized.rs b/app/gui/src/model/project/synchronized.rs index 9698fe4d87..f5b3f45d2f 100644 --- a/app/gui/src/model/project/synchronized.rs +++ b/app/gui/src/model/project/synchronized.rs @@ -24,7 +24,7 @@ use engine_protocol::project_manager; use engine_protocol::project_manager::MissingComponentAction; use engine_protocol::project_manager::ProjectName; use flo_stream::Subscriber; -use parser::Parser; +use parser_scala::Parser; @@ -279,7 +279,7 @@ impl Project { let language_server = &*language_server_rpc; let suggestion_db = SuggestionDatabase::create_synchronized(language_server); let suggestion_db = Rc::new(suggestion_db.await.map_err(&wrap)?); - let content_roots = ContentRoots::new_from_connection(&logger, &*language_server); + let content_roots = ContentRoots::new_from_connection(&logger, language_server); let content_roots = Rc::new(content_roots); let notifications = notification::Publisher::default(); let urm = Rc::new(model::undo_redo::Manager::new(&logger)); diff --git a/app/gui/src/model/suggestion_database.rs b/app/gui/src/model/suggestion_database.rs index b449b471e1..bdce84df61 100644 --- a/app/gui/src/model/suggestion_database.rs +++ b/app/gui/src/model/suggestion_database.rs @@ -117,7 +117,7 @@ pub struct NoSuchEntry(pub SuggestionId); // ==================== /// Notification about change in a suggestion database, -#[derive(Clone, Copy, Debug, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum Notification { /// The database has been updated. 
Updated, diff --git a/app/gui/src/model/suggestion_database/entry.rs b/app/gui/src/model/suggestion_database/entry.rs index 0498ec3b14..928d1f3e78 100644 --- a/app/gui/src/model/suggestion_database/entry.rs +++ b/app/gui/src/model/suggestion_database/entry.rs @@ -75,7 +75,7 @@ im_string_newtype! { } /// A fully qualified name of an [`Entry`]. -#[derive(Debug, Default, Clone, PartialEq)] +#[derive(Debug, Default, Clone, PartialEq, Eq)] #[allow(missing_docs)] pub struct QualifiedName { pub segments: Vec, @@ -514,7 +514,7 @@ impl Entry { docs_html } else { docs.map(|docs| { - let parser = parser::DocParser::new(); + let parser = parser_scala::DocParser::new(); match parser { Ok(p) => { let output = p.generate_html_doc_pure((*docs).to_string()); @@ -718,7 +718,7 @@ where I: IntoIterator { use language_server::types::DocSection; doc_sections.into_iter().find_map(|section| match section { DocSection::Keyed { key, body } if key == ICON_DOC_SECTION_KEY => { - let icon_name = IconName::from_snake_case(&body); + let icon_name = IconName::from_snake_case(body); let as_snake_case = icon_name.to_snake_case(); if as_snake_case.as_str() != body.as_str() || !body.is_case(Case::Snake) { let msg = format!( @@ -960,7 +960,7 @@ mod test { /// Test the results of converting a [`QualifiedName`] to a string using various methods. 
#[test] fn qualified_name_to_string() { - let qualified_name = QualifiedName::from_iter(&["Foo", "Bar"]); + let qualified_name = QualifiedName::from_iter(["Foo", "Bar"]); assert_eq!(qualified_name.to_string(), "Foo.Bar".to_string()); assert_eq!(String::from(qualified_name), "Foo.Bar".to_string()); } diff --git a/app/gui/src/model/suggestion_database/example.rs b/app/gui/src/model/suggestion_database/example.rs index cd1aa61c05..31926f12c1 100644 --- a/app/gui/src/model/suggestion_database/example.rs +++ b/app/gui/src/model/suggestion_database/example.rs @@ -5,7 +5,7 @@ use crate::prelude::*; use double_representation::definition; use double_representation::definition::DefinitionName; use double_representation::module; -use parser::Parser; +use parser_scala::Parser; @@ -83,7 +83,7 @@ impl Example { /// Creates a pretty documentation from hardcoded inner text. pub fn documentation_html_from(inner: &str) -> String { - return format!("

{}

", inner); + format!("

{}

", inner) } // ========================= diff --git a/app/gui/src/model/undo_redo.rs b/app/gui/src/model/undo_redo.rs index 00d51ea694..bce1a11eff 100644 --- a/app/gui/src/model/undo_redo.rs +++ b/app/gui/src/model/undo_redo.rs @@ -275,7 +275,10 @@ impl Repository { /// Get currently opened transaction. If there is none, open a new one. pub fn transaction(self: &Rc, name: impl Into) -> Rc { - self.open_transaction(name).into_ok_or_err() + match self.open_transaction(name) { + Ok(transaction) => transaction, + Err(transaction) => transaction, + } } /// Borrow given stack. diff --git a/app/gui/src/presenter/graph.rs b/app/gui/src/presenter/graph.rs index e249d01e32..2817a426fa 100644 --- a/app/gui/src/presenter/graph.rs +++ b/app/gui/src/presenter/graph.rs @@ -489,7 +489,7 @@ impl Graph { // Position initialization should go before emitting `update_data` event. update_with_gap <- view.default_y_gap_between_nodes.sample(&update_view); eval update_with_gap ((gap) model.initialize_nodes_positions(*gap)); - update_data <- update_view.map(f_!([model] match ViewUpdate::new(&*model) { + update_data <- update_view.map(f_!([model] match ViewUpdate::new(&model) { Ok(update) => Rc::new(update), Err(err) => { error!("Failed to update view: {err:?}"); diff --git a/app/gui/src/presenter/graph/state.rs b/app/gui/src/presenter/graph/state.rs index 12e5df48ff..92f3942d8d 100644 --- a/app/gui/src/presenter/graph/state.rs +++ b/app/gui/src/presenter/graph/state.rs @@ -717,7 +717,7 @@ impl<'a> ViewChange<'a> { mod tests { use super::*; use engine_protocol::language_server::MethodPointer; - use parser::Parser; + use parser_scala::Parser; fn create_test_node(expression: &str) -> controller::graph::Node { let parser = Parser::new_or_panic(); diff --git a/app/gui/src/presenter/graph/visualization/manager.rs b/app/gui/src/presenter/graph/visualization/manager.rs index 1d4923c11b..59b8398900 100644 --- a/app/gui/src/presenter/graph/visualization/manager.rs +++ 
b/app/gui/src/presenter/graph/visualization/manager.rs @@ -74,7 +74,7 @@ pub enum Notification { // ============== /// Describes the state of the visualization on the Language Server. -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] #[allow(clippy::large_enum_variant)] pub enum Status { /// Not attached and no ongoing background work. @@ -156,7 +156,7 @@ impl Default for Status { /// Desired visualization described using unresolved view metadata structure. #[allow(missing_docs)] -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct Desired { pub visualization_id: VisualizationId, pub expression_id: ast::Id, diff --git a/app/gui/src/presenter/searcher/provider.rs b/app/gui/src/presenter/searcher/provider.rs index 150719aec5..205d22848c 100644 --- a/app/gui/src/presenter/searcher/provider.rs +++ b/app/gui/src/presenter/searcher/provider.rs @@ -186,7 +186,7 @@ impl list_view::entry::ModelProvider for Component let is_enterable = component.can_be_entered(); let match_info = component.match_info.borrow(); let label = component.label(); - let highlighted = bytes_of_matched_letters(&*match_info, &label); + let highlighted = bytes_of_matched_letters(&match_info, &label); let icon = match component.data { component::Data::FromDatabase { entry, .. 
} => { let kind = entry.kind; diff --git a/app/gui/src/test.rs b/app/gui/src/test.rs index 34fde38df6..7259dc4d5a 100644 --- a/app/gui/src/test.rs +++ b/app/gui/src/test.rs @@ -139,7 +139,7 @@ pub mod mock { pub module_path: model::module::Path, pub suggestions: HashMap, pub context_id: model::execution_context::Id, - pub parser: parser::Parser, + pub parser: parser_scala::Parser, code: String, id_map: ast::IdMap, metadata: crate::model::module::Metadata, @@ -177,7 +177,7 @@ pub mod mock { metadata: default(), context_id: CONTEXT_ID, root_definition: definition_name(), - parser: parser::Parser::new_or_panic(), + parser: parser_scala::Parser::new_or_panic(), logger, } } diff --git a/app/gui/src/transport/web.rs b/app/gui/src/transport/web.rs index 8b2106f55f..0ee6bc72ad 100644 --- a/app/gui/src/transport/web.rs +++ b/app/gui/src/transport/web.rs @@ -66,7 +66,7 @@ impl SendingError { // ============= /// Describes the current state of WebSocket connection. -#[derive(Clone, Copy, Debug, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum State { /// Socket has been created. The connection is not yet open. 
Connecting, diff --git a/app/gui/tests/language_server.rs b/app/gui/tests/language_server.rs index 4c74120dd0..2bf56f3bd3 100644 --- a/app/gui/tests/language_server.rs +++ b/app/gui/tests/language_server.rs @@ -323,8 +323,8 @@ async fn file_operations_test() { // Edit file using the text protocol let path = Path::new(project.json_rpc().project_root().id(), &["test_file.txt"]); let contents = "Hello, 世界!".to_string(); - let written = project.json_rpc().write_file(&path, &contents).await.unwrap(); - info!("Written: {written:?}"); + project.json_rpc().write_file(&path, &contents).await.unwrap(); + info!("Written: {contents:?}"); let read_back = project.json_rpc().read_file(&path).await.unwrap(); info!("Read back: {read_back:?}"); assert_eq!(contents, read_back.contents); diff --git a/app/gui/view/Cargo.toml b/app/gui/view/Cargo.toml index a52a4c01cc..91c6a0fbd1 100644 --- a/app/gui/view/Cargo.toml +++ b/app/gui/view/Cargo.toml @@ -23,7 +23,7 @@ ensogl-text-msdf = { path = "../../../lib/rust/ensogl/component/text/src/font/ms ensogl-hardcoded-theme = { path = "../../../lib/rust/ensogl/app/theme/hardcoded" } ide-view-component-browser = { path = "component-browser" } ide-view-graph-editor = { path = "graph-editor" } -parser = { path = "../language/parser" } +parser-scala = { path = "../language/parser" } span-tree = { path = "../language/span-tree" } js-sys = { version = "0.3.28" } multi-map = { version = "1.3.0" } diff --git a/app/gui/view/component-browser/breadcrumbs/src/lib.rs b/app/gui/view/component-browser/breadcrumbs/src/lib.rs index e6b9c52e1d..9769268681 100644 --- a/app/gui/view/component-browser/breadcrumbs/src/lib.rs +++ b/app/gui/view/component-browser/breadcrumbs/src/lib.rs @@ -20,13 +20,12 @@ #![recursion_limit = "1024"] // === Features === #![feature(option_result_contains)] -#![feature(derive_default_enum)] #![feature(trait_alias)] #![feature(hash_drain_filter)] -#![feature(bool_to_option)] // === Standard Linter Configuration === 
#![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -528,7 +527,7 @@ impl ensogl_core::application::View for Breadcrumbs { fn default_shortcuts() -> Vec { use ensogl_core::application::shortcut::ActionType::*; - (&[(Press, "shift enter", "move_up"), (Press, "ctrl shift enter", "move_down")]) + [(Press, "shift enter", "move_up"), (Press, "ctrl shift enter", "move_down")] .iter() .map(|(a, b, c)| Self::self_shortcut(*a, *b, *c)) .collect() diff --git a/app/gui/view/component-browser/component-group/src/lib.rs b/app/gui/view/component-browser/component-group/src/lib.rs index d67bf89d2d..3b193f20d7 100644 --- a/app/gui/view/component-browser/component-group/src/lib.rs +++ b/app/gui/view/component-browser/component-group/src/lib.rs @@ -43,10 +43,10 @@ #![recursion_limit = "512"] // === Features === #![feature(option_result_contains)] -#![feature(derive_default_enum)] // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -567,7 +567,7 @@ impl component::Frp for Frp { fn default_shortcuts() -> Vec { use ensogl::application::shortcut::ActionType::*; - (&[(Press, "tab", "accept_suggestion")]) + [(Press, "tab", "accept_suggestion")] .iter() .map(|(a, b, c)| View::self_shortcut(*a, *b, *c)) .collect() diff --git a/app/gui/view/component-browser/component-group/src/wide.rs b/app/gui/view/component-browser/component-group/src/wide.rs index a4988015b3..0a430fe73a 100644 --- a/app/gui/view/component-browser/component-group/src/wide.rs +++ b/app/gui/view/component-browser/component-group/src/wide.rs @@ -275,7 +275,7 @@ impl component::Frp> for Frp { fn default_shortcuts() -> Vec { use ensogl::application::shortcut::ActionType::*; - 
(&[(Press, "tab", "accept_suggestion")]) + [(Press, "tab", "accept_suggestion")] .iter() .map(|(a, b, c)| View::::self_shortcut(*a, *b, *c)) .collect() diff --git a/app/gui/view/component-browser/searcher-list-panel/src/lib.rs b/app/gui/view/component-browser/searcher-list-panel/src/lib.rs index 6224528cc6..4b37463db7 100644 --- a/app/gui/view/component-browser/searcher-list-panel/src/lib.rs +++ b/app/gui/view/component-browser/searcher-list-panel/src/lib.rs @@ -9,7 +9,6 @@ // === Features === #![allow(incomplete_features)] #![feature(associated_type_defaults)] -#![feature(bool_to_option)] #![feature(cell_update)] #![feature(const_type_id)] #![feature(drain_filter)] @@ -22,7 +21,6 @@ #![feature(unboxed_closures)] #![feature(trace_macros)] #![feature(const_trait_impl)] -#![feature(derive_default_enum)] #![feature(slice_as_chunks)] #![feature(option_result_contains)] #![feature(int_roundings)] @@ -30,6 +28,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/app/gui/view/component-browser/src/lib.rs b/app/gui/view/component-browser/src/lib.rs index 58ab3515a7..05e57d19cf 100644 --- a/app/gui/view/component-browser/src/lib.rs +++ b/app/gui/view/component-browser/src/lib.rs @@ -7,6 +7,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/app/gui/view/debug_scene/component-group/src/lib.rs b/app/gui/view/debug_scene/component-group/src/lib.rs index 7adb184484..e1d6bdc166 100644 --- a/app/gui/view/debug_scene/component-group/src/lib.rs +++ b/app/gui/view/debug_scene/component-group/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === 
#![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -241,9 +242,9 @@ mod transparent_circle { } fn init(app: &Application) { - theme::builtin::dark::register(&app); - theme::builtin::light::register(&app); - theme::builtin::light::enable(&app); + theme::builtin::dark::register(app); + theme::builtin::light::register(app); + theme::builtin::light::enable(app); // === Layers setup === diff --git a/app/gui/view/debug_scene/component-list-panel-view/src/lib.rs b/app/gui/view/debug_scene/component-list-panel-view/src/lib.rs index 8260361780..c0638769a6 100644 --- a/app/gui/view/debug_scene/component-list-panel-view/src/lib.rs +++ b/app/gui/view/debug_scene/component-list-panel-view/src/lib.rs @@ -4,7 +4,6 @@ // === Features === #![allow(incomplete_features)] #![feature(associated_type_defaults)] -#![feature(bool_to_option)] #![feature(cell_update)] #![feature(const_type_id)] #![feature(drain_filter)] @@ -21,6 +20,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] @@ -198,8 +198,8 @@ fn init_local_scope_section(searcher_list_panel: &ComponentBrowserPanel) { pub fn main() { ensogl_text_msdf::run_once_initialized(|| { let app = &Application::new("root"); - theme::builtin::light::register(&app); - theme::builtin::light::enable(&app); + theme::builtin::light::register(app); + theme::builtin::light::enable(app); let world = &app.display; let scene = &world.default_scene; diff --git a/app/gui/view/debug_scene/icons/src/lib.rs b/app/gui/view/debug_scene/icons/src/lib.rs index 10dbe7db1d..5266da38d3 100644 --- a/app/gui/view/debug_scene/icons/src/lib.rs +++ b/app/gui/view/debug_scene/icons/src/lib.rs @@ -1,6 +1,7 @@ // === 
Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use ensogl::system::web::traits::*; diff --git a/app/gui/view/debug_scene/interface/Cargo.toml b/app/gui/view/debug_scene/interface/Cargo.toml index 66fe82a7f9..c989f483e7 100644 --- a/app/gui/view/debug_scene/interface/Cargo.toml +++ b/app/gui/view/debug_scene/interface/Cargo.toml @@ -14,7 +14,7 @@ ensogl = { path = "../../../../../lib/rust/ensogl" } ensogl-hardcoded-theme = { path = "../../../../../lib/rust/ensogl/app/theme/hardcoded" } ensogl-text-msdf = { path = "../../../../../lib/rust/ensogl/component/text/src/font/msdf" } ide-view = { path = "../.." } -parser = { path = "../../../language/parser" } +parser-scala = { path = "../../../language/parser" } span-tree = { path = "../../../language/span-tree" } uuid = { version = "0.8", features = ["v4", "wasm-bindgen"] } wasm-bindgen = { version = "0.2.78", features = ["nightly"] } diff --git a/app/gui/view/debug_scene/interface/src/lib.rs b/app/gui/view/debug_scene/interface/src/lib.rs index 6fdd3e17e4..facf112be7 100644 --- a/app/gui/view/debug_scene/interface/src/lib.rs +++ b/app/gui/view/debug_scene/interface/src/lib.rs @@ -5,6 +5,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -37,7 +38,7 @@ use ide_view::graph_editor::Type; use ide_view::project; use ide_view::root; use ide_view::status_bar; -use parser::Parser; +use parser_scala::Parser; use uuid::Uuid; diff --git a/app/gui/view/debug_scene/new-component-list-panel-view/src/lib.rs b/app/gui/view/debug_scene/new-component-list-panel-view/src/lib.rs index c133ec9fcc..790d8e5a3f 100644 --- a/app/gui/view/debug_scene/new-component-list-panel-view/src/lib.rs +++ 
b/app/gui/view/debug_scene/new-component-list-panel-view/src/lib.rs @@ -5,7 +5,6 @@ #![allow(incomplete_features)] #![feature(negative_impls)] #![feature(associated_type_defaults)] -#![feature(bool_to_option)] #![feature(cell_update)] #![feature(const_type_id)] #![feature(drain_filter)] @@ -22,6 +21,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/app/gui/view/debug_scene/src/lib.rs b/app/gui/view/debug_scene/src/lib.rs index 5a53536cf0..61f29ba442 100644 --- a/app/gui/view/debug_scene/src/lib.rs +++ b/app/gui/view/debug_scene/src/lib.rs @@ -6,6 +6,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/app/gui/view/debug_scene/visualization/src/lib.rs b/app/gui/view/debug_scene/visualization/src/lib.rs index 96825cbc91..f023536b43 100644 --- a/app/gui/view/debug_scene/visualization/src/lib.rs +++ b/app/gui/view/debug_scene/visualization/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/app/gui/view/graph-editor/src/component/add_node_button.rs b/app/gui/view/graph-editor/src/component/add_node_button.rs index 0392b8b8c6..27122a28c9 100644 --- a/app/gui/view/graph-editor/src/component/add_node_button.rs +++ b/app/gui/view/graph-editor/src/component/add_node_button.rs @@ -27,7 +27,7 @@ mod shape { let angle = Radians::from(90.0.degrees()); let bar_length = &radius * 4.0 / 3.0; let bar_width = &bar_length / 10.0; - 
#[allow(clippy::blacklisted_name)] // The `bar` name here is totally legit. + #[allow(clippy::disallowed_names)] // The `bar` name here is totally legit. let bar = Rect((bar_length, &bar_width)); let plus = (bar.rotate(angle) + bar).into(); let shape = shape(background_color, icon_color, plus, radius); diff --git a/app/gui/view/graph-editor/src/component/breadcrumbs/breadcrumb.rs b/app/gui/view/graph-editor/src/component/breadcrumbs/breadcrumb.rs index c26a13ab6b..aca809b40e 100644 --- a/app/gui/view/graph-editor/src/component/breadcrumbs/breadcrumb.rs +++ b/app/gui/view/graph-editor/src/component/breadcrumbs/breadcrumb.rs @@ -309,7 +309,7 @@ impl BreadcrumbModel { .shape_system(scene, PhantomData::) .shape_system .symbol; - scene.layers.panel.add_exclusive(&background); + scene.layers.panel.add_exclusive(background); scene.layers.panel.add_exclusive(&icon); scene.layers.panel.add_exclusive(&separator); diff --git a/app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs b/app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs index 4bfc4d0d7d..89378c098d 100644 --- a/app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs +++ b/app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs @@ -386,13 +386,13 @@ impl View for ProjectName { fn default_shortcuts() -> Vec { use shortcut::ActionType::*; - (&[ + [ (Press, "", "enter", "commit"), (Release, "", "escape", "cancel_editing"), (DoublePress, "is_hovered", "left-mouse-button", "start_editing"), - ]) - .iter() - .map(|(a, b, c, d)| Self::self_shortcut_when(*a, *c, *d, *b)) - .collect() + ] + .iter() + .map(|(a, b, c, d)| Self::self_shortcut_when(*a, *c, *d, *b)) + .collect() } } diff --git a/app/gui/view/graph-editor/src/component/node/error.rs b/app/gui/view/graph-editor/src/component/node/error.rs index a3e2855725..67f2eac622 100644 --- a/app/gui/view/graph-editor/src/component/node/error.rs +++ b/app/gui/view/graph-editor/src/component/node/error.rs @@ -134,7 +134,7 
@@ impl Container { /// Move the container with visualization to `layer`. pub fn set_layer(&self, layer: visualization::Layer) { - self.visualization.frp.set_layer.emit(&layer); + self.visualization.frp.set_layer.emit(layer); layer.apply_for_html_component(&self.scene, &self.background_dom); } } diff --git a/app/gui/view/graph-editor/src/component/profiling.rs b/app/gui/view/graph-editor/src/component/profiling.rs index 371fbefb17..97bc2d55c3 100644 --- a/app/gui/view/graph-editor/src/component/profiling.rs +++ b/app/gui/view/graph-editor/src/component/profiling.rs @@ -83,7 +83,7 @@ mod icon { // === Needle === let needle = UnevenCapsule(needle_radius_outer,needle_radius_inner,needle_length); - let needle = needle.rotate(&needle_angle); + let needle = needle.rotate(needle_angle); let inner_circle = Circle(&inner_circle_radius); diff --git a/app/gui/view/graph-editor/src/lib.rs b/app/gui/view/graph-editor/src/lib.rs index 65ff42c891..8d28be815b 100644 --- a/app/gui/view/graph-editor/src/lib.rs +++ b/app/gui/view/graph-editor/src/lib.rs @@ -15,6 +15,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] #![allow(incomplete_features)] // To be removed, see: https://github.com/enso-org/ide/issues/1559 #![warn(missing_copy_implementations)] @@ -70,7 +71,7 @@ use ensogl::system::web; use ensogl::system::web::traits::*; use ensogl::Animation; use ensogl::DEPRECATED_Animation; -use ensogl::DEPRECATED_Tween; +use ensogl::Easing; use ensogl_component::tooltip::Tooltip; use ensogl_hardcoded_theme as theme; @@ -1649,7 +1650,7 @@ impl GraphEditorModelWithNetwork { preprocessor: node_model.visualization.frp.preprocessor.value(), }; metadata.emit(initial_metadata); - init.emit(&()); + init.emit(()); self.nodes.insert(node_id, node.clone_ref()); node } @@ -2510,7 +2511,7 @@ impl application::View for GraphEditor { fn default_shortcuts() -> Vec { use 
shortcut::ActionType::*; - (&[ + [ (Press, "!node_editing", "tab", "start_node_creation"), // === Drag === (Press, "", "left-mouse-button", "node_press"), @@ -2564,10 +2565,10 @@ impl application::View for GraphEditor { (Press, "debug_mode", "ctrl shift enter", "debug_push_breadcrumb"), (Press, "debug_mode", "ctrl shift up", "debug_pop_breadcrumb"), (Press, "debug_mode", "ctrl n", "add_node_at_cursor"), - ]) - .iter() - .map(|(a, b, c, d)| Self::self_shortcut_when(*a, *c, *d, *b)) - .collect() + ] + .iter() + .map(|(a, b, c, d)| Self::self_shortcut_when(*a, *c, *d, *b)) + .collect() } } @@ -3139,18 +3140,18 @@ fn new_graph_editor(app: &Application) -> GraphEditor { eval drag_tgts ((ids) model.disable_grid_snapping_for(ids)); let node_tgt_pos_anim = DEPRECATED_Animation::>::new(network); - let x_snap_strength = DEPRECATED_Tween::new(network); - let y_snap_strength = DEPRECATED_Tween::new(network); - x_snap_strength.set_duration(300.0.ms()); - y_snap_strength.set_duration(300.0.ms()); + let x_snap_strength = Easing::new(network); + let y_snap_strength = Easing::new(network); + x_snap_strength.set_duration(300.0); + y_snap_strength.set_duration(300.0); _eval <- node_tgt_pos_rt.map2(&just_pressed, f!([model,x_snap_strength,y_snap_strength,node_tgt_pos_anim](pos,just_pressed) { let snapped = model.nodes.check_grid_magnet(*pos); let x = snapped.x.unwrap_or(pos.x); let y = snapped.y.unwrap_or(pos.y); - x_snap_strength.set_target_value(if snapped.x.is_none() { 0.0 } else { 1.0 }); - y_snap_strength.set_target_value(if snapped.y.is_none() { 0.0 } else { 1.0 }); + x_snap_strength.target(if snapped.x.is_none() { 0.0 } else { 1.0 }); + y_snap_strength.target(if snapped.y.is_none() { 0.0 } else { 1.0 }); node_tgt_pos_anim.set_target_value(Vector2::new(x,y)); if *just_pressed { node_tgt_pos_anim.set_target_value(*pos); diff --git a/app/gui/view/graph-editor/src/new_node_position.rs b/app/gui/view/graph-editor/src/new_node_position.rs index b7b752d96a..753a6b4496 100644 --- 
a/app/gui/view/graph-editor/src/new_node_position.rs +++ b/app/gui/view/graph-editor/src/new_node_position.rs @@ -283,7 +283,7 @@ const MAGNET_ALIGNMENT_THRESHOLD: f32 = 150.0; /// The direction in which the Magnet Alignment algorithm will be applied. /// /// See [`magnet_alignment`]. -#[derive(Debug, Clone, Copy, PartialEq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[allow(missing_docs)] pub enum MagnetAlignmentDirection { HorizontallyAndVertically, diff --git a/app/gui/view/graph-editor/src/profiling.rs b/app/gui/view/graph-editor/src/profiling.rs index f443550c3e..b86e87ccda 100644 --- a/app/gui/view/graph-editor/src/profiling.rs +++ b/app/gui/view/graph-editor/src/profiling.rs @@ -75,12 +75,12 @@ impl Statuses { durations.borrow_mut().remove_by_left(node); }, }; - Self::min_and_max(&*durations.borrow()) + Self::min_and_max(&durations.borrow()) })); min_and_max_from_remove <- frp.remove.map(f!([durations](node) { durations.borrow_mut().remove_by_left(node); - Self::min_and_max(&*durations.borrow()) + Self::min_and_max(&durations.borrow()) })); min_and_max <- any(&min_and_max_from_set,&min_and_max_from_remove); diff --git a/app/gui/view/src/code_editor.rs b/app/gui/view/src/code_editor.rs index 17a0829db7..b91ec23420 100644 --- a/app/gui/view/src/code_editor.rs +++ b/app/gui/view/src/code_editor.rs @@ -152,7 +152,7 @@ impl application::View for View { fn default_shortcuts() -> Vec { use shortcut::ActionType::*; - (&[(Press, "ctrl `", "toggle"), (Press, "escape", "hide")]) + [(Press, "ctrl `", "toggle"), (Press, "escape", "hide")] .iter() .map(|(a, b, c)| Self::self_shortcut(*a, *b, *c)) .collect() diff --git a/app/gui/view/src/lib.rs b/app/gui/view/src/lib.rs index f99eacdcce..21f5261cb5 100644 --- a/app/gui/view/src/lib.rs +++ b/app/gui/view/src/lib.rs @@ -16,6 +16,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] 
#![allow(incomplete_features)] // To be removed, see: https://github.com/enso-org/ide/issues/1559 #![warn(missing_copy_implementations)] diff --git a/app/gui/view/src/project.rs b/app/gui/view/src/project.rs index a082026de4..313302bbe8 100644 --- a/app/gui/view/src/project.rs +++ b/app/gui/view/src/project.rs @@ -821,7 +821,7 @@ impl application::View for View { fn default_shortcuts() -> Vec { use shortcut::ActionType::*; - (&[ + [ (Press, "!is_searcher_opened", "cmd o", "show_open_dialog"), (Press, "is_searcher_opened", "escape", "close_searcher"), (Press, "open_dialog_shown", "escape", "close_open_dialog"), @@ -834,9 +834,9 @@ impl application::View for View { (Press, "", "cmd y", "redo"), (Press, "!debug_mode", DEBUG_MODE_SHORTCUT, "enable_debug_mode"), (Press, "debug_mode", DEBUG_MODE_SHORTCUT, "disable_debug_mode"), - ]) - .iter() - .map(|(a, b, c, d)| Self::self_shortcut_when(*a, *c, *d, *b)) - .collect() + ] + .iter() + .map(|(a, b, c, d)| Self::self_shortcut_when(*a, *c, *d, *b)) + .collect() } } diff --git a/app/gui/view/src/searcher.rs b/app/gui/view/src/searcher.rs index 600d5d2dc1..2e902210f6 100644 --- a/app/gui/view/src/searcher.rs +++ b/app/gui/view/src/searcher.rs @@ -306,7 +306,7 @@ impl application::View for View { } fn default_shortcuts() -> Vec { use shortcut::ActionType::*; - (&[(Press, "tab", "use_as_suggestion")]) + [(Press, "tab", "use_as_suggestion")] .iter() .map(|(a, b, c)| Self::self_shortcut(*a, *b, *c)) .collect() diff --git a/app/gui/view/src/window_control_buttons/close.rs b/app/gui/view/src/window_control_buttons/close.rs index d528d8b160..3055a833ba 100644 --- a/app/gui/view/src/window_control_buttons/close.rs +++ b/app/gui/view/src/window_control_buttons/close.rs @@ -26,7 +26,7 @@ pub mod shape { let angle = Radians::from(45.0.degrees()); let bar_length = &radius * 4.0 / 3.0; let bar_width = &bar_length / 6.5; - #[allow(clippy::blacklisted_name)] // The `bar` name here is totally legit. 
+ #[allow(clippy::disallowed_names)] // The `bar` name here is totally legit. let bar = Rect((bar_length, &bar_width)).corners_radius(bar_width); let cross = (bar.rotate(angle) + bar.rotate(-angle)).into(); shape(background_color, icon_color, cross, radius) diff --git a/app/gui/view/welcome-screen/src/lib.rs b/app/gui/view/welcome-screen/src/lib.rs index 5e9c782d72..0fccdb7e97 100644 --- a/app/gui/view/welcome-screen/src/lib.rs +++ b/app/gui/view/welcome-screen/src/lib.rs @@ -6,6 +6,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/build/.gitignore b/build/.gitignore new file mode 100644 index 0000000000..7eb3b21eb4 --- /dev/null +++ b/build/.gitignore @@ -0,0 +1,5 @@ +/dist +/null +/target/ +.idea +/.graphqlconfig diff --git a/build/Cargo.toml b/build/Cargo.toml deleted file mode 100644 index a9eb9897d4..0000000000 --- a/build/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "enso-build3" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[dependencies] -enso-build = { git = "https://github.com/enso-org/ci-build", branch = "develop" } -enso-build-cli = { git = "https://github.com/enso-org/ci-build", branch = "develop" } -ide-ci = { git = "https://github.com/enso-org/ci-build", branch = "develop" } diff --git a/build/README.md b/build/README.md new file mode 100644 index 0000000000..f5f050b30a --- /dev/null +++ b/build/README.md @@ -0,0 +1,97 @@ +

+
+ + Enso Language + +

+ +# [WIP] Enso CI Build Scripts + +The code under this directory is under ongoing intensive development. As such it +has not been reviewed or cleaned up yet. + +## General principles + +- Written in Rust. +- Portable. Works on any platform that Enso targets. +- Do not duplicate functionality that is already available in tools being part + of our tech stack. +- Supports both developers and CI use-cases (and environments). Developers can + call it locally to do anything that CI does. +- Does not require much setup work. Where feasible sets things up for the user. + +### Concepts + +#### Target + +- Can be built locally from sources. Building yields artifacts. +- Artifacts are self-contained to a single filesystem directory. +- Artifacts can be downloaded from a remote location rather than built with the + same effect. +- Can contain other targets. +- Artifacts can be platform-specific. +- Artifacts must be releasable as CI run artifacts or release assets. + +# Usage + +While the script is invoked using `cargo run`, the convenience `run` script is +provided in the repository root. + +The general usage form is: + +``` +./run [options] +``` + +The command itself is usually a combination of target name and subcommand, like +`ide build` or `backend test`. + +At every layer, the `--help` command can be used to get more information. Note +that the information depends on the command, so running `./run --help` will not +give you the same information as `./run ide --help` nor +`./run ide build --help`. + +## Targets + +### IDE + +IDE is the top level target for our project. It consists of `GUI` and `Backend` +targets. + +Sources consist mostly of TS code for the Electron client and packaging. + +Artifacts are platform specific and consist of the single image file. + +### GUI + +GUI is the front-end part of the project. It consists of `WASM` target. + +Sources consist mostly of TS code for the web page that embeds the `WASM` +binaries. 
+ +Artifacts are portable across the platforms and consist of the web page that can +be served either using Electron client (as IDE does) or using a web server (like +the Cloud version of Enso). + +### WASM + +This is the core of GUI, written in Rust. It has no external dependencies. + +Artifacts are portable across the platforms and consist of the single WASM +binary accompanied by the JS snippets and glue. + +### Backend + +Backend is the back-end part of the project, as seen from the IDE perspective. +It contains a Project Manager bundle that includes: + +- Project Manager native image; +- Enso Engine distribution (with the Standard Library); +- GraalVM distribution. + +These components are not represented as "Targets" (in terms of build script) but +could be and likely will be in the future. diff --git a/build/build-utils/src/lib.rs b/build/build-utils/src/lib.rs deleted file mode 100644 index 2cd5759b5b..0000000000 --- a/build/build-utils/src/lib.rs +++ /dev/null @@ -1,117 +0,0 @@ -//! A crate with many utilities for build scripts, for example downloading packages form GitHub or -//! easier management of env vars and paths. - -// === Features === -#![feature(trait_alias)] -// === Standard Linter Configuration === -#![deny(non_ascii_idents)] -#![warn(unsafe_code)] -#![allow(clippy::let_and_return)] -// === Non-Standard Linter Configuration === -#![allow(clippy::option_map_unit_fn)] -#![allow(clippy::precedence)] -#![allow(dead_code)] -#![deny(unconditional_recursion)] -#![warn(missing_copy_implementations)] -#![warn(missing_debug_implementations)] -#![warn(missing_docs)] -#![warn(trivial_casts)] -#![warn(trivial_numeric_casts)] -#![warn(unused_import_braces)] -#![warn(unused_qualifications)] - -use std::fmt::Display; -use std::io::ErrorKind; -use std::path; - - - -// ===================== -// === GithubRelease === -// ===================== - -/// Types that can yield a reference to std::path::Path. 
-pub trait PathRef = AsRef; - -/// A structure describing a concrete release package on GitHub. The [`project_url`] should be a -/// project's main page on GitHub. -#[derive(Debug)] -#[allow(missing_docs)] -pub struct GithubRelease { - pub project_url: T, - pub version: T, - pub filename: T, -} - -impl + Display> GithubRelease { - /// Download the release package from GitHub. If the target file already exists, it will be - /// removed first. - pub fn download(&self, destination_dir: &path::Path) { - let url = - format!("{}/releases/download/{}/{}", self.project_url, self.version, self.filename); - let destination_file = destination_dir.join(self.filename.as_ref()); - remove_old_file(&destination_file); - let mut resp = reqwest::blocking::get(&url).expect("Download failed."); - let mut out = std::fs::File::create(destination_file).expect("Failed to create file."); - std::io::copy(&mut resp, &mut out).expect("Failed to copy file content."); - } -} - - - -// ================== -// === File Utils === -// ================== - -/// Remove the old file if it exists. -fn remove_old_file(file: &path::Path) { - let result = std::fs::remove_file(&file); - let error = result.err(); - let fatal_error = error.filter(|err| err.kind() != ErrorKind::NotFound); - assert!(fatal_error.is_none()); -} - - - -// ======================= -// === Path Conversion === -// ======================= - -/// Converts path to an absolute form. -pub fn absolute_path(path: impl PathRef) -> std::io::Result { - use path_clean::PathClean; - let path = path.as_ref(); - if path.is_absolute() { - Ok(path.to_path_buf().clean()) - } else { - Ok(std::env::current_dir()?.join(path).clean()) - } -} - - - -// ====================== -// === Env Management === -// ====================== - -/// Get the environment variable or panic if not available. 
-pub fn env_var_or_panic(var_name: &str) -> String { - match std::env::var(var_name) { - Ok(var) => var, - Err(e) => panic!("Failed to read environment variable {}: {}.", var_name, e), - } -} - - - -// ========================== -// === Build Target Utils === -// ========================== - -/// Checks if the current build is targeting wasm32. -/// -/// Relies on `TARGET` environment variable set by cargo for build scripts. -pub fn targeting_wasm() -> bool { - let target = env_var_or_panic("TARGET"); - target.contains("wasm32") -} diff --git a/build/build/Cargo.toml b/build/build/Cargo.toml new file mode 100644 index 0000000000..8db97e79c4 --- /dev/null +++ b/build/build/Cargo.toml @@ -0,0 +1,85 @@ +[package] +name = "enso-build" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[dependencies] +anyhow = "1.0.44" +async-compression = "0.3.12" +async-trait = "0.1.51" +aws-config = "0.49.0" +aws-sdk-ecr = "0.19.0" +aws-sdk-s3 = "0.19.0" +base64 = "0.13.0" +bytes = "1.0.0" +byte-unit = { version = "4.0.14", features = ["serde"] } +cached = "0.39.0" +cfg-if = "1.0.0" +chrono = { version = "0.4.19", features = ["serde"] } +clap = { version = "3.1.5", features = ["derive", "env", "wrap_help"] } +derivative = "2.2.0" +derive_more = "0.99.17" +dirs = "4.0.0" +filetime = "0.2.15" +flate2 = "1.0.22" +flume = "0.10.10" +fs_extra = "1.2.0" +futures = "0.3.17" +futures-util = "0.3.17" +glob = "0.3.0" +#graphql_client = "0.10.0" +#handlebars = "4.2.1" +heck = "0.4.0" +humantime = "2.1.0" +ide-ci = { path = "../ci_utils" } +indexmap = "1.7.0" +indicatif = "0.17.1" +itertools = "0.10.1" +lazy_static = "1.4.0" +#git2 = "0.13.25" +log = "0.4.14" +mime = "0.3.16" +nix = { workspace = true } +octocrab = { git = "https://github.com/enso-org/octocrab", default-features = false, features = [ + "rustls" +] } +ouroboros = "0.15.0" +paste = "1.0.7" +path-absolutize = "3.0.11" +platforms = { version = 
"3.0.0", features = ["serde"] } +pin-project = "1.0.8" +port_check = "0.1.5" +pretty_env_logger = "0.4.0" +pulldown-cmark = "0.9.1" +rand = "0.8.4" +regex = "1.5.4" +reqwest = { version = "0.11.5", default-features = false, features = [ + "stream" +] } +semver = { version = "1.0.4", features = ["serde"] } +serde = { version = "1.0.130", features = ["derive"] } +serde_json = "1.0.68" +serde_yaml = "0.9.10" +scopeguard = "1.1.0" +shrinkwraprs = "0.3.0" +strum = { version = "0.24.0", features = ["derive"] } +sysinfo = "0.26.2" +tar = "0.4.37" +tempfile = "3.2.0" +toml = "0.5.8" +tokio = { workspace = true } +tracing = { version = "0.1.32" } +tracing-subscriber = "0.3.11" +console-subscriber = { workspace = true } +unicase = "2.6.0" +url = "2.2.2" +uuid = { version = "1.1.0", features = ["v4"] } +walkdir = "2.3.2" +which = "4.2.2" +whoami = "1.2.1" +zip = "0.6.2" + +[build-dependencies] +ide-ci = { path = "../ci_utils" } +serde_yaml = "0.9.10" diff --git a/build/build/build.rs b/build/build/build.rs new file mode 100644 index 0000000000..5b54ab96a4 --- /dev/null +++ b/build/build/build.rs @@ -0,0 +1,19 @@ +// === Features === +#![feature(exit_status_error)] + +use ide_ci::prelude::*; + +use ide_ci::env::expect_var; + + + +fn main() -> Result { + println!("cargo:rerun-if-changed=paths.yaml"); + let yaml_contents = include_bytes!("paths.yaml"); + let code = ide_ci::paths::process(yaml_contents.as_slice())?; + let out_dir = expect_var("OUT_DIR")?.parse2::()?; + let out_path = out_dir.join("paths.rs"); + ide_ci::fs::write(&out_path, code)?; + std::process::Command::new("rustfmt").arg(&out_path).status()?.exit_ok()?; + Ok(()) +} diff --git a/build/build/examples/artifact.rs b/build/build/examples/artifact.rs new file mode 100644 index 0000000000..4cfa814ba3 --- /dev/null +++ b/build/build/examples/artifact.rs @@ -0,0 +1,73 @@ +// === Features === +#![feature(default_free_fn)] + +use enso_build::prelude::*; + +use ide_ci::actions::artifacts; +use 
ide_ci::actions::artifacts::download::ArtifactDownloader; +use ide_ci::actions::artifacts::run_session::SessionClient; +use tempfile::TempDir; + + + +#[tokio::main] +async fn main() -> Result { + let dir = std::env::current_exe()?.parent().unwrap().to_owned(); + + debug!("Will upload {}", dir.display()); + let provider = artifacts::discover_recursive(dir); + artifacts::upload(provider, "MyPrecious", default()).await?; + + + let file = std::env::current_exe()?; + debug!("Will upload {}", file.display()); + let artifact_name = file.file_name().unwrap().to_str().unwrap(); + let provider = artifacts::single_file_provider(file.clone())?; + artifacts::upload(provider, artifact_name, default()).await?; + debug!("Upload done!"); + // artifacts::upload_single_file(file, ) + + let context = artifacts::context::Context::new_from_env()?; + let session = SessionClient::new(&context)?; + + // debug!("Checking artifacts through official API"); + // let octocrab = setup_octocrab()?; + // let run_id = ide_ci::actions::env::run_id()?; + // debug!("Got run ID {run_id}"); + // let run = octocrab.workflows("enso-org", "ci-build").get(run_id).await?; + // debug!("Got run information {run:?}"); + // let artifacts = octocrab + // .actions() + // .list_workflow_run_artifacts("enso-org", "ci-build", run_id) + // .send() + // .await?; + // debug!("Got artifacts information {artifacts:?}"); + + + debug!("Checking artifacts through runtime API"); + let list = session.list_artifacts().await?; + dbg!(&list); + + let relevant_entry = list + .iter() + .find(|artifact| artifact.name == artifact_name) + .ok_or_else(|| anyhow!("Failed to find artifact by name {artifact_name}."))?; + + dbg!(&relevant_entry); + + let items = ide_ci::actions::artifacts::raw::endpoints::get_container_items( + &context.json_client()?, + relevant_entry.file_container_resource_url.clone(), + &relevant_entry.name, + ) + .await?; + dbg!(&items); + + let temp = TempDir::new()?; + let downloader = 
ArtifactDownloader::new(session.clone(), artifact_name).await?; + downloader.download_all_to(temp.path()).await?; + + let expected_path = temp.path().join(artifact_name); + assert_eq!(std::fs::read(&expected_path)?, std::fs::read(&file)?); + Ok(()) +} diff --git a/build/build/examples/experiments.rs b/build/build/examples/experiments.rs new file mode 100644 index 0000000000..413063bd7c --- /dev/null +++ b/build/build/examples/experiments.rs @@ -0,0 +1,20 @@ +use enso_build::prelude::*; + +use enso_build::setup_octocrab; +use ide_ci::models::config::RepoContext; +use octocrab::models::ReleaseId; + + + +#[tokio::main] +async fn main() -> Result { + let octo = setup_octocrab().await?; + let repo = RepoContext::from_str("enso-org/enso-staging")?; + let handler = repo.repos(&octo); + let releases = handler.releases(); + + let release = releases.get_by_id(ReleaseId(59585385)).await?; + dbg!(&release); + + Ok(()) +} diff --git a/build/build/examples/s3.rs b/build/build/examples/s3.rs new file mode 100644 index 0000000000..4aa2824ffd --- /dev/null +++ b/build/build/examples/s3.rs @@ -0,0 +1,31 @@ +use enso_build::prelude::*; + +use aws_sdk_s3::model::ObjectCannedAcl; +use aws_sdk_s3::types::ByteStream; +use aws_sdk_s3::Client; +use enso_build::aws::BucketContext; +use enso_build::aws::EDITIONS_BUCKET_NAME; + + + +#[tokio::main] +async fn main() -> Result { + let config = dbg!(aws_config::load_from_env().await); + let bucket_context = BucketContext { + client: Client::new(&config), + bucket: EDITIONS_BUCKET_NAME.to_string(), + upload_acl: ObjectCannedAcl::PublicRead, + key_prefix: "enso".into(), + }; + + // std::env::set_var("AWS_SECRET_ACCESS_KEY", std::env::var("AWS_SECRET_ACCESS_KEY")?.trim()); + + let test_file = "test_file.exe"; + dbg!( + bucket_context + .put(test_file, ByteStream::from_path(&std::env::current_exe()?).await?) + .await? 
+ ); + + Ok(()) +} diff --git a/build/build/paths.yaml b/build/build/paths.yaml new file mode 100644 index 0000000000..15f7eea41e --- /dev/null +++ b/build/build/paths.yaml @@ -0,0 +1,96 @@ +# This file is used to generate `target/debug/build/enso-build-/out/paths.rs`. +# Generation logic is in `ci_utils/src/paths.rs`. + +/: + .github/: + workflows/: + benchmark.yml: + changelog.yml: + gui.yml: + nightly.yml: + scala-new.yml: + app/: + gui/: + ide-desktop/: + lib/: + client/: + content/: + icons/: + project-manager/: + build/: + prettier/: + built-distribution/: + ? path: "enso-engine-" + type: engine_package + "enso-bundle-": + "project-manager-bundle-": + enso: + dist/: + bin/: + client/: + content/: + assets/: + package.json: + preload.js: + icons/: + project-manager/: + tmp/: + # Final WASM artifacts in `dist` directory. + wasm/: + ? path: ide.wasm + var: wasm_main + ? path: ide_bg.wasm + var: wasm_main_raw + ? path: ide.js + var: wasm_glue + init: + build-init: + build.json: + distribution/: + editions/: + .yaml: + engine/: + runtime/: + target/: + bench-report.xml: + lib/: + rust/: + parser/: + generate-java/: + java/: + target/: + generated-java/: + tools/: + ci/: + docker/: + docker-entrypoint.sh: + Dockerfile: + simple-library-server/: + build.sbt: + run: + CHANGELOG.md: + +project-manager/: + bin/: + project-manager: + dist/: + runtime/: + +# Engine Package +# https://enso.org/docs/developer/enso/distribution/distribution.html#layout-of-an-enso-version-package +{ path: enso-/, var: engine_package }: + bin/: + components/: + editions/: + lib/: + manifest.yaml: + +# Engine Bundle +# https://enso.org/docs/developer/enso/distribution/distribution.html#portable-enso-distribution-layout +{ path: enso/, var: engine_bundle }: + dist/: + /: + edition/: + .yaml: + runtime/: + .enso.portable: diff --git a/build/build/src/aws.rs b/build/build/src/aws.rs new file mode 100644 index 0000000000..143c0f64c7 --- /dev/null +++ b/build/build/src/aws.rs @@ -0,0 +1,204 @@ +use 
crate::prelude::*; + +use crate::version::BuildKind; + +use anyhow::Context; +use aws_sdk_s3::model::ObjectCannedAcl; +use aws_sdk_s3::output::PutObjectOutput; +use aws_sdk_s3::types::ByteStream; +use bytes::Buf; +use ide_ci::models::config::RepoContext; +use serde::de::DeserializeOwned; + + +// ============== +// === Export === +// ============== + +pub mod ecr; + + + +/// The upper limit on number of nightly editions that are stored in the bucket. +pub const NIGHTLY_EDITIONS_LIMIT: usize = 20; + +pub const EDITIONS_BUCKET_NAME: &str = "editions.release.enso.org"; + +pub const MANIFEST_FILENAME: &str = "manifest.yaml"; + + + +#[derive(Clone, Debug, Display, Serialize, Deserialize, Shrinkwrap)] +pub struct Edition(pub String); + +impl AsRef for Edition { + fn as_ref(&self) -> &str { + self.0.as_str() + } +} + +impl> From for Edition { + fn from(value: T) -> Self { + Edition(value.into()) + } +} + +impl Edition { + pub fn is_nightly(&self) -> bool { + // TRANSITION: old nightlies + self.0.contains("nightly") + || Version::find_in_text(self) + .as_ref() + .map_or(false, |version| BuildKind::Nightly.matches(version)) + } +} + + + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Manifest { + /// Sequence of edition names. 
+ pub editions: Vec, +} + +impl Manifest { + pub fn with_new_nightly( + &self, + new_nightly: Edition, + nightlies_count_limit: usize, + ) -> (Manifest, Vec<&Edition>) { + let (nightlies, non_nightlies) = + self.editions.iter().partition::, _>(|e| e.is_nightly()); + let nightlies_count_to_remove = (1 + nightlies.len()).saturating_sub(nightlies_count_limit); + debug!( + "Will remove {} nightly editions from {} found.", + nightlies_count_to_remove, + nightlies.len() + ); + let (nightlies_to_remove, nightlies_to_keep) = + nightlies.split_at(nightlies_count_to_remove); + + let mut new_editions = non_nightlies; + new_editions.extend(nightlies_to_keep); + new_editions.push(&new_nightly); + + let new_manifest = Manifest { editions: new_editions.into_iter().cloned().collect() }; + (new_manifest, nightlies_to_remove.to_vec()) + } +} + +#[derive(Clone, Debug)] +pub struct BucketContext { + pub client: aws_sdk_s3::Client, + pub bucket: String, + pub upload_acl: ObjectCannedAcl, + pub key_prefix: String, +} + +impl BucketContext { + pub async fn get(&self, path: &str) -> Result { + Ok(self + .client + .get_object() + .bucket(&self.bucket) + .key(format!("{}/{}", self.key_prefix, path)) + .send() + .await? 
+ .body) + } + + pub async fn put(&self, path: &str, data: ByteStream) -> Result { + dbg!(self + .client + .put_object() + .bucket(&self.bucket) + .acl(self.upload_acl.clone()) + .key(format!("{}/{}", self.key_prefix, path)) + .body(data)) + .send() + .await + .anyhow_err() + } + + pub async fn get_yaml(&self, path: &str) -> Result { + let text = self.get(path).await?.collect().await?; + serde_yaml::from_reader(text.reader()).anyhow_err() + } + + pub async fn put_yaml(&self, path: &str, data: &impl Serialize) -> Result { + let buf = serde_yaml::to_string(data)?; + self.put(path, ByteStream::from(buf.into_bytes())).await + } +} + +pub async fn update_manifest(repo_context: &RepoContext, edition_file: &Path) -> Result { + let bucket_context = BucketContext { + client: aws_sdk_s3::Client::new(&aws_config::load_from_env().await), + bucket: EDITIONS_BUCKET_NAME.to_string(), + upload_acl: ObjectCannedAcl::PublicRead, + key_prefix: repo_context.name.clone(), + }; + + let new_edition_name = Edition( + edition_file + .file_stem() + .context("Edition file path is missing filename stem!")? + .as_str() + .to_string(), + ); + ide_ci::fs::expect_file(edition_file)?; + + let manifest = bucket_context.get_yaml::(MANIFEST_FILENAME).await?; + debug!("Got manifest index from S3: {:#?}", manifest); + + + let (new_manifest, nightlies_to_remove) = + manifest.with_new_nightly(new_edition_name, NIGHTLY_EDITIONS_LIMIT); + for nightly_to_remove in nightlies_to_remove { + debug!("Should remove {}", nightly_to_remove); + } + + let new_edition_filename = + edition_file.file_name().context("Edition file path is missing filename!")?; + bucket_context + .put(new_edition_filename.as_str(), ByteStream::from_path(&edition_file).await?) 
+ .await?; + + bucket_context.put_yaml("manifest.yaml", &new_manifest).await?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + + // #[tokio::test] + // async fn aaa() -> Result { + // let repo = RepoContext::from_str("enso-org/enso")?; + // let paths = + // Paths::new_version(r"H:\NBO\enso", Version::parse("2022.1.1-nightly.2022-01-28")?)?; + // update_manifest(&repo, &paths).await?; + // Ok(()) + // } + + #[test] + fn updating_manifest() -> Result { + let old_nightly = serde_yaml::from_str::( + r"editions: +- '2021.11' +- 2021.13-SNAPSHOT +- 2021.14-SNAPSHOT +- 2021.15-SNAPSHOT +- 2021.12-SNAPSHOT +- nightly-2021-08-12 +- nightly-2021-08-12.1 +- nightly-2021-08-16 +- nightly-2021-09-03 +", + )?; + old_nightly.with_new_nightly(Edition("foo_bar".into()), 20); + + Ok(()) + } +} diff --git a/build/build/src/aws/ecr.rs b/build/build/src/aws/ecr.rs new file mode 100644 index 0000000000..41cce0a8c2 --- /dev/null +++ b/build/build/src/aws/ecr.rs @@ -0,0 +1,58 @@ +use crate::prelude::*; + +use aws_sdk_ecr::model::Repository; +use ide_ci::programs::docker; + + +// ============== +// === Export === +// ============== + +pub mod runtime; + + + +#[instrument(skip(client))] +pub async fn resolve_repository( + client: &aws_sdk_ecr::Client, + repository_name: &str, +) -> Result { + let repositories = + client.describe_repositories().repository_names(repository_name).send().await?; + repositories + .repositories + .context("Missing repositories information.")? + .pop() + .context(format!("Cannot find repository {repository_name} in the registry.")) +} + +#[instrument(skip(client))] +pub async fn get_credentials(client: &aws_sdk_ecr::Client) -> Result { + let token = client.get_authorization_token().send().await?; + let auth_data = token + .authorization_data() + .context("Missing authorization data.")? 
+ .first() + .context("Missing authorization data entry.")?; + let token_encoded = + auth_data.authorization_token.as_ref().context("Missing authorization token.")?; + let token_decoded = base64::decode_config(token_encoded, base64::STANDARD) + .context("Failed to decode the token.")?; + let token_decoded = String::from_utf8(token_decoded)?; + let proxy = auth_data.proxy_endpoint().context("Missing proxy endpoint.")?; + let fields = token_decoded.split(':').collect_vec(); + let [username, password] = fields.as_slice() else { + bail!("Invalid token format. Parts: {:?}", fields); + }; + Ok(docker::Credentials::new(*username, *password, proxy)) +} + +#[instrument(skip(client), ret)] +pub async fn get_repository_uri( + client: &aws_sdk_ecr::Client, + repository_name: &str, +) -> Result { + let repository = resolve_repository(client, repository_name).await?; + let repository_uri = repository.repository_uri().context("Missing repository URI.")?; + Ok(repository_uri.into()) +} diff --git a/build/build/src/aws/ecr/runtime.rs b/build/build/src/aws/ecr/runtime.rs new file mode 100644 index 0000000000..2c03cbcba2 --- /dev/null +++ b/build/build/src/aws/ecr/runtime.rs @@ -0,0 +1,28 @@ +use crate::prelude::*; + +use crate::paths::generated; + +use ide_ci::programs::docker::BuildOptions; +use ide_ci::programs::docker::ImageId; +use ide_ci::programs::Docker; + + + +/// Name of the repository. 
+pub const NAME: &str = "runtime"; + +pub const REGION: &str = "eu-west-1"; + +#[instrument(fields(%dockerfile, %engine_package_root))] +pub async fn build_runtime_image( + dockerfile: generated::RepoRootToolsCiDocker, + engine_package_root: generated::EnginePackage, + tag: String, +) -> Result { + ide_ci::fs::copy_to(dockerfile.docker_entrypoint_sh, &engine_package_root.bin)?; + let mut opts = BuildOptions::new(&engine_package_root); + opts.file = Some(dockerfile.dockerfile.to_path_buf()); + opts.tags.push(tag); + let id = Docker.build(opts).await?; + Ok(id) +} diff --git a/build/build/src/bump_version.rs b/build/build/src/bump_version.rs new file mode 100644 index 0000000000..5ccbfb5bc5 --- /dev/null +++ b/build/build/src/bump_version.rs @@ -0,0 +1,14 @@ +// const fs = require('fs') +// +// const path = 'build.sbt' +// const version = process.argv[2] +// const edition = process.argv[3] +// +// const content = fs.readFileSync(path, { encoding: 'utf-8' }) +// const updated = content +// .replace(/val ensoVersion.*= ".*"/, 'val ensoVersion = "' + version + '"') +// .replace(/val currentEdition.*= ".*"/, 'val currentEdition = "' + edition + '"') +// fs.writeFileSync(path, updated) +// +// console.log('Updated build version to ' + version) +// console.log('Updated build edition to ' + edition) diff --git a/build/build/src/changelog.rs b/build/build/src/changelog.rs new file mode 100644 index 0000000000..e09ac075ea --- /dev/null +++ b/build/build/src/changelog.rs @@ -0,0 +1,71 @@ +use crate::prelude::*; + +use pulldown_cmark::Event; +use pulldown_cmark::HeadingLevel; +use pulldown_cmark::Tag::Heading; +use std::ops::Range; + + +// ============== +// === Export === +// ============== + +pub mod check; + + + +#[derive(Clone, Copy, Debug)] +pub struct Changelog<'a>(pub &'a str); + +impl<'a> Changelog<'a> { + pub fn iterate_headers(&self) -> impl Iterator> + 'a { + use pulldown_cmark::Options; + use pulldown_cmark::Parser; + let enable_all_exts = Options::all(); + 
Parser::new_ext(self.0, enable_all_exts) + .into_offset_iter() + .filter_map(|(e, pos)| Header::new(self.0, e, pos)) + } + + pub fn last_release(&self) -> Result { + self.iterate_headers() + .find_map(|header| Version::find_in_text(header.text).ok()) + .context("No release header with version number was found.") + } + + pub fn top_release_notes(&self) -> Result { + let mut headers = self.iterate_headers(); + let first_header = headers.next().context("Failed to find a level one header.")?; + let file_end_pos = self.0.len() + 1; + let next_header_start = headers.next().map_or(file_end_pos, |h| h.pos.start); + let contents = self.0[first_header.pos.end..next_header_start].trim(); + let entry = + Entry { header: first_header.text.to_string(), contents: contents.to_string() }; + Ok(entry) + } +} + +#[derive(Clone, Debug)] +pub struct Entry { + pub header: String, + pub contents: String, +} + +#[derive(Clone, Debug)] +pub struct Header<'a> { + /// Text of the header. + pub text: &'a str, + /// Position in the changelog file text. + pub pos: Range, +} + +impl<'a> Header<'a> { + pub fn new(whole_text: &'a str, event: Event, position: Range) -> Option { + is_release_notes_header(&event) + .then_some(Self { text: whole_text[position.clone()].trim(), pos: position }) + } +} + +pub fn is_release_notes_header(event: &Event) -> bool { + matches!(event, Event::Start(Heading(HeadingLevel::H1, _, _))) +} diff --git a/build/build/src/changelog/check.rs b/build/build/src/changelog/check.rs new file mode 100644 index 0000000000..f4d1c2f061 --- /dev/null +++ b/build/build/src/changelog/check.rs @@ -0,0 +1,67 @@ +use crate::prelude::*; + +use crate::ci::labels::NO_CHANGELOG_CHECK; +use crate::paths::generated::RepoRoot; + +use ide_ci::actions::workflow::MessageLevel; +use ide_ci::programs::Git; + + + +/// Name of the remote source in the working copy. +const REMOTE_NAME: &str = "origin"; + +/// Check if a given label is the one that indicates that the changelog check should be skipped. 
/// Check if a given label is the one that indicates that the changelog check should be skipped.
pub fn is_skip_changelog_label(label: &octocrab::models::Label) -> bool {
    label.name == NO_CHANGELOG_CHECK
}

/// Check if the given PR has the changelog check skipped.
///
/// NOTE(review): despite its name, this returns `false` when the check IS disabled
/// (i.e. the skip label is present) and `true` when the check should still run.
/// The only visible caller, `is_check_needed`, uses it as "check enabled".
/// Consider renaming to e.g. `is_check_enabled` to match the behavior.
pub fn has_check_disabled(pull_request: &octocrab::models::pulls::PullRequest) -> bool {
    if pull_request.labels.iter().flatten().any(is_skip_changelog_label) {
        // Skip label found — the check is disabled.
        info!("Skipping changelog check because the PR has the label {}.", NO_CHANGELOG_CHECK);
        false
    } else {
        true
    }
}

/// Check if we are in context where the changelog check should be performed.
///
/// The check runs only for pull-request events whose PR does not carry the skip label.
pub fn is_check_needed(context: &ide_ci::actions::context::Context) -> bool {
    if let Some(pr) = context.payload.pull_request.as_ref() {
        info!("Checking if changelog is up to date for PR #{}.", pr.number);
        has_check_disabled(pr)
    } else {
        info!("Not a pull request, skipping the check.");
        false
    }
}

/// Verify that `CHANGELOG.md` was modified on this branch.
///
/// Compares the working copy against `origin/<default branch>`; fails (and emits a
/// GitHub Actions error annotation) when the changelog is untouched and the PR does
/// not carry the skip label.
#[instrument("Checking if the changelog has been updated.", fields(%repo_path), skip(context), err)]
pub async fn check(repo_path: RepoRoot, context: ide_ci::actions::Context) -> Result {
    trace!("The context is {context:#?}.");
    if !is_check_needed(&context) {
        debug!("No changelog check necessary.");
        return Ok(());
    };

    let changelog = crate::paths::generated::RepoRootChangelogMd::new(&repo_path).path;
    let repository = context.payload.repository.context("Missing repository information.")?;
    let default_branch =
        repository.default_branch.context("Missing default branch information.")?;
    let git = Git::new(&repo_path).await?;
    let remote_base = format!("{REMOTE_NAME}/{default_branch}");
    let files_changed = git.diff_against(remote_base).await?;
    debug!("Files changed: {files_changed:#?}.");
    let changelog_was_changed = files_changed.iter().contains(&changelog);
    if !changelog_was_changed {
        let message = format!(
            "{} was not updated. Either update it or add the '{}' label to the PR.",
            crate::paths::generated::RepoRootChangelogMd::segment_name(),
            NO_CHANGELOG_CHECK
        );
        // Surface the failure as an error annotation in the GitHub Actions UI as well.
        ide_ci::actions::workflow::message(MessageLevel::Error, &message);
        bail!(message);
    }
    Ok(())
}

/// Labels used in the repositories where this library is used for CI.
/// They should be defined in the `.github/settings.yml` file.
pub mod labels {
    /// Name of the label that is used to mark the PRs that should not require changelog entry.
    pub const NO_CHANGELOG_CHECK: &str = "CI: No changelog needed";

    /// Name of the label that is used to mark the PRs that require clean builds.
    pub const CLEAN_BUILD_REQUIRED: &str = "CI: Clean build required";
}

/// Parse the build-script configuration from its YAML text representation.
pub fn load_yaml(yaml_text: &str) -> Result<Config> {
    let raw = serde_yaml::from_str::<ConfigRaw>(yaml_text)?;
    raw.try_into()
}

/// A program whose installed version the build script can check.
///
/// Currently everything goes through the `Other` catch-all variant; the enum exists
/// so that specific programs can be special-cased later.
#[derive(Clone, Debug, Display, PartialEq, Eq, Hash, Serialize, Deserialize, strum::EnumString)]
pub enum RecognizedProgram {
    /// Any program, identified by the name it is invoked with.
    #[strum(default)]
    Other(String),
}
impl RecognizedProgram {
    /// Query the version of this program as installed on the current machine.
    pub async fn version(&self) -> Result<Version> {
        match self {
            RecognizedProgram::Other(program) => {
                if let Some(cargo_program) = program.strip_prefix("cargo-") {
                    // Special case for cargo-programs. Cargo is able to find them even if they are
                    // not in PATH. Thus, we invoke them via cargo, not to spuriously fail the
                    // version check.
                    let version_string = programs::Cargo
                        .cmd()?
                        .arg(cargo_program)
                        .arg("--version")
                        .run_stdout()
                        .await?;
                    Version::find_in_text(&version_string)
                } else {
                    program::Unknown(program.clone()).version().await
                }
            }
        }
    }
}

/// Raw, untyped shape of the configuration, as read directly from YAML.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ConfigRaw {
    // Human-readable size text, e.g. "4.37MB"; parsed into `Byte` later.
    pub wasm_size_limit: Option<String>,
    // Program name → semver requirement, both as plain strings.
    pub required_versions: HashMap<String, String>,
}

/// The configuration of the script that is being provided by the external environment.
///
/// In our case, it is usually a configuration file in the main repository.
#[derive(Clone, Debug, Default)]
pub struct Config {
    pub wasm_size_limit: Option<Byte>,
    pub required_versions: HashMap<RecognizedProgram, VersionReq>,
}

impl Config {
    /// Verify that each required program is present and fulfills its version requirement.
    ///
    /// Fails on the first program that is missing or has an unsupported version.
    pub async fn check_programs(&self) -> Result {
        for (program, version_req) in &self.required_versions {
            let found = program.version().await?;
            if !version_req.matches(&found) {
                bail!(
                    "Found program {} in version {} that does not fulfill requirement {}.",
                    program,
                    found,
                    version_req
                );
            } else {
                info!(
                    "Found program {} in supported version {} (required {}).",
                    program, found, version_req
                );
            }
        }
        Ok(())
    }
}

impl TryFrom<ConfigRaw> for Config {
    type Error = anyhow::Error;

    // Parse the string-typed raw config into strongly-typed program names,
    // version requirements and byte sizes.
    fn try_from(value: ConfigRaw) -> std::result::Result<Self, Self::Error> {
        let mut required_versions = HashMap::new();
        for (program, version_req) in value.required_versions {
            required_versions.insert(
                <RecognizedProgram as std::str::FromStr>::from_str(&program)?,
                <VersionReq as std::str::FromStr>::from_str(&version_req)?,
            );
        }

        Ok(Self {
            wasm_size_limit: value
                .wasm_size_limit
                .map(|limit_text| <Byte as std::str::FromStr>::from_str(&limit_text))
                .transpose()?,
            required_versions,
        })
    }
}
/// The basic, common information available in this application.
#[derive(Clone, Derivative, derive_more::Deref)]
#[derivative(Debug)]
pub struct BuildContext {
    #[deref]
    pub inner: crate::project::Context,

    /// Version to be built.
    ///
    /// Note that this affects only targets that are being built. If project parts are provided by
    /// other means, their version might be different.
    pub triple: TargetTriple,

    /// Remote repository is used for release-related operations. This also includes deducing a new
    /// version number.
    pub remote_repo: RepoContext,
}

impl BuildContext {
    /// The commit being built: the CI-provided SHA when running on GitHub Actions,
    /// otherwise the HEAD of the local working copy.
    pub fn commit(&self) -> BoxFuture<'static, Result<String>> {
        let root = self.repo_root.to_path_buf();
        async move {
            match ide_ci::actions::env::GITHUB_SHA.get() {
                Ok(commit) => Ok(commit),
                Err(_e) => Git::new(root).await?.head_hash().await,
            }
        }
        .boxed()
    }

    /// Resolve a release designator to a concrete release.
    ///
    /// The designator may be a numeric release id, the literal `latest` or `nightly`,
    /// or a tag name to search for.
    #[tracing::instrument]
    pub fn resolve_release_designator(
        &self,
        designator: String,
    ) -> BoxFuture<'static, Result<Release>> {
        let repository = self.remote_repo.clone();
        let octocrab = self.octocrab.clone();
        // Keep a copy for the error message; the original is moved into the future.
        let designator_cp = designator.clone();
        async move {
            let release = if let Ok(id) = designator.parse2::<ReleaseId>() {
                repository.find_release_by_id(&octocrab, id).await?
            } else {
                match designator.as_str() {
                    "latest" => repository.latest_release(&octocrab).await?,
                    "nightly" =>
                        crate::version::latest_nightly_release(&octocrab, &repository).await?,
                    tag => repository.find_release_by_text(&octocrab, tag).await?,
                }
            };
            Ok(release)
        }
        .map_err(move |e: anyhow::Error| {
            e.context(format!("Failed to resolve release designator `{designator_cp}`."))
        })
        .boxed()
    }
}
/// Download the project-template files into the Scala package resources directory.
///
/// Templates are fetched from the `enso-org/project-templates` GitHub repository;
/// all files are downloaded concurrently.
pub async fn download_project_templates(client: reqwest::Client, enso_root: PathBuf) -> Result {
    // Download Project Template Files
    let output_base = enso_root.join("lib/scala/pkg/src/main/resources/");
    let url_base = Url::parse("https://github.com/enso-org/project-templates/raw/main/")?;
    // (template name, files to fetch for it)
    let to_handle = [
        ("Orders", vec!["data/store_data.xlsx", "src/Main.enso"]),
        ("Restaurants", vec!["data/la_districts.csv", "data/restaurants.csv", "src/Main.enso"]),
        ("Stargazers", vec!["src/Main.enso"]),
    ];

    let mut futures = Vec::<BoxFuture<Result>>::new();
    for (project_name, relative_paths) in to_handle {
        for relative_path in relative_paths {
            let relative_url_base = url_base.join(&format!("{}/", project_name))?;
            let relative_output_base = output_base.join(project_name.to_lowercase());
            // Clone the client handle so the async block can own it.
            let client = client.clone();
            let future = async move {
                ide_ci::io::web::client::download_relative(
                    &client,
                    &relative_url_base,
                    &relative_output_base,
                    &PathBuf::from(relative_path),
                )
                .await?;
                Ok(())
            };
            futures.push(future.boxed());
        }
    }

    let _result = ide_ci::future::try_join_all(futures, AsyncPolicy::FutureParallelism).await?;
    debug!("Completed downloading templates");
    Ok(())
}

/// Benchmark suites that can be executed.
#[derive(Clone, Copy, Debug, Display, PartialEq, Eq, PartialOrd, Ord, clap::ArgEnum)]
pub enum Benchmarks {
    All,
    Runtime,
}

/// Test suites that can be executed.
#[derive(Clone, Copy, Debug, Display, PartialEq, Eq, PartialOrd, Ord, clap::ArgEnum)]
pub enum Tests {
    Scala,
    #[clap(alias = "stdlib")]
    StandardLibrary,
}

impl Benchmarks {
    /// The SBT task that runs this benchmark suite.
    pub fn sbt_task(self) -> &'static str {
        match self {
            Benchmarks::All => "bench",
            Benchmarks::Runtime => "runtime/bench",
        }
    }
}

/// Flags selecting what the engine build should produce and verify.
#[derive(Clone, Debug)]
pub struct BuildConfigurationFlags {
    /// Whether the Scala test suite shall be run.
    //
    // NOTE(review): the original doc here ("repository shall be cleaned at the build
    // start" / "incremental builds with SBT are currently broken") does not describe
    // `test_scala`; it appears to belong to a removed clean-build flag. Confirm and drop.
    pub test_scala: bool,
    pub test_standard_library: bool,
    /// Whether benchmarks are compiled.
    ///
    /// Note that this does not run the benchmarks, only ensures that they are buildable.
    pub build_benchmarks: bool,
    pub execute_benchmarks: BTreeSet<Benchmarks>,
    /// Used to check that benchmarks do not fail on runtime, rather than obtaining the results.
    pub execute_benchmarks_once: bool,
    pub build_js_parser: bool,
    pub build_engine_package: bool,
    pub build_launcher_package: bool,
    pub build_project_manager_package: bool,
    pub build_launcher_bundle: bool,
    pub build_project_manager_bundle: bool,
    pub generate_java_from_rust: bool,
    pub test_java_generated_from_rust: bool,
    pub generate_documentation: bool,
    /// Verify License Packages in Distributions.
    pub verify_packages: bool,
}
impl From<BuildConfigurationFlags> for BuildConfigurationResolved {
    fn from(value: BuildConfigurationFlags) -> Self {
        Self::new(value)
    }
}

/// Build flags after resolving the implications between them
/// (e.g. building a bundle implies building its constituent packages).
#[derive(Clone, Debug, Shrinkwrap)]
pub struct BuildConfigurationResolved(BuildConfigurationFlags);

impl BuildConfigurationResolved {
    /// Normalize the flags: a bundle requires its package and the engine package,
    /// stdlib tests require the engine package, and testing the generated Java
    /// requires generating it first.
    pub fn new(mut config: BuildConfigurationFlags) -> Self {
        if config.build_launcher_bundle {
            config.build_launcher_package = true;
            config.build_engine_package = true;
        }

        if config.build_project_manager_bundle {
            config.build_project_manager_package = true;
            config.build_engine_package = true;
        }

        if config.test_standard_library {
            config.build_engine_package = true;
        }

        if config.test_java_generated_from_rust {
            config.generate_java_from_rust = true;
        }

        Self(config)
    }
}

impl BuildConfigurationFlags {
    /// Whether the engine package is needed, directly or transitively.
    pub fn build_engine_package(&self) -> bool {
        self.build_engine_package
            || self.build_launcher_bundle
            || self.build_project_manager_bundle
            || self.test_standard_library
    }

    /// Whether the project-manager package is needed, directly or for its bundle.
    pub fn build_project_manager_package(&self) -> bool {
        self.build_project_manager_package || self.build_project_manager_bundle
    }

    /// Whether the launcher package is needed, directly or for its bundle.
    pub fn build_launcher_package(&self) -> bool {
        self.build_launcher_package || self.build_launcher_bundle
    }
}

impl Default for BuildConfigurationFlags {
    fn default() -> Self {
        Self {
            test_scala: false,
            test_standard_library: false,
            build_benchmarks: false,
            execute_benchmarks: default(),
            execute_benchmarks_once: false,
            build_js_parser: false,
            build_engine_package: false,
            build_launcher_package: false,
            build_project_manager_package: false,
            build_launcher_bundle: false,
            build_project_manager_bundle: false,
            // Java generation is on by default; everything else is opt-in.
            generate_java_from_rust: true,
            test_java_generated_from_rust: false,
            generate_documentation: false,
            verify_packages: false,
        }
    }
}

/// Commands operating on a release.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum ReleaseCommand {
    Upload,
}

/// A release command together with the repository it operates on.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ReleaseOperation {
    pub command: ReleaseCommand,
    pub repo:    RepoContext,
}

/// An arbitrary command to run, given as its argument pieces.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct RunOperation {
    // NOTE(review): element type reconstructed from garbled source — confirm
    // it is `OsString` (argument pieces passed to a spawned process).
    pub command_pieces: Vec<OsString>,
}

impl RunOperation {}

/// Top-level operation the build script may perform.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Operation {
    Release(ReleaseOperation),
    Run(RunOperation),
    Build,
}

/// All artifacts produced by a build run.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct BuiltArtifacts {
    pub packages: BuiltPackageArtifacts,
    pub bundles:  BuiltBundleArtifacts,
}

/// Per-component package artifacts; `None` means the package was not built.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct BuiltPackageArtifacts {
    pub engine:          Option<ComponentPaths>,
    pub launcher:        Option<ComponentPaths>,
    pub project_manager: Option<ComponentPaths>,
}

impl BuiltPackageArtifacts {
    /// Iterate over the packages that were actually built.
    pub fn iter(&self) -> impl IntoIterator<Item = &ComponentPaths> {
        [&self.engine, &self.launcher, &self.project_manager].into_iter().flat_map(|b| b.iter())
    }
}

impl IntoIterator for BuiltPackageArtifacts {
    type Item = ComponentPaths;
    type IntoIter = std::iter::Flatten<std::array::IntoIter<Option<ComponentPaths>, 3_usize>>;

    fn into_iter(self) -> Self::IntoIter {
        [self.engine, self.launcher, self.project_manager].into_iter().flatten()
    }
}

/// Per-component bundle artifacts; `None` means the bundle was not built.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct BuiltBundleArtifacts {
    pub launcher:        Option<ComponentPaths>,
    pub project_manager: Option<ComponentPaths>,
}

impl BuiltBundleArtifacts {
    /// Iterate over the bundles that were actually built.
    pub fn iter(&self) -> impl IntoIterator<Item = &ComponentPaths> {
        [&self.project_manager, &self.launcher].into_iter().flat_map(|b| b.iter())
    }
}

impl IntoIterator for BuiltBundleArtifacts {
    type Item = ComponentPaths;
    type IntoIter = std::iter::Flatten<std::array::IntoIter<Option<ComponentPaths>, 2_usize>>;

    fn into_iter(self) -> Self::IntoIter {
        [self.launcher, self.project_manager].into_iter().flatten()
    }
}

/// Package the components that are present on disk; returns the created archives.
pub async fn create_packages(paths: &Paths) -> Result<Vec<PathBuf>> {
    let mut ret = Vec::new();
    if paths.launcher.root.exists() {
        debug!("Packaging launcher.");
        ret.push(package_component(&paths.launcher).await?);
    }
    Ok(ret)
}

/// Archiving and cleanup operations on a component's paths.
#[async_trait]
trait ComponentPathExt {
    /// Compress the component directory into its artifact archive.
    async fn pack(&self) -> Result;
    /// Remove both the unpacked component and its archive, if present.
    fn clear(&self) -> Result;
}
#[async_trait]
impl ComponentPathExt for ComponentPaths {
    async fn pack(&self) -> Result {
        ide_ci::archive::create(&self.artifact_archive, [&self.dir]).await
    }
    fn clear(&self) -> Result {
        ide_ci::fs::remove_dir_if_exists(&self.root)?;
        ide_ci::fs::remove_file_if_exists(&self.artifact_archive)
    }
}

/// Package a single component into its artifact archive.
///
/// On non-Windows targets, the component's `bin/*` files are first made
/// owner-executable, so the bit is preserved inside the archive.
pub async fn package_component(paths: &ComponentPaths) -> Result<PathBuf> {
    #[cfg(not(target_os = "windows"))]
    {
        let pattern = paths
            .dir
            .join_iter(["bin", "*"])
            .with_extension(std::env::consts::EXE_EXTENSION)
            .display()
            .to_string();
        for binary in glob::glob(&pattern)? {
            ide_ci::fs::allow_owner_execute(binary?)?;
        }
    }

    ide_ci::archive::create(&paths.artifact_archive, [&paths.root]).await?;
    Ok(paths.artifact_archive.clone())
}

//////////////////////////////////


/// Deduce the GraalVM setup (Graal version and Java major version) from the
/// contents of the repository's `build.sbt`.
pub async fn deduce_graal(
    client: Octocrab,
    build_sbt: &generated::RepoRootBuildSbt,
) -> Result<ide_ci::cache::goodie::graalvm::GraalVM> {
    let build_sbt_content = ide_ci::fs::tokio::read_to_string(build_sbt).await?;
    Ok(ide_ci::cache::goodie::graalvm::GraalVM {
        client,
        graal_version: get_graal_version(&build_sbt_content)?,
        java_version: get_java_major_version(&build_sbt_content)?,
        os: TARGET_OS,
        arch: TARGET_ARCH,
    })
}
bundle.clear()?; + + let base_component = Self::base_distribution(paths); + ide_ci::fs::copy(&base_component.root, &bundle.root)?; + + // Add engine. + let bundled_engine_dir = bundle.dir.join("dist").join(paths.version().to_string()); + place_component_at(&paths.engine, &bundled_engine_dir).await?; + + // Add GraalVM runtime. + place_graal_under(bundle.dir.join("runtime")).await?; + + // Add portable distribution marker. + ide_ci::fs::copy( + paths.repo_root.join_iter(["distribution", "enso.bundle.template"]), + bundle.dir.join(".enso.bundle"), + )?; + Ok(bundle) + } +} + +#[derive(Clone, Copy, Debug)] +pub struct Launcher; +impl Bundle for Launcher { + const PREFIX: &'static str = "enso-bundle"; + const DIRNAME: &'static str = "enso"; + fn base_distribution(paths: &Paths) -> &ComponentPaths { + &paths.launcher + } +} + +#[derive(Clone, Copy, Debug)] +pub struct ProjectManager; +impl Bundle for ProjectManager { + const PREFIX: &'static str = "project-manager-bundle"; + const DIRNAME: &'static str = "enso"; + fn base_distribution(paths: &Paths) -> &ComponentPaths { + &paths.project_manager + } +} + +// #[context("Placing a GraalVM package under {}", target_directory.as_ref().display())] +pub async fn place_graal_under(target_directory: impl AsRef) -> Result { + let graal_path = { + let java_home = JAVA_HOME.get()?; + if TARGET_OS == OS::MacOS { + // On macOS we need to drop trailing `/Contents/Home` from the path. + java_home + .parent() + .and_then(|p| p.parent()) + .context(format!("Invalid Java home for macOS: {}", java_home.display()))? 
+ .to_path_buf() + } else { + java_home + } + }; + let graal_dirname = graal_path + .file_name() + .context(anyhow!("Invalid Graal Path deduced from JAVA_HOME: {}", graal_path.display()))?; + ide_ci::fs::mirror_directory(&graal_path, target_directory.as_ref().join(graal_dirname)).await +} + +#[context("Placing a Enso Engine package in {}", target_engine_dir.as_ref().display())] +pub async fn place_component_at( + engine_paths: &ComponentPaths, + target_engine_dir: impl AsRef, +) -> Result { + ide_ci::fs::mirror_directory(&engine_paths.dir, &target_engine_dir).await +} diff --git a/build/build/src/engine/context.rs b/build/build/src/engine/context.rs new file mode 100644 index 0000000000..041988afcf --- /dev/null +++ b/build/build/src/engine/context.rs @@ -0,0 +1,537 @@ +use crate::prelude::*; + +use crate::engine; +use crate::engine::bundle::Bundle; +use crate::engine::download_project_templates; +use crate::engine::env; +use crate::engine::sbt::SbtCommandProvider; +use crate::engine::BuildConfigurationResolved; +use crate::engine::BuiltArtifacts; +use crate::engine::ComponentPathExt; +use crate::engine::Operation; +use crate::engine::ReleaseCommand; +use crate::engine::ReleaseOperation; +use crate::engine::FLATC_VERSION; +use crate::engine::PARALLEL_ENSO_TESTS; +use crate::enso::BuiltEnso; +use crate::enso::IrCaches; +use crate::paths::cache_directory; +use crate::paths::Paths; +use crate::paths::TargetTriple; +use crate::project::ProcessWrapper; +use crate::retrieve_github_access_token; + +use ide_ci::actions::workflow::is_in_env; +use ide_ci::cache; +use ide_ci::env::Variable; +use ide_ci::platform::DEFAULT_SHELL; +use ide_ci::programs::graal; +use ide_ci::programs::sbt; +use ide_ci::programs::Flatc; +use ide_ci::programs::Sbt; +use sysinfo::SystemExt; + + + +pub type FutureEnginePackage = BoxFuture<'static, Result>; + +pub type EnginePackageProvider = dyn FnMut() -> FutureEnginePackage + Send + Sync + 'static; + +/// Pretty print option variant name, i.e. 
/// Pretty print option variant name, i.e. whether it is Some or None.
///
/// Does not print the actual value under Some, so this can be used for `T`s that do not implement
/// `Debug`.
pub fn format_option_variant<T>(value: &Option<T>, f: &mut Formatter) -> std::fmt::Result {
    match value {
        Some(_) => write!(f, "Some(...)"),
        None => write!(f, "None"),
    }
}

/// Everything needed to run the engine build.
#[derive(derive_more::Deref, derive_more::DerefMut, derivative::Derivative)]
#[derivative(Debug)]
pub struct RunContext {
    #[deref]
    #[deref_mut]
    pub inner: crate::project::Context,
    pub config: BuildConfigurationResolved,
    pub paths: Paths,
    /// If set, the engine package (used for creating bundles) will be obtained through this
    /// provider rather than built from source along the other Engine components.
    #[derivative(Debug(format_with = "format_option_variant"))]
    pub external_runtime: Option<Arc<EnginePackageProvider>>,
}

impl RunContext {
    /// Create the context, deriving the on-disk paths from the repo root and version triple.
    pub fn new(
        inner: crate::project::Context,
        config: impl Into<BuildConfigurationResolved>,
        triple: TargetTriple,
        external_runtime: Option<Arc<EnginePackageProvider>>,
    ) -> Result<Self> {
        let paths = crate::paths::Paths::new_versions(&inner.repo_root, triple.versions)?;
        let context = RunContext { config: config.into(), inner, paths, external_runtime };
        Ok(context)
    }

    /// Check that required programs are present (if not, installs them, if supported). Set
    /// environment variables for the build to follow.
    pub async fn prepare_build_env(&self) -> Result {
        // Building native images with Graal on Windows requires Microsoft Visual C++ Build Tools
        // available in the environment. If it is not visible, we need to add it.
        if TARGET_OS == OS::Windows && ide_ci::programs::vs::Cl.lookup().is_err() {
            ide_ci::programs::vs::apply_dev_environment().await?;
        }

        // Setup SBT
        cache::goodie::sbt::Sbt.install_if_missing(&self.cache).await?;
        Sbt.require_present().await?;

        // Other programs.
        ide_ci::programs::Git::new_current().await?.require_present().await?;
        ide_ci::programs::Go.require_present().await?;
        ide_ci::programs::Cargo.require_present().await?;
        ide_ci::programs::Node.require_present().await?;
        ide_ci::programs::Npm.require_present().await?;

        // When Scala tests are enabled, clean and set up the simple-library-server tool;
        // the npm install part runs in the background and is awaited at the end.
        let prepare_simple_library_server = {
            if self.config.test_scala {
                let simple_server_path = &self.paths.repo_root.tools.simple_library_server;
                ide_ci::programs::Git::new(simple_server_path)
                    .await?
                    .cmd()?
                    .clean()
                    .run_ok()
                    .await?;
                ide_ci::programs::Npm
                    .cmd()?
                    .current_dir(simple_server_path)
                    .install()
                    .run_ok()
                    .left_future()
            } else {
                ready(Result::Ok(())).right_future()
            }
        };
        let prepare_simple_library_server = tokio::spawn(prepare_simple_library_server);

        // Setup Conda Environment
        // Install FlatBuffers Compiler
        // If it is not available, we require conda to install it. We should not require conda in
        // other scenarios.
        // TODO: After flatc version is bumped, it should be possible to get it without `conda`.
        // See: https://www.pivotaltracker.com/story/show/180303547
        if let Err(e) = Flatc.require_present_at(&FLATC_VERSION).await {
            debug!("Cannot find expected flatc: {}", e);
            // GitHub-hosted runner has `conda` on PATH but not things installed by it.
            // It provides `CONDA` variable pointing to the relevant location.
            if let Some(conda_path) = std::env::var_os("CONDA").map(PathBuf::from) {
                ide_ci::env::prepend_to_path(conda_path.join("bin"))?;
                if TARGET_OS == OS::Windows {
                    // Not sure if it documented anywhere, but this is where installed `flatc`
                    // appears on Windows.
                    ide_ci::env::prepend_to_path(conda_path.join("Library").join("bin"))?;
                }
            }

            ide_ci::programs::Conda
                .cmd()?
                .args(["install", "-y", "--freeze-installed", "flatbuffers=1.12.0"])
                .run_ok()
                .await?;
            Flatc.lookup()?;
        }

        let _ = self.paths.emit_env_to_actions(); // Ignore error: we might not be run on CI.
        debug!("Build configuration: {:#?}", self.config);

        // Setup Tests on Windows
        if TARGET_OS == OS::Windows {
            env::CiTestTimeFactor.set(&2);
            env::CiFlakyTestEnable.set(&true);
        }

        // TODO [mwu]
        // Currently we rely on Musl to be present on the host machine. Eventually, we should
        // consider obtaining it by ourselves.
        // if TARGET_OS == OS::Linux {
        //     let musl = ide_ci::goodies::musl::Musl;
        //     goodies.require(&musl).await?;
        // }


        // Setup GraalVM
        let graalvm =
            crate::engine::deduce_graal(self.octocrab.clone(), &self.repo_root.build_sbt).await?;
        graalvm.install_if_missing(&self.cache).await?;
        graal::Gu.require_present().await?;

        // Make sure that Graal has installed the optional components that we need.
        // Some are not supported on Windows, in part because their runtime (Sulong) is not.
        // See e.g. https://github.com/oracle/graalpython/issues/156
        let conditional_components: &[graal::Component] = if graal::sulong_supported() {
            &[graal::Component::Python, graal::Component::R]
        } else {
            &[]
        };

        let required_components =
            once(graal::Component::NativeImage).chain(conditional_components.iter().copied());
        graal::install_missing_components(required_components).await?;
        // Join the background npm install started earlier.
        prepare_simple_library_server.await??;
        Ok(())
    }
+ ide_ci::fs::remove_if_exists(&self.paths.repo_root.engine.runtime.bench_report_xml)?; + } + + if self.config.test_standard_library { + // If we run tests, make sure that old and new results won't end up mixed together. + ide_ci::fs::reset_dir(&self.paths.test_results)?; + } + + // Workaround for incremental compilation issue, as suggested by kustosz. + // We target files like + // engine/runtime/target/scala-2.13/classes/META-INF/org/enso/interpreter/node/expression/ + // builtin/BuiltinMethods.metadata but need to remove more so sbt can figure out it needs to + // rebuild. + // Otherwise, errors like this may occur: + // sbt:warning: java.lang.ClassNotFoundException: + // org.enso.interpreter.node.expression.builtin.bool.True + ide_ci::fs::remove_if_exists(&self.paths.repo_root.engine.runtime.target)?; + + // We want to start this earlier, and await only before Engine build starts. + let perhaps_generate_java_from_rust_job = + ide_ci::future::perhaps(self.config.generate_java_from_rust, || { + crate::rust::parser::generate_java(&self.paths.repo_root) + }); + + // Download Project Template Files + let client = reqwest::Client::new(); + download_project_templates(client.clone(), self.paths.repo_root.path.clone()).await?; + + // let sbt = WithCwd::new(Sbt, &self.paths.repo_root); + + let mut system = sysinfo::System::new(); + system.refresh_memory(); + trace!("Total memory: {}", system.total_memory()); + trace!("Available memory: {}", system.available_memory()); + trace!("Used memory: {}", system.used_memory()); + trace!("Free memory: {}", system.free_memory()); + + // Build packages. 
+ debug!("Bootstrapping Enso project."); + let sbt = engine::sbt::Context { + repo_root: self.paths.repo_root.path.clone(), + system_properties: vec![sbt::SystemProperty::new( + "bench.compileOnly", + self.config.execute_benchmarks_once.to_string(), + )], + }; + + sbt.call_arg("bootstrap").await?; + + perhaps_generate_java_from_rust_job.await.transpose()?; + let perhaps_test_java_generated_from_rust_job = + ide_ci::future::perhaps(self.config.test_java_generated_from_rust, || { + crate::rust::parser::run_self_tests(&self.paths.repo_root) + }); + + // If we have much memory, we can try building everything in a single batch. Reducing number + // of SBT invocations significantly helps build time. However, it is more memory heavy, so + // we don't want to call this in environments like GH-hosted runners. + let github_hosted_macos_memory = 15_032_385; + let big_memory_machine = system.total_memory() > github_hosted_macos_memory; + // Windows native runner is not yet supported. + let build_native_runner = + self.config.build_engine_package() && big_memory_machine && TARGET_OS != OS::Windows; + + if big_memory_machine { + let mut tasks = vec![]; + + if self.config.build_engine_package() { + tasks.push("buildEngineDistribution"); + tasks.push("engine-runner/assembly"); + ret.packages.engine = Some(self.paths.engine.clone()); + } + if build_native_runner { + tasks.push("engine-runner-native/buildNativeImage"); + } + + if TARGET_OS != OS::Windows { + // FIXME [mwu] apparently this is broken on Windows because of the line endings + // mismatch + tasks.push("verifyLicensePackages"); + } + + if self.config.build_project_manager_package() { + tasks.push("buildProjectManagerDistribution"); + ret.packages.project_manager = Some(self.paths.project_manager.clone()); + } + + if self.config.build_launcher_package() { + tasks.push("buildLauncherDistribution"); + ret.packages.launcher = Some(self.paths.launcher.clone()); + } + + // This just compiles benchmarks, not run them. 
At least we'll know that they can be + // run. Actually running them, as part of this routine, would be too heavy. + // TODO [mwu] It should be possible to run them through context config option. + if self.config.build_benchmarks { + tasks.extend([ + "runtime/Benchmark/compile", + "language-server/Benchmark/compile", + "searcher/Benchmark/compile", + ]); + } + + for benchmark in &self.config.execute_benchmarks { + tasks.push(benchmark.sbt_task()); + } + + if !tasks.is_empty() { + let build_stuff = Sbt::concurrent_tasks(tasks); + sbt.call_arg(build_stuff).await?; + } + } else { + // Compile + sbt.call_arg("compile").await?; + + // Build the Runner & Runtime Uberjars + sbt.call_arg("engine-runner/assembly").await?; + + // Build the Launcher Native Image + sbt.call_arg("launcher/assembly").await?; + sbt.call_args(&["--mem", "1536", "launcher/buildNativeImage"]).await?; + + // Build the PM Native Image + sbt.call_arg("project-manager/assembly").await?; + sbt.call_args(&["--mem", "1536", "project-manager/buildNativeImage"]).await?; + + // Prepare Launcher Distribution + //create_launcher_package(&paths)?; + sbt.call_arg("buildLauncherDistribution").await?; + + // Prepare Engine Distribution + sbt.call_arg("buildEngineDistribution").await?; + + // Prepare Project Manager Distribution + sbt.call_arg("buildProjectManagerDistribution").await?; + + if self.config.build_benchmarks { + // Check Runtime Benchmark Compilation + sbt.call_arg("runtime/Benchmark/compile").await?; + + // Check Language Server Benchmark Compilation + sbt.call_arg("language-server/Benchmark/compile").await?; + + // Check Searcher Benchmark Compilation + sbt.call_arg("searcher/Benchmark/compile").await?; + } + + for benchmark in &self.config.execute_benchmarks { + sbt.call_arg(benchmark.sbt_task()).await?; + } + } + + // If we were running any benchmarks, they are complete by now. Upload the report. 
+ if is_in_env() { + let path = &self.paths.repo_root.engine.runtime.bench_report_xml; + if path.exists() { + ide_ci::actions::artifacts::upload_single_file( + &self.paths.repo_root.engine.runtime.bench_report_xml, + "Runtime Benchmark Report", + ) + .await?; + } else { + info!("No benchmark file found at {}, nothing to upload.", path.display()); + } + } + + if self.config.test_scala { + // Test Enso + sbt.call_arg("set Global / parallelExecution := false; test").await?; + } + + perhaps_test_java_generated_from_rust_job.await.transpose()?; + + // === Build Distribution === + if self.config.generate_documentation { + // FIXME [mwu] + // docs-generator fails on Windows because it can't understand non-Unix-style paths. + if TARGET_OS != OS::Windows { + // Build the docs from standard library sources. + sbt.call_arg("docs-generator/run").await?; + } + } + + if self.config.build_js_parser { + // Build the Parser JS Bundle + sbt.call_arg("syntaxJS/fullOptJS").await?; + ide_ci::fs::copy_to( + self.paths.target.join("scala-parser.js"), + self.paths.target.join("parser-upload"), + )?; + } + + + let enso = BuiltEnso { paths: self.paths.clone() }; + if self.config.test_standard_library { + enso.run_tests(IrCaches::No, PARALLEL_ENSO_TESTS).await?; + } + + if self.config.build_engine_package() { + let std_libs = self.paths.engine.dir.join("lib").join("Standard"); + // Compile the Standard Libraries (Unix) + debug!("Compiling standard libraries under {}", std_libs.display()); + for entry in ide_ci::fs::read_dir(&std_libs)? 
{ + let entry = entry?; + let target = entry.path().join(self.paths.version().to_string()); + enso.compile_lib(target)?.run_ok().await?; + } + } + + if self.config.test_standard_library { + enso.run_tests(IrCaches::Yes, PARALLEL_ENSO_TESTS).await?; + } + + // if build_native_runner { + // Command::new("./runner") + // .current_dir(&self.repo_root) + // .args(["--run", "./engine/runner-native/src/test/resources/Factorial.enso"]) + // .run_ok() + // .await?; + // } + + // Verify License Packages in Distributions + // FIXME apparently this does not work on Windows due to some CRLF issues? + if self.config.verify_packages && TARGET_OS != OS::Windows { + /* refversion=${{ env.ENSO_VERSION }} + binversion=${{ env.DIST_VERSION }} + engineversion=$(${{ env.ENGINE_DIST_DIR }}/bin/enso --version --json | jq -r '.version') + test $binversion = $refversion || (echo "Tag version $refversion and the launcher version $binversion do not match" && false) + test $engineversion = $refversion || (echo "Tag version $refversion and the engine version $engineversion do not match" && false) + */ + + if self.config.build_engine_package() { + sbt.verify_generated_package("engine", &self.paths.engine.dir).await?; + } + if self.config.build_launcher_package() { + sbt.verify_generated_package("launcher", &self.paths.launcher.dir).await?; + } + if self.config.build_project_manager_package() { + sbt.verify_generated_package("project-manager", &self.paths.project_manager.dir) + .await?; + } + if self.config.build_engine_package { + for libname in ["Base", "Table", "Image", "Database"] { + let lib_path = self + .paths + .engine + .dir + .join_iter(["lib", "Standard", libname]) + .join(self.paths.version().to_string()); + sbt.verify_generated_package(libname, lib_path).await?; + } + } + } + + if self.config.build_engine_package { + if TARGET_OS == OS::Linux && ide_ci::ci::run_in_ci() { + self.paths.upload_edition_file_artifact().await?; + } + + let schema_dir = self.paths.repo_root.join_iter([ + 
"engine", + "language-server", + "src", + "main", + "schema", + ]); + if is_in_env() { + ide_ci::actions::artifacts::upload_compressed_directory(&schema_dir, "fbs-schema") + .await?; + } + } + + if self.config.build_launcher_bundle { + ret.bundles.launcher = + Some(crate::engine::bundle::Launcher::create(&self.paths).await?); + } + + if self.config.build_project_manager_bundle { + ret.bundles.project_manager = + Some(crate::engine::bundle::ProjectManager::create(&self.paths).await?); + } + + Ok(ret) + } + + pub async fn execute(&self, operation: Operation) -> Result { + match &operation { + Operation::Release(ReleaseOperation { command, repo }) => match command { + ReleaseCommand::Upload => { + let artifacts = self.build().await?; + + // Make packages. + let release_id = crate::env::ReleaseId.fetch()?; + let client = ide_ci::github::create_client(retrieve_github_access_token()?)?; + let upload_asset = |asset: PathBuf| { + ide_ci::github::release::upload_asset(repo, &client, release_id, asset) + }; + for package in artifacts.packages.into_iter() { + package.pack().await?; + upload_asset(package.artifact_archive).await?; + } + for bundle in artifacts.bundles.into_iter() { + bundle.pack().await?; + upload_asset(bundle.artifact_archive).await?; + } + if TARGET_OS == OS::Linux { + upload_asset(self.paths.manifest_file()).await?; + upload_asset(self.paths.launcher_manifest_file()).await?; + } + } + }, + Operation::Run(run) => { + // Build environment preparations. + self.prepare_build_env().await?; + let mut run = run.command_pieces.iter(); + if let Some(program) = run.next() { + debug!("Resolving program: {}", program.as_str()); + let exe_path = ide_ci::program::lookup(program.as_str())?; + ide_ci::program::Command::new(exe_path) + .args(run) + .current_dir(&self.paths.repo_root) + .spawn()? + .wait() + .await? 
+ .exit_ok()?; + } else { + debug!("Spawning default shell."); + let mut shell = + DEFAULT_SHELL.run_shell()?.current_dir(&self.paths.repo_root).spawn()?; + shell.wait_ok().await?; + } + } + Operation::Build => { + self.build().boxed().await?; + } + }; + + Ok(()) + } +} diff --git a/build/build/src/engine/env.rs b/build/build/src/engine/env.rs new file mode 100644 index 0000000000..c39091e64a --- /dev/null +++ b/build/build/src/engine/env.rs @@ -0,0 +1,21 @@ +//! Environment variables used by the engine's SBT-based build system. + +//use crate::prelude::*; + +use ide_ci::env::Variable; + + + +#[derive(Clone, Copy, Debug)] +pub struct CiTestTimeFactor; +impl Variable for CiTestTimeFactor { + const NAME: &'static str = "CI_TEST_TIMEFACTOR"; + type Value = usize; +} + +#[derive(Clone, Copy, Debug)] +pub struct CiFlakyTestEnable; +impl Variable for CiFlakyTestEnable { + const NAME: &'static str = "CI_TEST_FLAKY_ENABLE"; + type Value = bool; +} diff --git a/build/build/src/engine/sbt.rs b/build/build/src/engine/sbt.rs new file mode 100644 index 0000000000..99e20df80e --- /dev/null +++ b/build/build/src/engine/sbt.rs @@ -0,0 +1,46 @@ +//! This module wraps SBT commands that are provided by the Enso Engine's SBT build scripts. 
+ +use crate::prelude::*; + +use ide_ci::program::command::provider::CommandProviderExt; +use ide_ci::programs::sbt; +use ide_ci::programs::Sbt; + + + +pub fn verify_generated_package_task(package: &str, path: impl AsRef) -> String { + format!( + "enso/verifyGeneratedPackage {} {}", + package, + path.as_ref().join("THIRD-PARTY").display() + ) +} + +pub trait SbtCommandProvider: CommandProvider { + fn verify_generated_package( + &self, + package: &str, + path: impl AsRef, + ) -> BoxFuture<'static, Result> { + self.call_arg(verify_generated_package_task(package, path)) + } +} + +#[derive(Clone, Debug)] +pub struct Context { + pub repo_root: PathBuf, + pub system_properties: Vec, +} + +impl CommandProvider for Context { + fn command(&self) -> Result { + let mut cmd = Sbt.cmd()?; + cmd.current_dir(&self.repo_root); + for property in &self.system_properties { + cmd.args(property); + } + Ok(cmd) + } +} + +impl SbtCommandProvider for Context {} diff --git a/build/build/src/enso.rs b/build/build/src/enso.rs new file mode 100644 index 0000000000..8f7dee9b13 --- /dev/null +++ b/build/build/src/enso.rs @@ -0,0 +1,156 @@ +use crate::prelude::*; + +use crate::paths::Paths; +use crate::postgres; +use crate::postgres::EndpointConfiguration; +use crate::postgres::Postgresql; + +use ide_ci::env::Variable; +use ide_ci::future::AsyncPolicy; +use ide_ci::programs::docker::ContainerId; + + + +ide_ci::define_env_var! 
{ + ENSO_JVM_OPTS, String; +} + +#[derive(Copy, Clone, Debug)] +pub enum IrCaches { + Yes, + No, +} + +impl IrCaches { + pub fn flag(self) -> &'static str { + match self { + IrCaches::Yes => "--ir-caches", + IrCaches::No => "--no-ir-caches", + } + } +} + +impl AsRef for IrCaches { + fn as_ref(&self) -> &OsStr { + self.flag().as_ref() + } +} + +#[derive(Clone, Debug)] +pub struct BuiltEnso { + pub paths: Paths, +} + +impl BuiltEnso { + pub fn wrapper_script_path(&self) -> PathBuf { + self.paths.engine.dir.join("bin").join("enso") + } + + pub fn run_test(&self, test: impl AsRef, ir_caches: IrCaches) -> Result { + let test_path = self.paths.stdlib_test(test); + let mut command = self.cmd()?; + command + .arg(ir_caches) + .arg("--run") + .arg(test_path) + // This flag enables assertions in the JVM. Some of our stdlib tests had in the past + // failed on Graal/Truffle assertions, so we want to have them triggered. + .set_env(ENSO_JVM_OPTS, &ide_ci::programs::java::Option::EnableAssertions.as_ref())?; + Ok(command) + } + + pub fn compile_lib(&self, target: impl AsRef) -> Result { + ide_ci::fs::require_exist(&target)?; + let mut command = self.cmd()?; + command + .arg(IrCaches::Yes) + .args(["--no-compile-dependencies", "--no-global-cache", "--compile"]) + .arg(target.as_ref()); + Ok(command) + } + + pub async fn run_tests(&self, ir_caches: IrCaches, async_policy: AsyncPolicy) -> Result { + let paths = &self.paths; + // Prepare Engine Test Environment + if let Ok(gdoc_key) = std::env::var("GDOC_KEY") { + let google_api_test_data_dir = + paths.repo_root.join("test").join("Google_Api_Test").join("data"); + ide_ci::fs::create_dir_if_missing(&google_api_test_data_dir)?; + ide_ci::fs::write(google_api_test_data_dir.join("secret.json"), &gdoc_key)?; + } + + let _httpbin = crate::httpbin::get_and_spawn_httpbin_on_free_port().await?; + let _postgres = match TARGET_OS { + OS::Linux => { + let runner_context_string = crate::env::RunnerContainerName + .fetch() + .map(|name| name.0) 
+ .or_else(|_| ide_ci::actions::env::RUNNER_NAME.get()) + .unwrap_or_else(|_| Uuid::new_v4().to_string()); + // GH-hosted runners are named like "GitHub Actions 10". Spaces are not allowed in + // the container name. + let container_name = + iformat!("postgres-for-{runner_context_string}").replace(' ', "_"); + let config = postgres::Configuration { + postgres_container: ContainerId(container_name), + database_name: "enso_test_db".to_string(), + user: "enso_test_user".to_string(), + password: "enso_test_password".to_string(), + endpoint: EndpointConfiguration::deduce()?, + version: "latest".to_string(), + }; + let postgres = Postgresql::start(config).await?; + Some(postgres) + } + _ => None, + }; + + let futures = crate::paths::LIBRARIES_TO_TEST.map(ToString::to_string).map(|test| { + let command = self.run_test(test, ir_caches); + async move { command?.run_ok().await } + }); + + // We need to join all the test tasks here, as they require postgres and httpbin alive. + // Could share them with Arc but then scenario of multiple test runs being run in parallel + // should be handled, e.g. avoiding port collisions. + let results = ide_ci::future::join_all(futures, async_policy).await; + let errors = results.into_iter().filter_map(Result::err).collect::>(); + if errors.is_empty() { + Ok(()) + } else { + error!("{} test suit(s) failed.", errors.len()); + for error in &errors { + error!("{}", error); + } + bail!("Standard library tests failed. 
Details: {:?}.", errors); + } + } +} + +#[async_trait] +impl Program for BuiltEnso { + fn executable_name(&self) -> &str { + ide_ci::platform::DEFAULT_SHELL.executable_name() + } + + fn cmd(&self) -> Result { + ide_ci::platform::DEFAULT_SHELL.run_script(self.wrapper_script_path()) + } + + fn version_string(&self) -> BoxFuture<'static, Result> { + let command = self.cmd(); + async move { command?.args(["version", "--json", "--only-launcher"]).run_stdout().await } + .boxed() + } + + async fn version(&self) -> Result { + #[derive(Clone, Debug, Deserialize)] + struct VersionInfo { + version: Version, + } + + let stdout = self.version_string().await?; + let version = serde_json::from_str::(&stdout)?; + Ok(version.version) + } +} diff --git a/build/build/src/env.rs b/build/build/src/env.rs new file mode 100644 index 0000000000..a10d915498 --- /dev/null +++ b/build/build/src/env.rs @@ -0,0 +1,28 @@ +#[allow(unused_imports)] +use crate::prelude::*; + +use ide_ci::env::Variable; +use ide_ci::programs::docker::ContainerId; + + + +#[derive(Clone, Copy, Debug)] +pub struct ReleaseId; +impl Variable for ReleaseId { + const NAME: &'static str = "ENSO_RELEASE_ID"; + type Value = octocrab::models::ReleaseId; +} + +#[derive(Clone, Copy, Debug)] +pub struct RunnerContainerName; +impl Variable for RunnerContainerName { + const NAME: &'static str = "ENSO_RUNNER_CONTAINER_NAME"; + type Value = ContainerId; +} + +#[derive(Clone, Copy, Debug)] +pub struct NightlyEditionsLimit; +impl Variable for NightlyEditionsLimit { + const NAME: &'static str = "ENSO_NIGHTLY_EDITIONS_LIMIT"; + type Value = usize; +} diff --git a/build/build/src/httpbin.rs b/build/build/src/httpbin.rs new file mode 100644 index 0000000000..94d4c30a1e --- /dev/null +++ b/build/build/src/httpbin.rs @@ -0,0 +1,73 @@ +use crate::prelude::*; + +use ide_ci::env::Variable; +use ide_ci::programs::Go; +use tokio::process::Child; + + + +pub mod env { + /// Environment variable that stores URL under which spawned httpbin server is 
available. + #[derive(Clone, Copy, Debug)] + pub struct Url; + impl ide_ci::env::Variable for Url { + const NAME: &'static str = "ENSO_HTTP_TEST_HTTPBIN_URL"; + type Value = url::Url; + } +} + +#[derive(Debug)] +pub struct Spawned { + pub process: Child, + pub url: Url, +} + +pub async fn get_and_spawn_httpbin(port: u16) -> Result { + Go.cmd()? + .args(["install", "-v", "github.com/ahmetb/go-httpbin/cmd/httpbin@latest"]) + .run_ok() + .await?; + let gopath = Go.cmd()?.args(["env", "GOPATH"]).run_stdout().await?; + let gopath = gopath.trim(); + let gopath = PathBuf::from(gopath); // be careful of trailing newline! + let program = gopath.join("bin").join("httpbin"); + debug!("Will spawn {}", program.display()); + let process = Command::new(program) // TODO? wrap in Program? + .args(["-host", &format!(":{port}")]) + .kill_on_drop(true) + .spawn_intercepting() + .anyhow_err()?; + + let url_string = format!("http://localhost:{port}"); + let url = Url::parse(&url_string)?; + env::Url.set(&url); + Ok(Spawned { url, process }) +} + +impl Drop for Spawned { + fn drop(&mut self) { + debug!("Dropping the httpbin wrapper."); + env::Url.remove(); + } +} + +pub async fn get_and_spawn_httpbin_on_free_port() -> Result { + get_and_spawn_httpbin(ide_ci::get_free_port()?).await +} + +#[cfg(test)] +mod tests { + use crate::project::ProcessWrapper; + + use super::*; + + + #[tokio::test] + #[ignore] + async fn spawn() -> Result { + let mut spawned = get_and_spawn_httpbin_on_free_port().await?; + dbg!(&spawned); + spawned.process.wait_ok().await?; + Ok(()) + } +} diff --git a/build/build/src/ide.rs b/build/build/src/ide.rs new file mode 100644 index 0000000000..dc40efbaf1 --- /dev/null +++ b/build/build/src/ide.rs @@ -0,0 +1,8 @@ +// use crate::prelude::*; + + +// ============== +// === Export === +// ============== + +pub mod web; diff --git a/build/build/src/ide/web.rs b/build/build/src/ide/web.rs new file mode 100644 index 0000000000..157a3e8241 --- /dev/null +++ 
b/build/build/src/ide/web.rs @@ -0,0 +1,410 @@ +use crate::prelude::*; + +use crate::ide::web::env::CSC_KEY_PASSWORD; +use crate::paths::generated; +use crate::project::gui::BuildInfo; +use crate::project::wasm; +use crate::project::ProcessWrapper; + +use anyhow::Context; +use futures_util::future::try_join; +use futures_util::future::try_join4; +use ide_ci::io::download_all; +use ide_ci::models::config::RepoContext; +use ide_ci::program::command; +use ide_ci::program::EMPTY_ARGS; +use ide_ci::programs::node::NpmCommand; +use ide_ci::programs::Npm; +use octocrab::models::repos::Content; +use std::process::Stdio; +use tempfile::TempDir; +use tokio::process::Child; +use tracing::Span; + + + +lazy_static! { + /// Path to the file with build information that is consumed by the JS part of the IDE. + /// + /// The file must follow the schema of type [`BuildInfo`]. + pub static ref BUILD_INFO: PathBuf = PathBuf::from("build.json"); +} + +pub const IDE_ASSETS_URL: &str = + "https://github.com/enso-org/ide-assets/archive/refs/heads/main.zip"; + +pub const ARCHIVED_ASSET_FILE: &str = "ide-assets-main/content/assets/"; + +pub const GOOGLE_FONTS_REPOSITORY: &str = "google/fonts"; + +pub const GOOGLE_FONT_DIRECTORY: &str = "ofl"; + +pub mod env { + use super::*; + + use ide_ci::define_env_var; + + define_env_var! { + ENSO_BUILD_IDE, PathBuf; + ENSO_BUILD_PROJECT_MANAGER, PathBuf; + ENSO_BUILD_GUI, PathBuf; + ENSO_BUILD_ICONS, PathBuf; + ENSO_BUILD_GUI_WASM, PathBuf; + ENSO_BUILD_GUI_JS_GLUE, PathBuf; + ENSO_BUILD_GUI_ASSETS, PathBuf; + ENSO_BUILD_IDE_BUNDLED_ENGINE_VERSION, Version; + ENSO_BUILD_PROJECT_MANAGER_IN_BUNDLE_PATH, PathBuf; + } + + // === Electron Builder === + // Variables introduced by the Electron Builder itself. + // See: https://www.electron.build/code-signing + + define_env_var! { + /// The HTTPS link (or base64-encoded data, or file:// link, or local path) to certificate + /// (*.p12 or *.pfx file). Shorthand ~/ is supported (home directory). 
+ WIN_CSC_LINK, String; + + /// The password to decrypt the certificate given in WIN_CSC_LINK. + WIN_CSC_KEY_PASSWORD, String; + + /// The HTTPS link (or base64-encoded data, or file:// link, or local path) to certificate + /// (*.p12 or *.pfx file). Shorthand ~/ is supported (home directory). + CSC_LINK, String; + + /// The password to decrypt the certificate given in CSC_LINK. + CSC_KEY_PASSWORD, String; + + /// The username of apple developer account. + APPLEID, String; + + /// The app-specific password (not Apple ID password). See: + /// https://support.apple.com/HT204397 + APPLEIDPASS, String; + } +} + +#[derive(Clone, Debug)] +pub struct IconsArtifacts(pub PathBuf); + +impl command::FallibleManipulator for IconsArtifacts { + fn try_applying(&self, command: &mut C) -> Result { + command.set_env(env::ENSO_BUILD_ICONS, &self.0)?; + Ok(()) + } +} + +#[context("Failed to download Google font '{family}'.")] +#[instrument(fields(output_path = %output_path.as_ref().display()), ret, err, skip(octocrab))] +pub async fn download_google_font( + octocrab: &Octocrab, + family: &str, + output_path: impl AsRef, +) -> Result> { + let destination_dir = output_path.as_ref(); + let repo = RepoContext::from_str(GOOGLE_FONTS_REPOSITORY)?; + let path = format!("{GOOGLE_FONT_DIRECTORY}/{family}"); + let files = repo.repos(octocrab).get_content().path(path).send().await?; + let ttf_files = + files.items.into_iter().filter(|file| file.name.ends_with(".ttf")).collect_vec(); + for file in &ttf_files { + let destination_file = destination_dir.join(&file.name); + let url = file.download_url.as_ref().context("Missing 'download_url' in the reply.")?; + let reply = ide_ci::io::web::client::download(&octocrab.client, url).await?; + ide_ci::io::web::stream_to_file(reply, &destination_file).await?; + } + Ok(ttf_files) +} + +/// Fill the directory under `output_path` with the assets. 
+pub async fn download_js_assets(output_path: impl AsRef) -> Result { + let output = output_path.as_ref(); + let archived_asset_prefix = PathBuf::from(ARCHIVED_ASSET_FILE); + let archive = download_all(IDE_ASSETS_URL).await?; + let mut archive = zip::ZipArchive::new(std::io::Cursor::new(archive))?; + ide_ci::archive::zip::extract_subtree(&mut archive, &archived_asset_prefix, output)?; + Ok(()) +} + +#[derive(Clone, Copy, Debug)] +pub enum Workspaces { + Icons, + Content, + /// The Electron client. + Enso, +} + +impl AsRef for Workspaces { + fn as_ref(&self) -> &OsStr { + match self { + Workspaces::Icons => OsStr::new("enso-studio-icons"), + Workspaces::Content => OsStr::new("enso-studio-content"), + Workspaces::Enso => OsStr::new("enso"), + } + } +} + +#[derive(Clone, Copy, Debug)] +pub enum Command { + Build, + Watch, +} + +/// Things that are common to `watch` and `build`. +#[derive(Debug)] +pub struct ContentEnvironment { + asset_dir: Assets, + wasm: wasm::Artifact, + output_path: Output, +} + +impl> ContentEnvironment { + pub async fn new( + ide: &IdeDesktop, + wasm: impl Future>, + build_info: &BuildInfo, + output_path: Output, + ) -> Result { + let installation = ide.install(); + let asset_dir = TempDir::new()?; + let assets_download = download_js_assets(&asset_dir); + let fonts_download = download_google_font(&ide.octocrab, "mplus1", &asset_dir); + let (wasm, _, _, _) = + try_join4(wasm, installation, assets_download, fonts_download).await?; + ide.write_build_info(build_info)?; + Ok(ContentEnvironment { asset_dir, wasm, output_path }) + } +} + +impl, Output: AsRef> command::FallibleManipulator + for ContentEnvironment +{ + fn try_applying(&self, command: &mut C) -> Result { + command + .set_env(env::ENSO_BUILD_GUI, self.output_path.as_ref())? + .set_env(env::ENSO_BUILD_GUI_WASM, &self.wasm.wasm())? + .set_env(env::ENSO_BUILD_GUI_JS_GLUE, &self.wasm.js_glue())? 
+ .set_env(env::ENSO_BUILD_GUI_ASSETS, self.asset_dir.as_ref())?; + Ok(()) + } +} + +impl Drop for ContentEnvironment { + fn drop(&mut self) { + info!("Dropping content environment.") + } +} + +pub fn target_flag(os: OS) -> Result<&'static str> { + match os { + OS::Windows => Ok("--win"), + OS::Linux => Ok("--linux"), + OS::MacOS => Ok("--mac"), + _ => bail!("Not supported target for Electron client: {os}."), + } +} + +#[derive(Clone, Debug)] +pub struct IdeDesktop { + pub build_sbt: generated::RepoRootBuildSbt, + pub package_dir: generated::RepoRootAppIdeDesktop, + pub octocrab: Octocrab, + pub cache: ide_ci::cache::Cache, +} + +impl IdeDesktop { + pub fn new( + repo_root: &generated::RepoRoot, + octocrab: Octocrab, + cache: ide_ci::cache::Cache, + ) -> Self { + Self { + build_sbt: repo_root.build_sbt.clone(), + package_dir: repo_root.app.ide_desktop.clone(), + octocrab, + cache, + } + } + + pub fn npm(&self) -> Result { + let mut command = Npm.cmd()?; + command.arg("--color").arg("always"); + command.arg("--yes"); + command.current_dir(&self.package_dir); + command.stdin(Stdio::null()); // nothing in that process subtree should require input + Ok(command) + } + + pub fn write_build_info(&self, info: &BuildInfo) -> Result { + let path = self.package_dir.join(&*BUILD_INFO); + path.write_as_json(info) + } + + pub async fn install(&self) -> Result { + self.npm()?.install().run_ok().await?; + self.npm()?.install().arg("--workspaces").run_ok().await?; + Ok(()) + } + + pub async fn build_icons(&self, output_path: impl AsRef) -> Result { + self.npm()? + .workspace(Workspaces::Icons) + .set_env(env::ENSO_BUILD_ICONS, output_path.as_ref())? 
+ .run("build", EMPTY_ARGS) + .run_ok() + .await?; + Ok(IconsArtifacts(output_path.as_ref().into())) + } + + #[tracing::instrument(name="Building IDE Content.", skip_all, fields( + dest = %output_path.as_ref().display(), + build_info, + err))] + pub async fn build_content( + &self, + wasm: impl Future>, + build_info: &BuildInfo, + output_path: impl AsRef, + ) -> Result { + let env = ContentEnvironment::new(self, wasm, build_info, output_path).await?; + //env.apply(); + self.npm()? + .try_applying(&env)? + .workspace(Workspaces::Content) + .run("build", EMPTY_ARGS) + .run_ok() + .await?; + + debug!(assets=?env.asset_dir, "Still kept"); + drop(env); // does this extend the lifetime? + Ok(()) + } + + + #[tracing::instrument(name="Setting up GUI Content watcher.", + fields(wasm = tracing::field::Empty), + err)] + pub async fn watch_content( + &self, + wasm: impl Future>, + build_info: &BuildInfo, + shell: bool, + ) -> Result { + // When watching we expect our artifacts to be served through server, not appear in any + // specific location on the disk. + let output_path = TempDir::new()?; + // let span = tracing:: + // let wasm = wasm.inspect() + let watch_environment = + ContentEnvironment::new(self, wasm, build_info, output_path).await?; + Span::current().record("wasm", watch_environment.wasm.as_str()); + let child_process = if shell { + ide_ci::os::default_shell() + .cmd()? + .current_dir(&self.package_dir) + .try_applying(&watch_environment)? + .stdin(Stdio::inherit()) + .spawn()? + } else { + self.npm()? + .try_applying(&watch_environment)? + .workspace(Workspaces::Content) + .run("watch", EMPTY_ARGS) + .spawn_intercepting()? 
+ }; + Ok(Watcher { child_process, watch_environment }) + } + + #[tracing::instrument(name="Preparing distribution of the IDE.", skip_all, fields( + dest = %output_path.as_ref().display(), + ?gui, + ?project_manager, + ?target_os, + err))] + pub async fn dist( + &self, + gui: &crate::project::gui::Artifact, + project_manager: &crate::project::backend::Artifact, + output_path: impl AsRef, + target_os: OS, + ) -> Result { + if TARGET_OS == OS::MacOS && CSC_KEY_PASSWORD.is_set() { + // This means that we will be doing code signing on MacOS. This requires JDK environment + // to be set up. + let graalvm = + crate::engine::deduce_graal(self.octocrab.clone(), &self.build_sbt).await?; + graalvm.install_if_missing(&self.cache).await?; + } + + self.npm()?.install().run_ok().await?; + + let engine_version_to_use = project_manager.engine_versions.iter().max(); + if engine_version_to_use.is_none() { + warn!("Bundled Project Manager does not contain any Engine."); + } + + let pm_in_bundle = project_manager + .path + .bin + .project_managerexe + .strip_prefix(&project_manager.path) + .context("Failed to generate in-bundle path to Project Manager executable")?; + + let content_build = self + .npm()? + .set_env(env::ENSO_BUILD_GUI, gui.as_ref())? + .set_env(env::ENSO_BUILD_PROJECT_MANAGER, project_manager.as_ref())? + .set_env(env::ENSO_BUILD_IDE, output_path.as_ref())? + .set_env_opt(env::ENSO_BUILD_IDE_BUNDLED_ENGINE_VERSION, engine_version_to_use)? + .set_env(env::ENSO_BUILD_PROJECT_MANAGER_IN_BUNDLE_PATH, pm_in_bundle)? + .workspace(Workspaces::Enso) + .run("build", EMPTY_ARGS) + .run_ok(); + + // &input.repo_root.dist.icons + let icons_dist = TempDir::new()?; + let icons_build = self.build_icons(&icons_dist); + let (icons, _content) = try_join(icons_build, content_build).await?; + + + self.npm()? + .try_applying(&icons)? + // .env("DEBUG", "electron-builder") + .set_env(env::ENSO_BUILD_GUI, gui.as_ref())? + .set_env(env::ENSO_BUILD_IDE, output_path.as_ref())? 
+ .set_env(env::ENSO_BUILD_PROJECT_MANAGER, project_manager.as_ref())? + .workspace(Workspaces::Enso) + // .args(["--loglevel", "verbose"]) + .run("dist", EMPTY_ARGS) + .arg("--") + .arg(target_flag(target_os)?) + .run_ok() + .await?; + + Ok(()) + } +} + +#[derive(Debug)] +pub struct Watcher { + pub watch_environment: ContentEnvironment, + pub child_process: Child, +} + +impl ProcessWrapper for Watcher { + fn inner(&mut self) -> &mut Child { + &mut self.child_process + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn download_test() -> Result { + let temp = TempDir::new()?; + download_js_assets(temp.path()).await?; + Ok(()) + } +} diff --git a/build/build/src/lib.rs b/build/build/src/lib.rs new file mode 100644 index 0000000000..40fa729023 --- /dev/null +++ b/build/build/src/lib.rs @@ -0,0 +1,200 @@ +// === Features === +#![feature(hash_set_entry)] +#![feature(type_alias_impl_trait)] +#![feature(trait_alias)] +#![feature(let_chains)] +#![feature(exit_status_error)] +#![feature(async_closure)] +#![feature(associated_type_bounds)] +#![feature(option_result_contains)] +#![feature(result_flattening)] +#![feature(default_free_fn)] +#![feature(map_first_last)] +#![feature(result_option_inspect)] +#![feature(associated_type_defaults)] +#![feature(once_cell)] +#![feature(duration_constants)] +#![feature(slice_take)] +// === Standard Linter Configuration === +#![deny(non_ascii_idents)] +#![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] +#![allow(clippy::let_and_return)] +// === Non-Standard Linter Configuration === +#![warn(missing_copy_implementations)] +#![warn(missing_debug_implementations)] +#![warn(trivial_casts)] +#![warn(trivial_numeric_casts)] +#![warn(unused_import_braces)] +#![warn(unused_qualifications)] + +use crate::prelude::*; + +use anyhow::Context; +use ide_ci::programs::java; +use regex::Regex; + + + +pub mod prelude { + pub use ide_ci::prelude::*; +} + +pub mod aws; +pub mod bump_version; +pub mod changelog; +pub 
mod ci; +pub mod config; +pub mod context; +pub mod engine; +pub mod enso; +pub mod env; +pub mod httpbin; +pub mod ide; +pub mod paths; +pub mod postgres; +pub mod prettier; +pub mod programs; +pub mod project; +pub mod project_manager; +pub mod release; +pub mod repo; +pub mod rust; +pub mod source; +pub mod version; + +/// Get version of Enso from the `build.sbt` file contents. +pub fn get_enso_version(build_sbt_contents: &str) -> Result { + let version_regex = Regex::new(r#"(?m)^val *ensoVersion *= *"([^"]*)".*$"#)?; + let version_string = version_regex + .captures(build_sbt_contents) + .context("Failed to find line with version string.")? + .get(1) + // The `expect` below will not fail due to the regex definition, as is ensured by unit test. + .expect("Missing subcapture #1 with version despite matching the regex.") + .as_str(); + Version::parse(version_string).anyhow_err() +} + +pub fn get_string_assignment_value( + build_sbt_contents: &str, + variable_name: &str, +) -> Result { + let regex_text = format!(r#"(?m)^val *{variable_name} *= *"([^"]*)".*$"#); + let regex = Regex::new(&regex_text)?; + Ok(regex + .captures(build_sbt_contents) + .context(format!( + "Failed to find line with assignment to `{variable_name}`. Does it match the following regex? {regex_text} " + ) + )? + .get(1) + // The below denotes an internal error in our regex syntax, we do want panic. + .expect("Missing subcapture #1 with version despite matching the regex.") + .as_str() + .to_string()) +} + +/// Get version of GraalVM from the `build.sbt` file contents. +pub fn get_graal_version(build_sbt_contents: &str) -> Result { + get_string_assignment_value(build_sbt_contents, "graalVersion")?.parse2() +} + +/// Get the Java major version from the `build.sbt` file contents. 
+pub fn get_java_major_version(build_sbt_contents: &str) -> Result { + get_string_assignment_value(build_sbt_contents, "javaVersion")?.parse2() +} + +pub fn retrieve_github_access_token() -> Result { + fn get_token_from_file() -> Result { + let path = + dirs::home_dir().context("Failed to locate home directory.")?.join("GITHUB_TOKEN"); + let content = ide_ci::fs::read_to_string(path)?; + Ok(content.trim().into()) + } + + ide_ci::env::expect_var("GITHUB_TOKEN") + .inspect(|_| debug!("Will use GITHUB_TOKEN environment variable.")) + .or_else(|_| get_token_from_file()) +} + +#[context("Failed to setup GitHub API client.")] +pub async fn setup_octocrab() -> Result { + let mut builder = octocrab::OctocrabBuilder::new(); + if let Ok(access_token) = retrieve_github_access_token() { + builder = builder.personal_token(access_token); + let octocrab = builder.build()?; + match octocrab.ratelimit().get().await { + Ok(rate) => info!( + "GitHub API rate limit: {}/{}.", + rate.resources.core.used, rate.resources.core.limit + ), + Err(e) => bail!( + "Failed to get rate limit info: {e}. GitHub Personal Access Token might be invalid." 
+ ), + } + Ok(octocrab) + } else { + builder.build().anyhow_err() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + #[ignore] + async fn setup_octocrab_test() -> Result { + let _client = setup_octocrab().await?; + Ok(()) + } + + + #[test] + pub fn get_enso_version_test() -> Result { + let contents = r#" +val scalacVersion = "2.13.6" +val rustVersion = "1.58.0-nightly" +val graalVersion = "21.1.0" +val javaVersion = "11" +val ensoVersion = "0.2.32-SNAPSHOT" // Note [Engine And Launcher Version] +val currentEdition = "2021.20-SNAPSHOT" // Note [Default Editions] +val stdLibVersion = ensoVersion +"#; + let version = get_enso_version(contents)?; + assert_eq!(version.major, 0); + assert_eq!(version.minor, 2); + assert_eq!(version.patch, 32); + assert_eq!(version.pre.as_str(), "SNAPSHOT"); + + debug!("{}\n{:?}", version, version); + Ok(()) + } + + #[test] + pub fn get_graal_version_test() -> Result { + let contents = r#" +val scalacVersion = "2.13.7" +val graalVersion = "21.1.0" +val javaVersion = "11" +val defaultDevEnsoVersion = "0.0.0-dev" +val ensoVersion = sys.env.getOrElse( + "ENSO_VERSION", + defaultDevEnsoVersion +) // Note [Engine And Launcher Version] +val currentEdition = sys.env.getOrElse( + "ENSO_EDITION", + defaultDevEnsoVersion +) // Note [Default Editions] + +// Note [Stdlib Version] +val stdLibVersion = defaultDevEnsoVersion +"#; + let version = get_graal_version(contents)?; + assert_eq!(version.major, 21); + assert_eq!(version.minor, 1); + assert_eq!(version.patch, 0); + Ok(()) + } +} diff --git a/build/build/src/paths.rs b/build/build/src/paths.rs new file mode 100644 index 0000000000..eab1e79e4d --- /dev/null +++ b/build/build/src/paths.rs @@ -0,0 +1,304 @@ +use crate::prelude::*; + +use crate::version::Versions; + +use std::env::consts::EXE_EXTENSION; +use std::fmt::Formatter; + + + +#[allow(clippy::all)] // [mwu] Little reason to bother in the generated code. 
+pub mod generated { + include!(concat!(env!("OUT_DIR"), "/paths.rs")); +} + +ide_ci::define_env_var! { + /// Directory where JUnit-format test run results are stored. + /// These are generated as part of the standard library test suite run. + ENSO_TEST_JUNIT_DIR, PathBuf; +} + +pub const EDITION_FILE_ARTIFACT_NAME: &str = "Edition File"; + +pub const LIBRARIES_TO_TEST: [&str; 6] = + ["Tests", "Table_Tests", "Geo_Tests", "Visualization_Tests", "Image_Tests", "Examples_Tests"]; + +pub const ARCHIVE_EXTENSION: &str = match TARGET_OS { + OS::Windows => "zip", + _ => "tar.gz", +}; + +pub fn new_repo_root(repo_root: impl Into, triple: &TargetTriple) -> generated::RepoRoot { + generated::RepoRoot::new_root(repo_root, triple.to_string(), triple.versions.edition_name()) +} + +#[derive(Clone, PartialEq, Eq, Debug, Default)] +pub struct ComponentPaths { + // e.g. `enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64` + pub name: PathBuf, + // e.g. H:\NBO\enso\built-distribution\enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64 + pub root: PathBuf, + // e.g. H:\NBO\enso\built-distribution\enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64\ + // enso-0.0.0-SNAPSHOT.2022-01-19 + pub dir: PathBuf, + // e.g. H:\NBO\enso\built-distribution\enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64.zip + pub artifact_archive: PathBuf, +} + +impl ComponentPaths { + pub fn new( + build_root: &Path, // e.g. 
H:\NBO\enso\built-distribution + name_prefix: &str, + dirname: &str, + triple: &TargetTriple, + ) -> Self { + let name = PathBuf::from(iformat!("{name_prefix}-{triple.engine()}")); + let root = build_root.join(&name); + let dir = root.join(dirname); + let artifact_archive = root.with_appended_extension(ARCHIVE_EXTENSION); + Self { name, root, dir, artifact_archive } + } + + pub fn emit_to_actions(&self, prefix: &str) -> Result { + let paths = [ + ("NAME", &self.name), + ("ROOT", &self.root), + ("DIR", &self.dir), + ("ARCHIVE", &self.artifact_archive), + ]; + for (what, path) in paths { + ide_ci::actions::workflow::set_env( + &iformat!("{prefix}_DIST_{what}"), + &path.to_string_lossy(), + )?; + } + Ok(()) + } +} + +pub fn pretty_print_arch(arch: Arch) -> &'static str { + match arch { + Arch::X86_64 => "amd64", + Arch::AArch64 => "aarch64", + _ => panic!("Unrecognized architecture {}", arch), + } +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct TargetTriple { + pub os: OS, + pub arch: Arch, + pub versions: Versions, +} + +impl TargetTriple { + /// Create a new triple with OS and architecture are inferred from the hosting system. + pub fn new(versions: Versions) -> Self { + Self { os: TARGET_OS, arch: TARGET_ARCH, versions } + } + + /// Get the triple effectively used by the Engine build. + /// + /// As the GraalVM we use does not support native Aarch64 builds, it should be treated as amd64 + /// there. + pub fn engine(&self) -> Self { + let mut ret = self.clone(); + ret.arch = if self.arch == Arch::AArch64 && self.os == OS::MacOS { + Arch::X86_64 + } else { + self.arch + }; + ret + } + + /// Pretty prints architecture for our packages. Conform to GraalVM scheme as well. 
+ pub fn arch(&self) -> &'static str { + pretty_print_arch(self.arch) + } +} + +impl Display for TargetTriple { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}-{}-{}", self.versions.version, self.os, self.arch()) + } +} + +#[derive(Clone, Debug)] +pub struct Paths { + pub repo_root: generated::RepoRoot, + pub build_dist_root: PathBuf, + pub target: PathBuf, + pub launcher: ComponentPaths, + pub engine: ComponentPaths, + pub project_manager: ComponentPaths, + pub triple: TargetTriple, + pub test_results: PathBuf, +} + +impl Paths { + pub fn distribution(&self) -> PathBuf { + self.repo_root.join("distribution") + } + + /// Create a new set of paths for building the Enso with a given version number. + pub fn new_triple(repo_root: impl Into, triple: TargetTriple) -> Result { + let repo_root: PathBuf = repo_root.into().absolutize()?.into(); + let repo_root = new_repo_root(repo_root, &triple); + let build_dist_root = repo_root.join("built-distribution"); + let target = repo_root.join("target"); + let launcher = ComponentPaths::new(&build_dist_root, "enso-launcher", "enso", &triple); + let engine = ComponentPaths::new( + &build_dist_root, + "enso-engine", + &format!("enso-{}", &triple.versions.version), + &triple, + ); + let project_manager = + ComponentPaths::new(&build_dist_root, "enso-project-manager", "enso", &triple); + let test_results = target.join("test-results"); + Ok(Paths { + repo_root, + build_dist_root, + target, + launcher, + engine, + project_manager, + triple, + test_results, + }) + } + + /// Create a new set of paths for building the Enso with a given version number. + pub fn new_versions(repo_root: impl Into, versions: Versions) -> Result { + let triple = TargetTriple::new(versions); + Self::new_triple(repo_root, triple) + } + + /// Create a new set of paths for building the Enso with a given version number. 
+ pub fn new_version(repo_root: impl Into, version: Version) -> Result { + let versions = Versions::new(version); + Self::new_versions(repo_root, versions) + } + + /// Sets the environment variables in the current process and in GitHub Actions Runner (if being + /// run in its environment), so future steps of the job also have access to them. + pub fn emit_env_to_actions(&self) -> Result { + let components = [ + ("ENGINE", &self.engine), + ("LAUNCHER", &self.launcher), + ("PROJECTMANAGER", &self.project_manager), + ]; + + for (prefix, paths) in components { + paths.emit_to_actions(prefix)?; + } + + ide_ci::actions::workflow::set_env("TARGET_DIR", &self.target.to_string_lossy())?; + ENSO_TEST_JUNIT_DIR.set_workflow_env(self.test_results.as_path())?; + Ok(()) + } + + pub fn stdlib_tests(&self) -> PathBuf { + self.repo_root.join("test") + } + + pub fn stdlib_test(&self, test_name: impl AsRef) -> PathBuf { + self.stdlib_tests().join(test_name) + } + + pub fn changelog(&self) -> PathBuf { + root_to_changelog(&self.repo_root) + } + + pub fn edition_name(&self) -> String { + self.triple.versions.edition_name() + } + + pub fn manifest_file(&self) -> PathBuf { + self.engine.dir.join("manifest.yaml") + } + + pub fn launcher_manifest_file(&self) -> PathBuf { + self.distribution().join("launcher-manifest.yaml") + } + + // e.g. 
enso2\distribution\editions\2021.20-SNAPSHOT.yaml + pub fn edition_file(&self) -> PathBuf { + self.distribution() + .join_iter(["editions", &self.edition_name()]) + .with_appended_extension("yaml") + } + + pub async fn upload_edition_file_artifact(&self) -> Result { + ide_ci::actions::artifacts::upload_single_file( + self.edition_file(), + EDITION_FILE_ARTIFACT_NAME, + ) + .await + } + + pub async fn download_edition_file_artifact(&self) -> Result { + ide_ci::actions::artifacts::download_single_file_artifact( + EDITION_FILE_ARTIFACT_NAME, + self.edition_file(), + ) + .await + } + + pub fn version(&self) -> &Version { + &self.triple.versions.version + } +} + +pub fn root_to_changelog(root: impl AsRef) -> PathBuf { + let changelog_filename = "CHANGELOG.md"; + let root_path = root.as_ref().join(changelog_filename); + // TODO: transitional code to support both locations of the changelog + // only the root one should prevail + if root_path.exists() { + root_path + } else { + root.as_ref().join_iter(["app", "gui", changelog_filename]) + } +} + +/// The default value of `ENSO_DATA_DIRECTORY`. +/// See: +pub fn default_data_directory() -> PathBuf { + let project_path = match TARGET_OS { + OS::MacOS => "org.enso", + _ => "enso", + }; + // We can unwrap, because all systems we target define data local directory. + dirs::data_local_dir().unwrap().join(project_path) +} + +/// Get the `ENSO_DATA_DIRECTORY` path. +pub fn data_directory() -> PathBuf { + std::env::var_os("ENSO_DATA_DIRECTORY").map_or_else(default_data_directory, PathBuf::from) +} + +/// Get the place where global IR caches are stored. +pub fn cache_directory() -> PathBuf { + data_directory().join("cache") +} + +pub fn project_manager(base_path: impl AsRef) -> PathBuf { + base_path + .as_ref() + .join_iter(["enso", "bin", "project-manager"]) + .with_appended_extension(EXE_EXTENSION) +} + +/// The path to the first `Cargo.toml` above the given path. 
+pub fn parent_cargo_toml(initial_path: impl AsRef) -> Result { + let mut path = initial_path.as_ref().to_path_buf(); + loop { + path.push("Cargo.toml"); + if path.exists() { + return Ok(path); + } + path.pop(); + ensure!(path.pop(), "No Cargo.toml found for {}", initial_path.as_ref().display()); + } +} diff --git a/build/build/src/postgres.rs b/build/build/src/postgres.rs new file mode 100644 index 0000000000..343fa863e4 --- /dev/null +++ b/build/build/src/postgres.rs @@ -0,0 +1,252 @@ +use crate::prelude::*; + +use ide_ci::env::new::RawVariable; +use ide_ci::env::new::TypedVariable; +use ide_ci::get_free_port; +use ide_ci::programs::docker::ContainerId; +use ide_ci::programs::docker::ImageId; +use ide_ci::programs::docker::Network; +use ide_ci::programs::docker::RunOptions; +use ide_ci::programs::Docker; +use std::process::Stdio; +use tokio::io::AsyncBufReadExt; +use tokio::io::AsyncRead; +use tokio::io::BufReader; +use tokio::process::Child; + + + +/// Port used by Postgres in its container. +const POSTGRES_CONTAINER_DEFAULT_PORT: u16 = 5432; + +/// Environment variables used to configure the Postgres container. +pub mod env { + + pub mod container { + ide_ci::define_env_var! { + POSTGRES_DB, String; + POSTGRES_USER, String; + POSTGRES_PASSWORD, String; + } + } + pub mod tests { + ide_ci::define_env_var! { + ENSO_DATABASE_TEST_DB_NAME, String; + ENSO_DATABASE_TEST_HOST, String; + ENSO_DATABASE_TEST_DB_USER, String; + ENSO_DATABASE_TEST_DB_PASSWORD, String; + } + } +} + +#[derive(Clone, Debug)] +pub enum EndpointConfiguration { + /// Used when the Postgres container is started directly from host (rather than Docker + /// container). In such case the Postgres will be exposed to host network on a given port. + Host { port: u16 }, + /// Used when Postgres is spawned from a container. In such case it will be spawned in a owning + /// container's network on the default port. 
+ Container { owner: ContainerId }, +} + +impl EndpointConfiguration { + /// Tries to deduce what endpoint should be used for a spawned Postgres service. + pub fn deduce() -> Result { + if let Ok(container_name) = std::env::var("ENSO_RUNNER_CONTAINER_NAME") { + debug!("Assuming that I am in the Docker container named {container_name}."); + Ok(Self::Container { owner: ContainerId(container_name) }) + } else { + // If we are running on the bare machine (i.e. not in container), we spawn postgres + // and expose it on a free host port. Then we can directly consume. + let port = if port_check::is_local_port_free(POSTGRES_CONTAINER_DEFAULT_PORT) { + // Prefer the usual port. + POSTGRES_CONTAINER_DEFAULT_PORT + } else { + get_free_port()? + }; + Ok(Self::Host { port }) + } + } +} + +#[derive(Clone, Debug)] +pub struct Configuration { + pub postgres_container: ContainerId, + pub database_name: String, + pub user: String, + pub password: String, + pub endpoint: EndpointConfiguration, + pub version: String, +} + +impl Configuration { + pub fn image_id(&self) -> ImageId { + ImageId(format!("postgres:{}", &self.version)) + } + + pub fn set_enso_test_env(&self) -> Result { + env::tests::ENSO_DATABASE_TEST_DB_NAME.set(&self.database_name)?; + env::tests::ENSO_DATABASE_TEST_HOST.set(match &self.endpoint { + EndpointConfiguration::Host { port } => format!("localhost:{port}"), + EndpointConfiguration::Container { .. 
} => + format!("localhost:{POSTGRES_CONTAINER_DEFAULT_PORT}"), + })?; + env::tests::ENSO_DATABASE_TEST_DB_USER.set(&self.user)?; + env::tests::ENSO_DATABASE_TEST_DB_PASSWORD.set(&self.password)?; + Ok(()) + } + + pub fn clear_enso_test_env(&self) { + env::tests::ENSO_DATABASE_TEST_DB_NAME.remove(); + env::tests::ENSO_DATABASE_TEST_HOST.remove(); + env::tests::ENSO_DATABASE_TEST_DB_USER.remove(); + env::tests::ENSO_DATABASE_TEST_DB_PASSWORD.remove(); + } + + pub async fn cleanup(&self) -> Result { + Docker.remove_container(&self.postgres_container, true).await + } +} + +/// Retrieve input from asynchronous reader line by line and feed them into the given function. +pub async fn process_lines(reader: R, f: impl Fn(String)) -> Result { + debug!("Started line processor."); + let mut reader = BufReader::new(reader); + let mut line = String::new(); + while reader.read_line(&mut line).await? != 0 { + f(std::mem::take(&mut line)); + } + Ok(reader.into_inner()) +} + +pub async fn process_lines_until( + reader: R, + f: &impl Fn(&str) -> bool, +) -> Result { + let mut reader = BufReader::new(reader); + let mut line = String::new(); + loop { + let bytes_read = reader.read_line(&mut line).await?; + ensure!(bytes_read != 0, "Postgresql container closed without being ready!"); + if f(&line) { + break; + } + line.clear(); + } + Ok(reader.into_inner()) +} + +#[derive(Debug)] +pub struct PostgresContainer { + _docker_run: Child, + config: Configuration, +} + +impl Drop for PostgresContainer { + fn drop(&mut self) { + self.config.clear_enso_test_env(); + + debug!("Will remove the postgres container"); + let cleanup_future = self.config.cleanup(); + if let Err(e) = futures::executor::block_on(cleanup_future) { + debug!( + "Failed to kill the Postgres container named {}: {}", + self.config.postgres_container, e + ); + } else { + debug!("Postgres container killed."); + } + } +} + +#[derive(Clone, Copy, Debug)] +pub struct Postgresql; + +impl Postgresql { + pub async fn start(config: 
Configuration) -> Result { + // Attempt cleanup in case previous script run crashed in the middle of this. + // Otherwise, postgres container names could collide. + let _ = config.cleanup().await; + + let mut opts = RunOptions::new(config.image_id()); + opts.env(&env::container::POSTGRES_DB, &*config.database_name)?; + opts.env(&env::container::POSTGRES_USER, &*config.user)?; + opts.env(&env::container::POSTGRES_PASSWORD, &*config.password)?; + match &config.endpoint { + EndpointConfiguration::Host { port } => { + opts.publish_port(*port, POSTGRES_CONTAINER_DEFAULT_PORT); + } + EndpointConfiguration::Container { owner } => { + opts.network = Some(Network::Container(owner.clone())); + } + } + opts.sig_proxy = Some(true); + opts.name = Some(config.postgres_container.to_string()); + + let mut cmd = Docker.run_cmd(&opts)?; + cmd.stderr(Stdio::piped()); + cmd.kill_on_drop(true); + let mut child = cmd.spawn().anyhow_err()?; + let stderr = child + .stderr + .ok_or_else(|| anyhow!("Failed to access standard output of the spawned process!"))?; + + // Wait until container is ready. + let check_line = |line: &str| { + debug!("ERR: {}", line); + line.contains("database system is ready to accept connections") + }; + let stderr = process_lines_until(stderr, &check_line).await?; + + // Put back stream we've been reading and pack the whole thing back for the caller. 
+ child.stderr = Some(stderr); + + config.set_enso_test_env()?; + Ok(PostgresContainer { _docker_run: child, config }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + #[ignore] + async fn start_postgres() -> Result { + let config = Configuration { + postgres_container: ContainerId("something".into()), + endpoint: EndpointConfiguration::deduce()?, + version: "latest".into(), + user: "test".into(), + password: "test".into(), + database_name: "test".into(), + }; + let child = Postgresql::start(config).await?; + // drop(child); + std::mem::forget(child); + Ok(()) + } + + #[tokio::test] + #[ignore] + async fn test_postgres() -> Result { + // let config = Configuration { + // postgres_container: ContainerId("something".into()), + // endpoint: EndpointConfiguration::deduce()?, + // version: "latest".into(), + // user: "test".into(), + // password: "test".into(), + // database_name: "test".into(), + // }; + // let child = Postgresql::start(config).await?; + // std::mem::forget(child); + // // let mut httpbin = get_and_spawn_httpbin_on_free_port().await?; + // Command::new("cmd") + // .args(["/c", + // "H:\\NBO\\enso2\\built-distribution\\enso-engine-0.2.32-SNAPSHOT-windows-amd64\\enso-0.2. + // 32-SNAPSHOT\\bin\\enso", "--no-ir-caches", "--run", + // "H:\\NBO\\enso2\\test\\Database_Tests"]).run_ok().await?; httpbin.process.kill(). 
+ // await?; + Ok(()) + } +} diff --git a/build/build/src/prettier.rs b/build/build/src/prettier.rs new file mode 100644 index 0000000000..bd9142c51f --- /dev/null +++ b/build/build/src/prettier.rs @@ -0,0 +1,27 @@ +use crate::prelude::*; + +use crate::paths::generated::RepoRoot; + +use ide_ci::programs::Npm; + + + +pub fn install_and_run_prettier(repo_root: &RepoRoot, script: &str) -> BoxFuture<'static, Result> { + let prettier_dir = repo_root.build.prettier.to_path_buf(); + let script = script.to_string(); + async move { + let no_args: [&str; 0] = []; + Npm.cmd()?.current_dir(&prettier_dir).install().run_ok().await?; + Npm.cmd()?.current_dir(&prettier_dir).run(script, no_args).run_ok().await?; + Ok(()) + } + .boxed() +} + +pub fn check(repo_root: &RepoRoot) -> BoxFuture<'static, Result> { + install_and_run_prettier(repo_root, "check") +} + +pub fn write(repo_root: &RepoRoot) -> BoxFuture<'static, Result> { + install_and_run_prettier(repo_root, "write") +} diff --git a/build/build/src/programs.rs b/build/build/src/programs.rs new file mode 100644 index 0000000000..3d9ea72c87 --- /dev/null +++ b/build/build/src/programs.rs @@ -0,0 +1,45 @@ +use crate::prelude::*; + + + +pub mod project_manager { + use super::*; + use std::process::Stdio; + + use ide_ci::define_env_var; + + define_env_var! { + /// Custom project root. Useful if we want to use backend without affecting user's default + /// workspace. + PROJECTS_ROOT, PathBuf; + } + + #[derive(Clone, Copy, Debug)] + pub struct ProjectManager; + + impl Program for ProjectManager { + fn executable_name(&self) -> &'static str { + "project-manager" + } + } + + pub fn spawn_from(bundle: &crate::paths::generated::ProjectManager) -> Command { + let binary_path = bundle.bin.project_managerexe.as_path(); + let mut command = ::Command::new(binary_path); + // We do this, because Project Manager runs until newline is input. We need to create a pipe + // to control its lifetime. 
+ command.stdin(Stdio::piped()); + command + } + // #[derive(Shrinkwrap)] + // #[shrinkwrap(mutable)] + // pub struct Command(pub ide_ci::program::Command); + // + // impl From for Command { + // fn from(inner: ide_ci::prelude::Command) -> Self { + // Self(inner) + // } + // } + // + // impl MyCommand

for Command {} +} diff --git a/build/build/src/project.rs b/build/build/src/project.rs new file mode 100644 index 0000000000..84519cfdeb --- /dev/null +++ b/build/build/src/project.rs @@ -0,0 +1,363 @@ +use crate::prelude::*; + +use crate::source::BuildTargetJob; +use crate::source::CiRunSource; +use crate::source::ExternalSource; +use crate::source::FetchTargetJob; +use crate::source::GetTargetJob; +use crate::source::OngoingCiRunSource; +use crate::source::ReleaseSource; +use crate::source::Source; +use crate::source::WatchTargetJob; +use crate::source::WithDestination; + +use derivative::Derivative; +use ide_ci::actions::artifacts; +use ide_ci::cache; +use ide_ci::cache::Cache; +use ide_ci::ok_ready_boxed; +use octocrab::models::repos::Asset; + + +// ============== +// === Export === +// ============== + +pub mod backend; +pub mod engine; +pub mod gui; +pub mod ide; +pub mod project_manager; +pub mod runtime; +pub mod wasm; + +pub use backend::Backend; +pub use gui::Gui; +pub use ide::Ide; +pub use runtime::Runtime; +pub use wasm::Wasm; + + + +// FIXME: this works for Project Manager bundle-style archives only, not all. +pub fn path_to_extract() -> Option { + Some("enso".into()) +} + +/// A built target, contained under a single directory. +/// +/// The `AsRef` trait must return that directory path. +pub trait IsArtifact: Clone + AsRef + Sized + Send + Sync + 'static {} + +/// Plain artifact is just a folder with... things. +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct PlainArtifact { + /// Directory path. + pub path: PathBuf, + /// Phantom, so we can tell artifacts of different projects apart. 
+ #[derivative(Debug = "ignore")] + pub phantom: PhantomData, +} + +impl AsRef for PlainArtifact { + fn as_ref(&self) -> &Path { + self.path.as_path() + } +} + +impl IsArtifact for PlainArtifact {} + +impl PlainArtifact { + pub fn new(path: impl Into) -> Self { + Self { path: path.into(), phantom: default() } + } + + fn from_existing(path: impl AsRef) -> BoxFuture<'static, Result> + where T: Send + Sync + 'static { + ready(Ok(Self::new(path.as_ref()))).boxed() + } +} + +/// State available to all project-related operations. +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct Context { + /// GitHub API client. + /// + /// If authorized, it will count API rate limits against our identity and allow operations like + /// managing releases or downloading CI run artifacts. + #[derivative(Debug = "ignore")] + pub octocrab: Octocrab, + + /// Stores things like downloaded release assets to save time. + pub cache: Cache, + + /// Whether built artifacts should be uploaded as part of CI run. Works only in CI environment. + pub upload_artifacts: bool, + + /// Directory being an `enso` repository's working copy. + /// + /// The directory is not required to be a git repository. It is allowed to use source tarballs + /// as well. + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub repo_root: crate::paths::generated::RepoRoot, +} + +/// Build targets, like GUI or Project Manager. +/// +/// Built target generates artifacts that can be stored as a release asset or CI run artifacts. +pub trait IsTarget: Clone + Debug + Sized + Send + Sync + 'static { + /// All the data needed to build this target that are not placed in `self`. + type BuildInput: Debug + Send + 'static; + + /// A location-like value with the directory where the artifacts are placed. + type Artifact: IsArtifact; + + /// Identifier used when uploading build artifacts to run. + /// + /// Note that this is not related to the assets name in the release. 
+ fn artifact_name(&self) -> String; + + /// Create a full artifact description from an on-disk representation. + fn adapt_artifact(self, path: impl AsRef) -> BoxFuture<'static, Result>; + + fn get( + &self, + context: Context, + job: GetTargetJob, + ) -> BoxFuture<'static, Result> { + let GetTargetJob { destination, inner } = job; + match inner { + Source::BuildLocally(inputs) => + self.build(context, WithDestination { inner: inputs, destination }), + Source::External(external) => + self.get_external(context, WithDestination { inner: external, destination }), + } + } + + /// Produce an artifact from the external resource reference. + fn get_external( + &self, + context: Context, + job: FetchTargetJob, + ) -> BoxFuture<'static, Result> { + let FetchTargetJob { inner: source, destination } = job; + let this = self.clone(); + let span = debug_span!("Getting artifact from an external source"); + match source { + ExternalSource::OngoingCiRun(OngoingCiRunSource { artifact_name }) => async move { + ide_ci::actions::artifacts::retrieve_compressed_directory( + artifact_name, + &destination, + ) + .await?; + this.adapt_artifact(destination).await + } + .boxed(), + ExternalSource::CiRun(ci_run) => self.download_artifact(context, ci_run, destination), + ExternalSource::LocalFile(source_path) => async move { + ide_ci::fs::mirror_directory(source_path, &destination).await?; + this.adapt_artifact(destination).await + } + .boxed(), + ExternalSource::Release(release) => self.download_asset(context, release, destination), + } + .instrument(span) + .boxed() + } + + /// Produce an artifact from build inputs. 
+ fn build( + &self, + context: Context, + job: BuildTargetJob, + ) -> BoxFuture<'static, Result> { + let span = debug_span!("Building.", ?self, ?context, ?job).entered(); + let upload_artifacts = context.upload_artifacts; + let artifact_fut = self.build_internal(context, job); + let this = self.clone(); + async move { + let artifact = artifact_fut.await.context(format!("Failed to build {:?}.", this))?; + // We upload only built artifacts. There would be no point in uploading something that + // we've just downloaded. That's why the uploading code is here. + if upload_artifacts { + this.perhaps_upload_artifact(&artifact).await?; + } + Ok(artifact) + } + .instrument(span.exit()) + .boxed() + } + + fn perhaps_upload_artifact(&self, artifact: &Self::Artifact) -> BoxFuture<'static, Result> { + let should_upload_artifact = ide_ci::actions::workflow::is_in_env(); + trace!("Got target {:?}, should it be uploaded? {}", self, should_upload_artifact); + if should_upload_artifact { + self.upload_artifact(ready(Ok(artifact.clone()))) + } else { + ok_ready_boxed(()) + } + } + + /// Produce an artifact from build inputs. + fn build_internal( + &self, + context: Context, + job: BuildTargetJob, + ) -> BoxFuture<'static, Result>; + + /// Upload artifact to the current GitHub Actions run. 
+ fn upload_artifact( + &self, + output: impl Future> + Send + 'static, + ) -> BoxFuture<'static, Result> { + let name = self.artifact_name(); + async move { artifacts::upload_compressed_directory(output.await?, name).await }.boxed() + } + + fn download_artifact( + &self, + context: Context, + ci_run: CiRunSource, + output_path: impl AsRef + Send + Sync + 'static, + ) -> BoxFuture<'static, Result> { + let Context { octocrab, cache, upload_artifacts: _, repo_root: _ } = context; + let CiRunSource { run_id, artifact_name, repository } = ci_run; + let span = info_span!("Downloading CI Artifact.", %artifact_name, %repository, target = output_path.as_str()); + let this = self.clone(); + async move { + let artifact = + repository.find_artifact_by_name(&octocrab, run_id, &artifact_name).await?; + info!("Will download artifact: {:#?}", artifact); + let artifact_to_get = cache::artifact::ExtractedArtifact { + client: octocrab.clone(), + key: cache::artifact::Key { artifact_id: artifact.id, repository }, + }; + let artifact = cache.get(artifact_to_get).await?; + let inner_archive_path = + artifact.join(&artifact_name).with_appended_extension("tar.gz"); + ide_ci::archive::extract_to(&inner_archive_path, &output_path).await?; + this.adapt_artifact(output_path).await + } + .instrument(span) + .boxed() + } + + fn find_asset(&self, _assets: Vec) -> Result { + todo!("Not implemented for target {self:?}!") + } + + fn download_asset( + &self, + context: Context, + source: ReleaseSource, + destination: PathBuf, + ) -> BoxFuture<'static, Result> { + let Context { octocrab, cache, upload_artifacts: _, repo_root: _ } = context; + let span = info_span!("Downloading built target from a release asset.", + asset_id = source.asset_id.0, + repo = %source.repository); + let this = self.clone(); + async move { + let ReleaseSource { asset_id, repository } = &source; + let archive_source = repository.download_asset_job(&octocrab, *asset_id); + let extract_job = cache::archive::ExtractedArchive { 
+ archive_source, + path_to_extract: path_to_extract(), + }; + let directory = cache.get(extract_job).await?; + ide_ci::fs::remove_if_exists(&destination)?; + ide_ci::fs::symlink_auto(&directory, &destination)?; + this.adapt_artifact(destination).await + } + .instrument(span) + .boxed() + } +} + +#[derive(Debug)] +pub enum PerhapsWatched { + Watched(T::Watcher), + Static(T::Artifact), +} + +impl AsRef for PerhapsWatched { + fn as_ref(&self) -> &T::Artifact { + match self { + PerhapsWatched::Watched(watcher) => watcher.as_ref(), + PerhapsWatched::Static(static_artifact) => static_artifact, + } + } +} + +impl PerhapsWatched { + pub async fn wait_ok(&mut self) -> Result { + match self { + PerhapsWatched::Watched(watcher) => watcher.wait_for_finish().await, + PerhapsWatched::Static(_) => Ok(()), + } + } +} + +pub trait ProcessWrapper { + fn inner(&mut self) -> &mut tokio::process::Child; + + fn wait_ok(&mut self) -> BoxFuture { + ide_ci::extensions::child::ChildExt::wait_ok(self.inner()).boxed() + } + fn kill(&mut self) -> BoxFuture { + self.inner().kill().anyhow_err().boxed() + } +} + +impl ProcessWrapper for tokio::process::Child { + fn inner(&mut self) -> &mut tokio::process::Child { + self + } +} + +/// Watcher is an ongoing process that keeps updating the artifacts to follow changes to the +/// target's source. +#[derive(Debug)] +pub struct Watcher { + /// Where the watcher outputs artifacts. + pub artifact: Target::Artifact, + /// The process performing the watch. + /// + /// For example, an instance of cargo-watch. 
+ pub watch_process: Proc, +} + +impl ProcessWrapper for Watcher { + fn inner(&mut self) -> &mut tokio::process::Child { + self.watch_process.inner() + } +} + +impl AsRef for Watcher { + fn as_ref(&self) -> &Target::Artifact { + &self.artifact + } +} + +impl IsWatcher for Watcher { + fn wait_for_finish(&mut self) -> BoxFuture { + self.watch_process.wait_ok() + } +} + +pub trait IsWatcher: AsRef + Send { + fn wait_for_finish(&mut self) -> BoxFuture; +} + +pub trait IsWatchable: IsTarget { + type Watcher: IsWatcher; + type WatchInput: Clone + Debug + Send; + + fn watch( + &self, + context: Context, + job: WatchTargetJob, + ) -> BoxFuture<'static, Result>; +} diff --git a/build/build/src/project/backend.rs b/build/build/src/project/backend.rs new file mode 100644 index 0000000000..000bb87b60 --- /dev/null +++ b/build/build/src/project/backend.rs @@ -0,0 +1,165 @@ +use crate::prelude::*; + +use crate::engine::BuildConfigurationFlags; +use crate::paths::pretty_print_arch; +use crate::paths::TargetTriple; +use crate::project::Context; +use crate::project::IsArtifact; +use crate::project::IsTarget; +use crate::source::BuildTargetJob; +use crate::source::WithDestination; +use crate::version::Versions; + +use derivative::Derivative; +use ide_ci::archive::is_archive_name; +use ide_ci::extensions::os::OsExt; +use octocrab::models::repos::Asset; + + + +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct BuildInput { + pub versions: Versions, + #[derivative(Debug = "ignore")] + pub external_runtime: Option>, +} + +impl BuildInput { + pub fn prepare_context( + &self, + inner: Context, + config: BuildConfigurationFlags, + ) -> Result { + let BuildInput { versions, external_runtime } = self; + crate::engine::RunContext::new( + inner, + config, + TargetTriple::new(versions.clone()), + external_runtime.clone(), + ) + } +} + +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct Artifact { + /// Location of the Project Manager distribution. 
+ #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub path: crate::paths::generated::ProjectManager, + /// Versions of Engine that are bundled in this Project Manager distribution. + /// + /// Technically a Project Manager bundle can be shipped with arbitrary number of Enso Engine + /// packages. However in packages we create it is almost always zero (for plain PM package) or + /// one (for full PM bundle). + /// + /// Artifacts built with [`ProjectManager::build`] will have exactly one engine + /// bundled. + #[derivative(Debug(format_with = "ide_ci::fmt::display_list"))] + pub engine_versions: Vec, +} + +impl AsRef for Artifact { + fn as_ref(&self) -> &Path { + &self.path + } +} + +impl IsArtifact for Artifact {} + +/// Retrieves a list of all Enso Engine versions that are bundled within a given Project Manager +/// distribution. +#[context("Failed to list bundled engine versions: {}", project_manager_bundle)] +pub async fn bundled_engine_versions( + project_manager_bundle: &crate::paths::generated::ProjectManager, +) -> Result> { + let mut ret = vec![]; + + let mut dir_reader = ide_ci::fs::tokio::read_dir(&project_manager_bundle.dist).await?; + while let Some(entry) = dir_reader.next_entry().await? { + if entry.metadata().await?.is_dir() { + ret.push(Version::from_str(entry.file_name().as_str())?); + } + } + Ok(ret) +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Backend { + pub target_os: OS, +} + +impl Backend { + pub fn matches_platform(&self, name: &str) -> bool { + // Sample name: "project-manager-bundle-2022.1.1-nightly.2022-04-16-linux-amd64.tar.gz" + let os_matches = name.contains(self.target_os.as_str()); + // Arch test involves a workaround for Engine being built through Rosette on Apple Silicon. 
+ let arch_matches = name.contains(pretty_print_arch(TARGET_ARCH)) + || (TARGET_ARCH == Arch::AArch64 && name.contains(pretty_print_arch(Arch::X86_64))); + os_matches && arch_matches + } +} + +impl IsTarget for Backend { + type BuildInput = BuildInput; + type Artifact = Artifact; + + fn artifact_name(&self) -> String { + // Version is not part of the name intentionally. We want to refer to PM bundles as + // artifacts without knowing their version. + format!("project-manager-{}", self.target_os) + } + + fn adapt_artifact(self, path: impl AsRef) -> BoxFuture<'static, Result> { + let path = crate::paths::generated::ProjectManager::new_root( + path.as_ref(), + self.target_os.exe_suffix(), + ); + async move { + let engine_versions = bundled_engine_versions(&path).await?; + Ok(Artifact { path, engine_versions }) + } + .boxed() + } + + fn build_internal( + &self, + context: Context, + job: BuildTargetJob, + ) -> BoxFuture<'static, Result> { + let WithDestination { inner, destination } = job; + let target_os = self.target_os; + let this = *self; + async move { + ensure!( + target_os == TARGET_OS, + "Enso Project Manager cannot be built on '{target_os}' for target '{TARGET_OS}'.", + ); + let config = BuildConfigurationFlags { + build_project_manager_bundle: true, + generate_java_from_rust: true, + ..default() + }; + let context = inner.prepare_context(context, config)?; + let artifacts = context.build().await?; + let project_manager = + artifacts.bundles.project_manager.context("Missing project manager bundle!")?; + ide_ci::fs::mirror_directory(&project_manager.dir, &destination).await?; + this.adapt_artifact(destination).await + } + .boxed() + } + + fn find_asset(&self, assets: Vec) -> Result { + assets + .into_iter() + .find(|asset| { + let name = &asset.name; + self.matches_platform(name) + && is_archive_name(name) + && name.contains("project-manager") + && (name.contains("bundle") || asset.size > 200_000_000) + }) + .context("Failed to find release asset with Enso 
Project Manager bundle.") + } +} diff --git a/build/build/src/project/engine.rs b/build/build/src/project/engine.rs new file mode 100644 index 0000000000..6e406dfa57 --- /dev/null +++ b/build/build/src/project/engine.rs @@ -0,0 +1,73 @@ +// use crate::prelude::*; +// +// use crate::engine::BuildConfigurationFlags; +// use crate::project::Context; +// use crate::project::IsArtifact; +// use crate::project::IsTarget; +// +// use ide_ci::goodie::GoodieDatabase; +// use ide_ci::ok_ready_boxed; +// +// pub use crate::project::backend::BuildInput; +// use crate::source::BuildTargetJob; +// use crate::source::WithDestination; +// +// #[derive(Clone, Debug)] +// pub struct Artifact { +// pub root: PathBuf, +// } +// +// impl AsRef for Artifact { +// fn as_ref(&self) -> &Path { +// &self.root +// } +// } +// +// impl IsArtifact for Artifact {} +// +// +// #[derive(Clone, Copy, Debug, PartialEq)] +// pub struct Engine; +// +// impl IsTarget for Engine { +// type BuildInput = BuildInput; +// type Artifact = Artifact; +// +// fn artifact_name(&self) -> String { +// "Enso Engine".into() +// } +// +// fn adapt_artifact(self, path: impl AsRef) -> BoxFuture<'static, Result> +// { ok_ready_boxed(Artifact { root: path.as_ref().into() }) +// } +// +// fn build_internal( +// &self, +// context: Context, +// job: BuildTargetJob, +// ) -> BoxFuture<'static, Result> { +// let WithDestination { inner, destination } = job; +// let this = self.clone(); +// async move { +// let paths = crate::paths::Paths::new_versions(&inner.repo_root, inner.versions)?; +// let context = crate::engine::context::RunContext { +// operation: crate::engine::Operation::Build, +// goodies: GoodieDatabase::new()?, +// config: BuildConfigurationFlags { +// clean_repo: false, +// build_engine_package: true, +// ..crate::engine::NIGHTLY +// } +// .into(), +// inner: context, +// paths, +// }; +// let artifacts = context.build().await?; +// let engine_distribution = +// artifacts.packages.engine.context("Missing 
Engine Distribution!")?; +// ide_ci::fs::mirror_directory(&engine_distribution.dir, &destination).await?; +// this.adapt_artifact(destination).await +// } +// .boxed() +// } +// } diff --git a/build/build/src/project/gui.rs b/build/build/src/project/gui.rs new file mode 100644 index 0000000000..fc82e70a6e --- /dev/null +++ b/build/build/src/project/gui.rs @@ -0,0 +1,176 @@ +use crate::prelude::*; + +use crate::ide::web::IdeDesktop; +use crate::project::Context; +use crate::project::IsTarget; +use crate::project::IsWatchable; +use crate::project::IsWatcher; +use crate::project::PerhapsWatched; +use crate::project::PlainArtifact; +use crate::project::Wasm; +use crate::source::BuildTargetJob; +use crate::source::GetTargetJob; +use crate::source::Source; +use crate::source::WatchTargetJob; +use crate::source::WithDestination; +use crate::BoxFuture; + +use derivative::Derivative; +use futures_util::future::try_join; +use ide_ci::ok_ready_boxed; + + + +pub type Artifact = PlainArtifact; + +#[derive(Clone, Derivative, derive_more::Deref)] +#[derivative(Debug)] +pub struct WatchInput { + #[deref] + pub wasm: ::WatchInput, + /// Rather than start web watcher, spawn an interactive shell. 
+ pub shell: bool, +} + +#[derive(derivative::Derivative)] +#[derivative(Debug)] +pub struct BuildInput { + #[derivative(Debug = "ignore")] + pub wasm: GetTargetJob, + /// BoxFuture<'static, Result>, + #[derivative(Debug = "ignore")] + pub build_info: BoxFuture<'static, Result>, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Gui; + +#[async_trait] +impl IsTarget for Gui { + type BuildInput = BuildInput; + type Artifact = Artifact; + + fn artifact_name(&self) -> String { + "gui".into() + } + + fn adapt_artifact(self, path: impl AsRef) -> BoxFuture<'static, Result> { + Artifact::from_existing(path) + } + + fn build_internal( + &self, + context: Context, + job: BuildTargetJob, + ) -> BoxFuture<'static, Result> { + let WithDestination { inner, destination } = job; + async move { + let ide = ide_desktop_from_context(&context); + let wasm = Wasm.get(context, inner.wasm); + ide.build_content(wasm, &inner.build_info.await?, &destination).await?; + Ok(Artifact::new(destination)) + } + .boxed() + } +} + +#[derive(Debug)] +pub struct Watcher { + pub wasm: PerhapsWatched, + pub web: crate::project::Watcher, +} + +impl AsRef for Watcher { + fn as_ref(&self) -> &Artifact { + &self.web.artifact + } +} + +impl IsWatcher for Watcher { + fn wait_for_finish(&mut self) -> BoxFuture { + let Self { web, wasm } = self; + try_join(wasm.wait_ok(), IsWatcher::wait_for_finish(web)).void_ok().boxed() + } +} + +impl IsWatchable for Gui { + type Watcher = Watcher; + type WatchInput = WatchInput; + + // fn setup_watcher( + // &self, + // build_input: Self::BuildInput, + // watch_input: Self::WatchInput, + // output_path: impl AsRef + Send + Sync + 'static, + // ) -> BoxFuture<'static, Result> { + // async move { + // let BuildInput { build_info, repo_root, wasm } = build_input; + // let ide = IdeDesktop::new(&repo_root.app.ide_desktop); + // let watch_process = ide.watch_content(wasm, &build_info.await?).await?; + // let artifact = 
Self::Artifact::from_existing(output_path).await?; + // Ok(Self::Watcher { watch_process, artifact }) + // } + // .boxed() + // } + + fn watch( + &self, + context: Context, + job: WatchTargetJob, + ) -> BoxFuture<'static, Result> { + let WatchTargetJob { watch_input, build: WithDestination { inner, destination } } = job; + let BuildInput { build_info, wasm } = inner; + let perhaps_watched_wasm = perhaps_watch(Wasm, context.clone(), wasm, watch_input.wasm); + let ide = ide_desktop_from_context(&context); + async move { + let perhaps_watched_wasm = perhaps_watched_wasm.await?; + let wasm_artifacts = ok_ready_boxed(perhaps_watched_wasm.as_ref().clone()); + let watch_process = + ide.watch_content(wasm_artifacts, &build_info.await?, watch_input.shell).await?; + let artifact = Self::Artifact::from_existing(destination).await?; + let web_watcher = crate::project::Watcher { watch_process, artifact }; + Ok(Self::Watcher { wasm: perhaps_watched_wasm, web: web_watcher }) + } + .boxed() + } +} + +pub fn perhaps_watch( + target: T, + context: Context, + job: GetTargetJob, + watch_input: T::WatchInput, +) -> BoxFuture<'static, Result>> { + match job.inner { + Source::BuildLocally(local) => target + .watch(context, WatchTargetJob { + watch_input, + build: WithDestination { inner: local, destination: job.destination }, + }) + .map_ok(PerhapsWatched::Watched) + .boxed(), + Source::External(external) => target + .get_external(context, WithDestination { + inner: external, + destination: job.destination, + }) + .map_ok(PerhapsWatched::Static) + .boxed(), + } +} + +#[derive(Clone, Derivative, Serialize, Deserialize)] +#[derivative(Debug)] +#[serde(rename_all = "camelCase")] +pub struct BuildInfo { + pub commit: String, + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub version: Version, + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub engine_version: Version, + pub name: String, +} + +pub fn ide_desktop_from_context(context: &Context) -> IdeDesktop 
{ + IdeDesktop::new(&context.repo_root, context.octocrab.clone(), context.cache.clone()) +} diff --git a/build/build/src/project/ide.rs b/build/build/src/project/ide.rs new file mode 100644 index 0000000000..53ae86d5f4 --- /dev/null +++ b/build/build/src/project/ide.rs @@ -0,0 +1,162 @@ +use crate::prelude::*; + +use crate::project::gui::ide_desktop_from_context; +use crate::project::Context; + +use ide_ci::actions::artifacts::upload_compressed_directory; +use ide_ci::actions::artifacts::upload_single_file; +use ide_ci::actions::workflow::is_in_env; + + + +#[derive(Clone, Debug)] +pub struct Artifact { + /// Directory with unpacked client distribution. + pub unpacked: PathBuf, + /// Entry point within an unpacked client distribution. + pub unpacked_executable: PathBuf, + /// File with the compressed client image (like installer or AppImage). + pub image: PathBuf, + /// File with the checksum of the image. + pub image_checksum: PathBuf, +} + +impl Artifact { + fn new( + target_os: OS, + target_arch: Arch, + version: &Version, + dist_dir: impl AsRef, + ) -> Self { + let unpacked = dist_dir.as_ref().join(match target_os { + OS::Linux => "linux-unpacked", + OS::MacOS if target_arch == Arch::AArch64 => "mac-arm64", + OS::MacOS if target_arch == Arch::X86_64 => "mac", + OS::Windows => "win-unpacked", + _ => todo!("{target_os}-{target_arch} combination is not supported"), + }); + let unpacked_executable = match target_os { + OS::Linux => "enso", + OS::MacOS => "Enso.app", + OS::Windows => "Enso.exe", + _ => todo!("{target_os}-{target_arch} combination is not supported"), + } + .into(); + let image = dist_dir.as_ref().join(match target_os { + OS::Linux => format!("enso-linux-{}.AppImage", version), + OS::MacOS => format!("enso-mac-{}.dmg", version), + OS::Windows => format!("enso-win-{}.exe", version), + _ => todo!("{target_os}-{target_arch} combination is not supported"), + }); + + Self { + image_checksum: image.with_extension("sha256"), + image, + unpacked, + 
unpacked_executable, + } + } + + pub async fn upload_as_ci_artifact(&self) -> Result { + if is_in_env() { + upload_compressed_directory(&self.unpacked, format!("ide-unpacked-{}", TARGET_OS)) + .await?; + upload_single_file(&self.image, format!("ide-{}", TARGET_OS)).await?; + upload_single_file(&self.image_checksum, format!("ide-{}", TARGET_OS)).await?; + } else { + info!("Not in the CI environment, will not upload the artifacts.") + } + Ok(()) + } + + pub fn start_unpacked( + &self, + extra_ide_options: impl IntoIterator>, + ) -> Command { + let application_path = self.unpacked.join(&self.unpacked_executable); + let mut command = if TARGET_OS == OS::MacOS { + let mut ret = Command::new("open"); + ret.arg(application_path); + ret + } else { + Command::new(application_path) + }; + command.args(extra_ide_options); + command + } +} + +#[derive(derivative::Derivative)] +#[derivative(Debug)] +pub struct BuildInput { + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub version: Version, + #[derivative(Debug = "ignore")] + pub project_manager: BoxFuture<'static, Result>, + #[derivative(Debug = "ignore")] + pub gui: BoxFuture<'static, Result>, +} + +#[derive(Clone, Debug)] +pub enum OutputPath { + /// The job must place the artifact under given path. + Required(PathBuf), + /// THe job may place the artifact anywhere, though it should use the suggested path if it has + /// no "better idea" (like reusing existing cache). + Suggested(PathBuf), + /// The job is responsible for finding a place for artifacts. 
+ Whatever, +} + + +#[derive(Clone, Copy, Debug)] +pub struct Ide { + pub target_os: OS, + pub target_arch: Arch, +} + +impl Ide { + pub fn build( + &self, + context: &Context, + input: BuildInput, + output_path: impl AsRef + Send + Sync + 'static, + ) -> BoxFuture<'static, Result> { + let BuildInput { version, project_manager, gui } = input; + let ide_desktop = ide_desktop_from_context(context); + let target_os = self.target_os; + let target_arch = self.target_arch; + async move { + let (gui, project_manager) = try_join!(gui, project_manager)?; + ide_desktop.dist(&gui, &project_manager, &output_path, target_os).await?; + Ok(Artifact::new(target_os, target_arch, &version, output_path)) + } + .boxed() + } +} + +// impl IsTarget for Ide { +// type BuildInput = BuildInput; +// type Output = Artifact; +// +// fn artifact_name(&self) -> &str { +// // Version is not part of the name intentionally. We want to refer to PM bundles as +// // artifacts without knowing their version. +// static NAME: LazyLock = LazyLock::new(|| format!("gui-{}", TARGET_OS)); +// &*NAME +// } +// +// fn build( +// &self, +// input: Self::BuildInput, +// output_path: impl AsRef + Send + Sync + 'static, +// ) -> BoxFuture<'static, Result> { +// let ide_desktop = crate::ide::web::IdeDesktop::new(&input.repo_root.app.ide_desktop); +// async move { +// let (gui, project_manager) = try_join(input.gui, input.project_manager).await?; +// ide_desktop.dist(&gui, &project_manager, &output_path).await?; +// Ok(Artifact::new(&input.version, output_path.as_ref())) +// } +// .boxed() +// } +// } diff --git a/build/build/src/project/project_manager.rs b/build/build/src/project/project_manager.rs new file mode 100644 index 0000000000..73154c8c8f --- /dev/null +++ b/build/build/src/project/project_manager.rs @@ -0,0 +1,72 @@ +// use crate::prelude::*; +// +// use crate::engine::BuildConfigurationFlags; +// use crate::project::Context; +// use crate::project::IsArtifact; +// use crate::project::IsTarget; +// +// 
use ide_ci::goodie::GoodieDatabase; +// use ide_ci::ok_ready_boxed; +// +// pub use crate::project::backend::BuildInput; +// use crate::source::BuildTargetJob; +// use crate::source::WithDestination; +// +// #[derive(Clone, Debug)] +// pub struct Artifact { +// pub root: PathBuf, +// } +// +// impl AsRef for Artifact { +// fn as_ref(&self) -> &Path { +// &self.root +// } +// } +// +// impl IsArtifact for Artifact {} +// +// #[derive(Clone, Copy, Debug, PartialEq)] +// pub struct ProjectManager; +// +// impl IsTarget for ProjectManager { +// type BuildInput = BuildInput; +// type Artifact = Artifact; +// +// fn artifact_name(&self) -> String { +// "Enso Project Manager".into() +// } +// +// fn adapt_artifact(self, path: impl AsRef) -> BoxFuture<'static, Result> +// { ok_ready_boxed(Artifact { root: path.as_ref().into() }) +// } +// +// fn build_internal( +// &self, +// context: Context, +// job: BuildTargetJob, +// ) -> BoxFuture<'static, Result> { +// let WithDestination { inner, destination } = job; +// let this = self.clone(); +// async move { +// let paths = crate::paths::Paths::new_versions(&inner.repo_root, inner.versions)?; +// let context = crate::engine::context::RunContext { +// operation: crate::engine::Operation::Build, +// goodies: GoodieDatabase::new()?, +// config: BuildConfigurationFlags { +// clean_repo: false, +// build_project_manager_package: true, +// ..crate::engine::NIGHTLY +// } +// .into(), +// inner: context, +// paths, +// }; +// let artifacts = context.build().await?; +// let engine_distribution = +// artifacts.packages.project_manager.context("Missing Project Manager package!")?; +// ide_ci::fs::mirror_directory(&engine_distribution.dir, &destination).await?; +// this.adapt_artifact(destination).await +// } +// .boxed() +// } +// } diff --git a/build/build/src/project/runtime.rs b/build/build/src/project/runtime.rs new file mode 100644 index 0000000000..57738f9495 --- /dev/null +++ b/build/build/src/project/runtime.rs @@ -0,0 +1,92 @@ 
+//! Wrappers over the Rust part of the IDE codebase. + +use crate::prelude::*; + +use crate::engine::BuildConfigurationFlags; +use crate::paths::generated::EnginePackage; +use crate::paths::TargetTriple; +use crate::project::Context; +use crate::project::IsArtifact; +use crate::project::IsTarget; +use crate::source::BuildTargetJob; +use crate::source::WithDestination; +use crate::version::Versions; + + + +const ARTIFACT_NAME: &str = "runtime"; + +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct BuildInput { + pub versions: Versions, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Runtime; + +#[async_trait] +impl IsTarget for Runtime { + type BuildInput = BuildInput; + type Artifact = Artifact; + + fn artifact_name(&self) -> String { + ARTIFACT_NAME.into() + } + + fn adapt_artifact(self, path: impl AsRef) -> BoxFuture<'static, Result> { + ready(Ok(Artifact::new(path.as_ref()))).boxed() + } + + fn build_internal( + &self, + context: Context, + job: BuildTargetJob, + ) -> BoxFuture<'static, Result> { + let config = BuildConfigurationFlags { + build_engine_package: true, + generate_java_from_rust: true, + ..default() + }; + let this = *self; + let WithDestination { inner, destination } = job; + let triple = TargetTriple::new(inner.versions); + let context = crate::engine::RunContext::new(context, config, triple, None); + context + .and_then_async(|context| async move { + let artifacts = context.build().await?; + let engine_package = artifacts + .packages + .engine + .context("Failed to find engine package artifacts.")?; + ide_ci::fs::mirror_directory(&engine_package.dir, &destination).await?; + this.adapt_artifact(engine_package.dir).await + }) + .boxed() + } +} + +#[derive(Clone, Debug, Display, PartialEq, Eq)] +pub struct Artifact(EnginePackage); + +impl Artifact { + pub fn new(path: impl Into) -> Self { + Self(EnginePackage::new_root(path)) + } + + pub fn into_inner(self) -> EnginePackage { + self.0 + } +} + +impl AsRef for Artifact { + 
fn as_ref(&self) -> &Path { + self.0.as_path() + } +} + +impl IsArtifact for Artifact {} + + + +//////////////////////////////////////////////////////////////////////////////////////////// diff --git a/build/build/src/project/wasm.rs b/build/build/src/project/wasm.rs new file mode 100644 index 0000000000..cc6a63b786 --- /dev/null +++ b/build/build/src/project/wasm.rs @@ -0,0 +1,496 @@ +//! Wrappers over the Rust part of the IDE codebase. + +use crate::prelude::*; + +use crate::paths::generated::RepoRootDistWasm; +use crate::project::wasm::js_patcher::patch_js_glue_in_place; +use crate::project::Context; +use crate::project::IsArtifact; +use crate::project::IsTarget; +use crate::project::IsWatchable; +use crate::source::BuildTargetJob; +use crate::source::WatchTargetJob; +use crate::source::WithDestination; + +use derivative::Derivative; +use ide_ci::cache; +use ide_ci::env::Variable; +use ide_ci::fs::compressed_size; +use ide_ci::fs::copy_file_if_different; +use ide_ci::programs::cargo; +use ide_ci::programs::wasm_opt; +use ide_ci::programs::wasm_opt::WasmOpt; +use ide_ci::programs::wasm_pack; +use ide_ci::programs::Cargo; +use ide_ci::programs::WasmPack; +use semver::VersionReq; +use std::time::Duration; +use tempfile::tempdir; +use tokio::process::Child; + + +// ============== +// === Export === +// ============== + +pub mod env; +pub mod js_patcher; +pub mod test; + + + +pub const BINARYEN_VERSION_TO_INSTALL: u32 = 108; + +pub const DEFAULT_INTEGRATION_TESTS_WASM_TIMEOUT: Duration = Duration::from_secs(300); + +pub const INTEGRATION_TESTS_CRATE_NAME: &str = "enso-integration-test"; + +pub const OUTPUT_NAME: &str = "ide"; + +/// Name of the artifact that will be uploaded as part of CI run. 
+pub const WASM_ARTIFACT_NAME: &str = "gui_wasm"; + +pub const DEFAULT_TARGET_CRATE: &str = "app/gui"; + +#[derive(Clone, Copy, Debug, Default, strum::Display, strum::EnumString, PartialEq, Eq)] +#[strum(serialize_all = "kebab-case")] +pub enum ProfilingLevel { + #[default] + Objective, + Task, + Detail, + Debug, +} + +#[derive(clap::ArgEnum, Clone, Copy, Debug, PartialEq, Eq, strum::Display, strum::AsRefStr)] +#[strum(serialize_all = "kebab-case")] +pub enum Profile { + Dev, + Profile, + Release, + // Production, +} + +impl From for wasm_pack::Profile { + fn from(profile: Profile) -> Self { + match profile { + Profile::Dev => Self::Dev, + Profile::Profile => Self::Profile, + Profile::Release => Self::Release, + // Profile::Production => Self::Release, + } + } +} + +impl Profile { + pub fn should_check_size(self) -> bool { + match self { + Profile::Dev => false, + Profile::Profile => false, + Profile::Release => true, + // Profile::Production => true, + } + } + + pub fn extra_rust_options(self) -> Vec { + match self { + // Profile::Production => ["-Clto=fat", "-Ccodegen-units=1", "-Cincremental=false"] + // .into_iter() + // .map(ToString::to_string) + // .collect(), + Profile::Dev | Profile::Profile | Profile::Release => vec![], + } + } + + pub fn optimization_level(self) -> wasm_opt::OptimizationLevel { + match self { + Profile::Dev => wasm_opt::OptimizationLevel::O0, + Profile::Profile => wasm_opt::OptimizationLevel::O, + Profile::Release => wasm_opt::OptimizationLevel::O3, + } + } +} + +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct BuildInput { + /// Path to the crate to be compiled to WAM. Relative to the repository root. 
+ pub crate_path: PathBuf, + pub wasm_opt_options: Vec, + pub skip_wasm_opt: bool, + pub extra_cargo_options: Vec, + pub profile: Profile, + pub profiling_level: Option, + pub wasm_size_limit: Option, +} + +impl BuildInput { + pub async fn perhaps_check_size(&self, wasm_path: impl AsRef) -> Result { + let compressed_size = compressed_size(&wasm_path).await?.get_appropriate_unit(true); + info!("Compressed size of {} is {}.", wasm_path.as_ref().display(), compressed_size); + if let Some(wasm_size_limit) = self.wasm_size_limit { + let wasm_size_limit = wasm_size_limit.get_appropriate_unit(true); + if !self.profile.should_check_size() { + warn!("Skipping size check because profile is '{}'.", self.profile,); + } else if self.profiling_level.unwrap_or_default() != ProfilingLevel::Objective { + // TODO? additional leeway as sanity check + warn!( + "Skipping size check because profiling level is {:?} rather than {}.", + self.profiling_level, + ProfilingLevel::Objective + ); + } else { + ensure!( + compressed_size < wasm_size_limit, + "Compressed WASM size ~{} ({} bytes) exceeds the limit of {} ({} bytes).", + compressed_size, + compressed_size.get_byte(), + wasm_size_limit, + wasm_size_limit.get_byte(), + ) + } + } + Ok(()) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Wasm; + +#[async_trait] +impl IsTarget for Wasm { + type BuildInput = BuildInput; + type Artifact = Artifact; + + fn artifact_name(&self) -> String { + WASM_ARTIFACT_NAME.into() + } + + fn adapt_artifact(self, path: impl AsRef) -> BoxFuture<'static, Result> { + ready(Ok(Artifact::new(path.as_ref()))).boxed() + } + + fn build_internal( + &self, + context: Context, + job: BuildTargetJob, + ) -> BoxFuture<'static, Result> { + let Context { octocrab: _, cache, upload_artifacts: _, repo_root } = context; + let WithDestination { inner, destination } = job; + let span = info_span!("Building WASM.", + repo = %repo_root.display(), + crate = %inner.crate_path.display(), + cargo_opts = 
?inner.extra_cargo_options + ); + async move { + // Old wasm-pack does not pass trailing `build` command arguments to the Cargo. + // We want to be able to pass --profile this way. + WasmPack.require_present_that(VersionReq::parse(">=0.10.1")?).await?; + + let BuildInput { + crate_path, + wasm_opt_options, + skip_wasm_opt, + extra_cargo_options, + profile, + profiling_level, + wasm_size_limit: _wasm_size_limit, + } = &inner; + + cache::goodie::binaryen::Binaryen { version: BINARYEN_VERSION_TO_INSTALL } + .install_if_missing(&cache) + .await?; + + info!("Building wasm."); + let temp_dir = tempdir()?; + let temp_dist = RepoRootDistWasm::new_root(temp_dir.path()); + let mut command = WasmPack.cmd()?; + command + .current_dir(&repo_root) + .kill_on_drop(true) + .env_remove(ide_ci::programs::rustup::env::Toolchain::NAME) + .set_env(env::ENSO_ENABLE_PROC_MACRO_SPAN, &true)? + .build() + .arg(wasm_pack::Profile::from(*profile)) + .target(wasm_pack::Target::Web) + .output_directory(&temp_dist) + .output_name(OUTPUT_NAME) + .arg(crate_path) + .arg("--") + .apply(&cargo::Color::Always) + .args(extra_cargo_options); + + if let Some(profiling_level) = profiling_level { + command.set_env(env::ENSO_MAX_PROFILING_LEVEL, &profiling_level)?; + } + command.run_ok().await?; + + Self::finalize_wasm(wasm_opt_options, *skip_wasm_opt, *profile, &temp_dist).await?; + patch_js_glue_in_place(&temp_dist.wasm_glue)?; + + ide_ci::fs::create_dir_if_missing(&destination)?; + let ret = RepoRootDistWasm::new_root(&destination); + ide_ci::fs::copy(&temp_dist, &ret)?; + inner.perhaps_check_size(&ret.wasm_main).await?; + Ok(Artifact(ret)) + } + .instrument(span) + .boxed() + } +} + +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct WatchInput { + pub cargo_watch_options: Vec, +} + +impl IsWatchable for Wasm { + type Watcher = crate::project::Watcher; + type WatchInput = WatchInput; + + fn watch( + &self, + context: Context, + job: WatchTargetJob, + ) -> BoxFuture<'static, Result> { + let 
span = debug_span!("Watching WASM.", ?job).entered(); + + // The esbuild watcher must succeed in its first build, or it will prematurely exit. + // See the issue: https://github.com/evanw/esbuild/issues/1063 + // + // Because of this, we run first build of wasm manually, rather through cargo-watch. + // After it is completed, the cargo-watch gets spawned and this method yields the watcher. + // This forces esbuild watcher (whose setup requires the watcher artifacts) to wait until + // all wasm build outputs are in place, so the build won't crash. + // + // In general, much neater workaround should be possible, if we stop relying on cargo-watch + // and do the WASM watch directly in the build script. + let first_build_job = self + .build(context.clone(), job.build.clone()) + .instrument(debug_span!("Initial single build of WASM before setting up cargo-watch.")); + + async move { + let first_build_output = first_build_job.await?; + + let WatchTargetJob { + watch_input: WatchInput { cargo_watch_options: cargo_watch_flags }, + build: WithDestination { inner, destination }, + } = job; + let BuildInput { + crate_path, + wasm_opt_options, + skip_wasm_opt, + extra_cargo_options, + profile, + profiling_level, + wasm_size_limit, + } = inner; + + + let current_exe = std::env::current_exe()?; + // cargo-watch apparently cannot handle verbatim path prefix. We remove it and hope for + // the best. + let current_exe = current_exe.without_verbatim_prefix(); + + + let mut watch_cmd = Cargo.cmd()?; + + watch_cmd + .kill_on_drop(true) + .current_dir(&context.repo_root) + .arg("watch") + .args(["--ignore", "README.md"]) + .args(cargo_watch_flags) + .arg("--"); + + // === Build Script top-level options === + watch_cmd + // TODO [mwu] + // This is not nice, as this module should not be aware of the CLI + // parsing/generation. Rather than using `cargo watch` this should + // be implemented directly in Rust. 
+ .arg(current_exe) + .arg("--skip-version-check") // We already checked in the parent process. + .args(["--cache-path", context.cache.path().as_str()]) + .args(["--upload-artifacts", context.upload_artifacts.to_string().as_str()]) + .args(["--repo-path", context.repo_root.as_str()]); + + // === Build Script command and its options === + watch_cmd + .arg("wasm") + .arg("build") + .args(["--crate-path", crate_path.as_str()]) + .args(["--wasm-output-path", destination.as_str()]) + .args(["--wasm-profile", profile.as_ref()]); + if let Some(profiling_level) = profiling_level { + watch_cmd.args(["--profiling-level", profiling_level.to_string().as_str()]); + } + for wasm_opt_option in wasm_opt_options { + watch_cmd.args(["--wasm-opt-option", &wasm_opt_option]); + } + if skip_wasm_opt { + watch_cmd.args(["--skip-wasm-opt"]); + } + if let Some(wasm_size_limit) = wasm_size_limit { + watch_cmd.args(["--wasm-size-limit", wasm_size_limit.to_string().as_str()]); + } + + // === cargo-watch options === + watch_cmd.arg("--").args(extra_cargo_options); + + let watch_process = watch_cmd.spawn_intercepting()?; + let artifact = Artifact(RepoRootDistWasm::new_root(&destination)); + ensure!( + artifact == first_build_output, + "First build output does not match general watch build output. 
First build output: \ + {first_build_output:?}, general watch build output: {artifact:?}", + ); + Ok(Self::Watcher { artifact, watch_process }) + } + .instrument(span.exit()) + .boxed() + } +} + + + +#[derive(Clone, Debug, Display, PartialEq, Eq)] +pub struct Artifact(RepoRootDistWasm); + +impl Artifact { + pub fn new(path: impl Into) -> Self { + Self(RepoRootDistWasm::new_root(path)) + } + pub fn wasm(&self) -> &Path { + &self.0.wasm_main + } + pub fn js_glue(&self) -> &Path { + &self.0.wasm_glue + } + pub fn dir(&self) -> &Path { + &self.0.path + } +} + +impl AsRef for Artifact { + fn as_ref(&self) -> &Path { + self.0.as_path() + } +} + +impl IsArtifact for Artifact {} + +impl Wasm { + pub async fn check(&self) -> Result { + Cargo + .cmd()? + .apply(&cargo::Command::Check) + .apply(&cargo::Options::Workspace) + .apply(&cargo::Options::Package(INTEGRATION_TESTS_CRATE_NAME.into())) + .apply(&cargo::Options::AllTargets) + .run_ok() + .await + } + + pub async fn test(&self, repo_root: PathBuf, wasm: bool, native: bool) -> Result { + async fn maybe_run>( + name: &str, + enabled: bool, + f: impl (FnOnce() -> Fut), + ) -> Result { + if enabled { + info!("Will run {name} tests."); + f().await.context(format!("Running {name} tests.")) + } else { + info!("Skipping {name} tests."); + Ok(()) + } + } + + maybe_run("native", native, async || { + Cargo + .cmd()? + .current_dir(repo_root.clone()) + .apply(&cargo::Command::Test) + .apply(&cargo::Options::Workspace) + // Color needs to be passed to tests themselves separately. 
+ // See: https://github.com/rust-lang/cargo/issues/1983 + .arg("--") + .apply(&cargo::Color::Always) + .run_ok() + .await + }) + .await?; + + maybe_run("wasm", wasm, || test::test_all(repo_root.clone())).await?; + Ok(()) + } + + pub async fn integration_test( + &self, + source_root: PathBuf, + _project_manager: Option, + headless: bool, + additional_options: Vec, + wasm_timeout: Option, + ) -> Result { + info!("Running Rust WASM test suite."); + use wasm_pack::TestFlags::*; + WasmPack + .cmd()? + .current_dir(source_root) + .set_env_opt( + env::WASM_BINDGEN_TEST_TIMEOUT, + wasm_timeout.map(|d| d.as_secs()).as_ref(), + )? + .arg("test") + .apply_opt(headless.then_some(&Headless)) + .apply(&Chrome) + .arg("integration-test") + .arg("--profile=integration-test") + .args(additional_options) + .run_ok() + .await + // PM will be automatically killed by dropping the handle. + } + + /// Process "raw" WASM (as compiled) by optionally invoking wasm-opt. + pub async fn finalize_wasm( + wasm_opt_options: &[String], + skip_wasm_opt: bool, + profile: Profile, + temp_dist: &RepoRootDistWasm, + ) -> Result { + let should_call_wasm_opt = { + if profile == Profile::Dev { + debug!("Skipping wasm-opt invocation, as it is not part of profile {profile}."); + false + } else if skip_wasm_opt { + debug!("Skipping wasm-opt invocation, as it was explicitly requested."); + false + } else { + true + } + }; + + if should_call_wasm_opt { + let mut wasm_opt_command = WasmOpt.cmd()?; + let has_custom_opt_level = wasm_opt_options.iter().any(|opt| { + wasm_opt::OptimizationLevel::from_str(opt.trim_start_matches('-')).is_ok() + }); + if !has_custom_opt_level { + wasm_opt_command.apply(&profile.optimization_level()); + } + wasm_opt_command + .args(wasm_opt_options) + .arg(&temp_dist.wasm_main_raw) + .apply(&wasm_opt::Output(&temp_dist.wasm_main)) + .run_ok() + .await?; + } else { + copy_file_if_different(&temp_dist.wasm_main_raw, &temp_dist.wasm_main)?; + } + Ok(()) + } +} diff --git 
a/build/build/src/project/wasm/env.rs b/build/build/src/project/wasm/env.rs new file mode 100644 index 0000000000..7f8e66ae0d --- /dev/null +++ b/build/build/src/project/wasm/env.rs @@ -0,0 +1,38 @@ +//! Environment variables used by the GUI's Rust part build. + +use crate::project::wasm::ProfilingLevel; + + + +ide_ci::define_env_var! { + /// Enable a Rust unstable feature that the `#[profile]` macro uses to obtain source-file + /// and line number information to include in generated profile files. + /// + /// The IntelliJ Rust plugin does not support the `proc_macro_span` Rust feature; using it + /// causes JetBrains IDEs to become entirely unaware of the items produced by `#[profile]`. + /// (See: https://github.com/intellij-rust/intellij-rust/issues/8655) + /// + /// In order to have line number information in actual usage, but keep everything + /// understandable by JetBrains IDEs, we need IntelliJ/CLion to build crates differently + /// from how they are built for the application to be run. This is accomplished by gating + /// the use of the unstable functionality by a `cfg` flag. A `cfg` flag is disabled by + /// default, so when a Rust IDE builds crates internally in order to determine macro + /// expansions, it will do so without line numbers. When this script is used to build the + /// application, it is not for the purpose of IDE macro expansion, so we can safely enable + /// line numbers. + /// + /// The reason we don't use a Cargo feature for this is because this script can build + /// different crates, and we'd like to enable this feature when building any crate that + /// depends on the `profiler` crates. We cannot do something like + /// '--feature=enso_profiler/line-numbers' without causing build to fail when building a + /// crate that doesn't have `enso_profiler` in its dependency tree. 
+ ENSO_ENABLE_PROC_MACRO_SPAN, bool; + + /// Use the environment-variable API provided by the `enso_profiler_macros` library to + /// implement the public interface to profiling-level configuration (see: + /// https://github.com/enso-org/design/blob/main/epics/profiling/implementation.md) + ENSO_MAX_PROFILING_LEVEL, ProfilingLevel; + + /// The timeout for `wasm-bindgen-test-runner` in seconds. + WASM_BINDGEN_TEST_TIMEOUT, u64; +} diff --git a/build/build/src/project/wasm/js_patcher.rs b/build/build/src/project/wasm/js_patcher.rs new file mode 100644 index 0000000000..adb0d5e655 --- /dev/null +++ b/build/build/src/project/wasm/js_patcher.rs @@ -0,0 +1,75 @@ +use crate::prelude::*; + +use regex::Regex; + + + +lazy_static! { + pub static ref REPLACEMENTS: Vec = [ + Replacement::new(r"(?s)if \(typeof input === 'string'.*return wasm;", "return imports"), + Replacement::new( + r"(?s)if \(typeof input === 'undefined'.*const imports = \{\};", + "const imports = {};", + ), + Replacement::new(r"(?s)export default init;", "export default init"), + ] + .into_iter() + .try_collect_vec() + // We parse literals that we know to be correct, as ensured by the tests. 
+ .unwrap(); +} + +#[derive(Clone, Debug)] +pub struct Replacement { + pattern: Regex, + replacement: String, +} + +impl Replacement { + pub fn new(pattern: impl AsRef, replacement: impl Into) -> Result { + Ok(Self { pattern: Regex::new(pattern.as_ref())?, replacement: replacement.into() }) + } + + pub fn replace_all<'a>(&'_ self, text: &'a str) -> Cow<'a, str> { + self.pattern.replace_all(text, &self.replacement) + } +} + +pub fn multi_replace_all<'a, 'b>( + text: impl Into, + replacements: impl IntoIterator, +) -> String { + let init = text.into(); + replacements + .into_iter() + .fold(init, |text, replacement| replacement.replace_all(&text).to_string()) +} + +/// Workaround fix by wdanilo, see: +pub fn js_workaround_patcher(code: impl Into) -> Result { + let patched_code = multi_replace_all(code, REPLACEMENTS.as_slice()); + let epilogue = r"export function after_load(w,m) { wasm = w; init.__wbindgen_wasm_module = m;}"; + Ok(format!("{patched_code}\n{epilogue}")) +} + +pub fn patch_js_glue(input_path: impl AsRef, output_path: impl AsRef) -> Result { + debug!("Patching {}.", output_path.as_ref().display()); + let code = ide_ci::fs::read_to_string(&input_path)?; + let patched_code = js_workaround_patcher(code)?; + ide_ci::fs::write(output_path, patched_code)?; + Ok(()) +} + +pub fn patch_js_glue_in_place(path: impl AsRef) -> Result { + patch_js_glue(&path, &path) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn validate_regexes() { + for _replacement in REPLACEMENTS.iter() {} + } +} diff --git a/build/build/src/project/wasm/test.rs b/build/build/src/project/wasm/test.rs new file mode 100644 index 0000000000..2a233d1c56 --- /dev/null +++ b/build/build/src/project/wasm/test.rs @@ -0,0 +1,149 @@ +use crate::project::*; + +use anyhow::Context; +use ide_ci::programs::cargo; +use ide_ci::programs::WasmPack; + + + +/// List of crates that should not be tested by wasm-pack test. 
+const PACKAGE_BLACKLIST: [&str; 1] = ["integration-test"]; + +/// Attributes that denote WASM tests. +const WASM_TEST_ATTRIBUTES: [&str; 2] = ["#[wasm_bindgen_test]", "#[wasm_bindgen_test(async)]"]; + +/// Subdirectories in the crate directory that contain sources for the crate. +const SOURCE_SUBDIRECTORIES: [&str; 4] = ["src", "benches", "examples", "tests"]; + +/// Lists members of given Cargo.toml workspace. +fn get_all_crates(repo_root: impl AsRef) -> Result> { + let pattern = repo_root.as_ref().join("**/Cargo.toml"); + let all_paths = + glob::glob(pattern.as_str()).context(format!("Globbing {} failed", pattern.display()))?; + let valid_paths = all_paths.filter_map(|path| match path { + // FIXME explain unwrap + Ok(path) => Some(path.parent().unwrap().to_owned()), + Err(err) => { + error!("{err}"); + None + } + }); + Ok(valid_paths.collect()) +} + +/// Check if the given line of source code is an attribute denoting wasm test. +fn is_wasm_test_attribute(line: &str) -> bool { + WASM_TEST_ATTRIBUTES.contains(&line.trim()) +} + +/// Check if the given workspace member contains any wasm tests in the sources. +fn has_wasm_tests(member: &Path) -> bool { + if let Some(member) = member.to_str() { + // We go over selected subdirectories only to avoid entering into sources of other crates + // that are nested within this crate subtree. + for subdir in SOURCE_SUBDIRECTORIES { + let pattern = format!("{}/{}/**/*.rs", member, subdir); + for entry in glob::glob(&pattern).unwrap() { + let contents = ide_ci::fs::read_to_string(entry.unwrap()).unwrap(); + if contents.lines().any(is_wasm_test_attribute) { + return true; + } + } + } + false + } else { + println!( + "cargo:warning=Skipping the crate {} containing non-UTF-8 characters in its path. ", + member.to_string_lossy() + ); + false + } +} + +/// Parses file under given path as TOML value. 
+fn parse_toml(path: impl AsRef) -> toml::Value { + let path = path.as_ref(); + let data = ide_ci::fs::read_to_string(path).unwrap(); + data.parse().unwrap() +} + +/// Checks if the given member is blacklisted from running the tests. +fn blacklisted(memeber: &Path) -> bool { + PACKAGE_BLACKLIST.iter().any(|blacklisted| memeber.ends_with(blacklisted)) +} + +/// Checks if given workspace member is a proc-macro crate. +fn is_proc_macro_crate(member: &Path) -> bool { + let cargo_toml_path = member.join("Cargo.toml"); + let cargo_toml_root = parse_toml(cargo_toml_path); + get_proc_macro(cargo_toml_root).contains(&true) +} + +/// Retrieve a `lib.proc-macro` field from Cargo.toml +fn get_proc_macro(cargo_toml: toml::Value) -> Option { + cargo_toml.get("lib")?.get("proc-macro")?.as_bool() +} + +pub async fn test_all(repo_root: PathBuf) -> Result { + // FIXME args + //let wasm_pack_args = std::env::args().skip(1).collect::>(); + let all_members = get_all_crates(&repo_root)?; + + for member in all_members { + let member_str = member.to_string_lossy(); + if blacklisted(&member) { + info!("Skipping blacklisted crate {member_str}"); + } else if is_proc_macro_crate(&member) { + info!("Skipping proc-macro crate {member_str}"); + } else if has_wasm_tests(&member) { + info!("Running tests for {member_str}"); + WasmPack + .cmd()? + .current_dir(&repo_root) + .arg("test") + .arg("--headless") + .arg("--chrome") + .env("WASM_BINDGEN_TEST_TIMEOUT", "300") + // .args(&wasm_pack_args) + .arg(member.strip_prefix(&repo_root).unwrap()) + .apply(&cargo::Color::Always) + .run_ok() + .await?; + } else { + println!("No wasm tests in {}", member_str); + } + } + Ok(()) +} + +// /// Call wasm-pack test for each workspace member +// /// +// /// This function reads workspace members list from `Cargo.toml` in current directory, and call +// /// `wasm-pack test` each member. All script arguments are passed to `wasm-pack` process. 
+// fn main() { +// let wasm_pack_args = std::env::args().skip(1).collect::>(); +// let all_members = get_all_crates(); +// +// for member in all_members { +// let member_str = member.to_string_lossy(); +// if blacklisted(&member) { +// println!("Skipping blacklisted crate {}", member_str); +// } else if is_proc_macro_crate(&member) { +// println!("Skipping proc-macro crate {}", member_str); +// } else if has_wasm_tests(&member) { +// println!("Running tests for {}", member_str); +// let mut command = std::process::Command::new("wasm-pack"); +// command.arg("test").args(&wasm_pack_args).arg(&member); +// println!("{:?}", command); +// let status = command.status().unwrap(); +// if !status.success() { +// panic!("Process for {} failed!{}", member_str, match status.code() { +// Some(code) => format!(" Code: {}", code), +// None => String::new(), +// }); +// } +// } else { +// println!("No wasm tests in {}", member_str); +// } +// } +// } diff --git a/build/build/src/project_manager.rs b/build/build/src/project_manager.rs new file mode 100644 index 0000000000..9ed0d244a8 --- /dev/null +++ b/build/build/src/project_manager.rs @@ -0,0 +1,40 @@ +use crate::prelude::*; + +use crate::paths::TargetTriple; +use crate::paths::ARCHIVE_EXTENSION; + + + +pub fn url(target: &TargetTriple) -> Result { + #[allow(clippy::format_in_format_args)] // [mwu] I find this more readable. 
+ let url_text = format!( + "https://github.com/enso-org/{repo}/releases/download/{tag}/{asset}.{ext}", + repo = "ci-build", + tag = target.versions.tag(), + asset = format!("project-manager-bundle-{target}"), + ext = ARCHIVE_EXTENSION, + ); + Url::parse(&url_text).anyhow_err() +} + +pub async fn ensure_present(dist_path: impl AsRef, target: &TargetTriple) -> Result { + // Check if already done + let build_info_file = dist_path.as_ref().join("installed-enso-version"); + let old_info = dbg!(build_info_file.read_to_json::()); + if old_info.contains(target) { + debug!( + "Project Manager in version {target} is already installed, according to {info}.", + info = build_info_file.display() + ); + } else { + // We remove the build info file to avoid misinformation if the build is interrupted during + // the call to `download_project_manager`. + ide_ci::fs::remove_if_exists(&build_info_file)?; + + let url = url(target)?; + ide_ci::io::download_and_extract(url, &dist_path).await?; + ide_ci::fs::allow_owner_execute(crate::paths::project_manager(&dist_path))?; + build_info_file.write_as_json(&target)?; + } + Ok(()) +} diff --git a/build/build/src/release.rs b/build/build/src/release.rs new file mode 100644 index 0000000000..5eccffa697 --- /dev/null +++ b/build/build/src/release.rs @@ -0,0 +1,135 @@ +use crate::prelude::*; + +use crate::context::BuildContext; +use crate::paths::generated; +use crate::paths::TargetTriple; +use crate::paths::EDITION_FILE_ARTIFACT_NAME; +use crate::project; + +use ide_ci::github; +use ide_ci::programs::Docker; +use octocrab::models::repos::Release; +use tempfile::tempdir; + + + +pub async fn create_release(context: &BuildContext) -> Result { + let versions = &context.triple.versions; + let commit = ide_ci::actions::env::GITHUB_SHA.get()?; + + let changelog_contents = ide_ci::fs::read_to_string(&context.repo_root.changelog_md)?; + let latest_changelog_body = + crate::changelog::Changelog(&changelog_contents).top_release_notes()?; + + 
debug!("Preparing release {} for commit {}", versions.version, commit); + let release = context + .remote_repo + .repos(&context.octocrab) + .releases() + .create(&versions.tag()) + .target_commitish(&commit) + .name(&versions.pretty_name()) + .body(&latest_changelog_body.contents) + .prerelease(true) + .draft(true) + .send() + .await?; + + crate::env::ReleaseId.emit(&release.id)?; + Ok(release) +} + +pub async fn publish_release(context: &BuildContext) -> Result { + let BuildContext { inner: project::Context { octocrab, .. }, remote_repo, triple, .. } = + context; + + let release_id = crate::env::ReleaseId.fetch()?; + + debug!("Looking for release with id {release_id} on github."); + let release = remote_repo.repos(octocrab).releases().get_by_id(release_id).await?; + ensure!(release.draft, "Release has been already published!"); + + debug!("Found the target release, will publish it."); + remote_repo.repos(octocrab).releases().update(release.id.0).draft(false).send().await?; + debug!("Done. Release URL: {}", release.url); + + let temp = tempdir()?; + let edition_file_path = crate::paths::generated::RepoRootDistributionEditions::new_root( + temp.path(), + triple.versions.edition_name(), + ) + .edition_yaml; + + + ide_ci::actions::artifacts::download_single_file_artifact( + EDITION_FILE_ARTIFACT_NAME, + &edition_file_path, + ) + .await?; + + debug!("Updating edition in the AWS S3."); + crate::aws::update_manifest(remote_repo, &edition_file_path).await?; + + Ok(()) +} + +pub async fn deploy_to_ecr(context: &BuildContext, repository: String) -> Result { + let octocrab = &context.octocrab; + let release_id = crate::env::ReleaseId.fetch()?; + + let linux_triple = TargetTriple { os: OS::Linux, ..context.triple.clone() }; + let package_name = + generated::RepoRootBuiltDistribution::new_root(".", linux_triple.to_string()) + .enso_engine_triple + .file_name() + .context("Failed to get Engine Package name.")? 
+ .as_str() + .to_string(); + + let release = context.remote_repo.find_release_by_id(octocrab, release_id).await?; + let asset = github::find_asset_by_text(&release, &package_name)?; + + + let temp_for_archive = tempdir()?; + let downloaded_asset = context + .remote_repo + .download_asset_to(octocrab, asset, temp_for_archive.path().to_owned()) + .await?; + + let temp_for_extraction = tempdir()?; + ide_ci::archive::extract_to(&downloaded_asset, &temp_for_extraction).await?; + + let engine_package = generated::EnginePackage::new_under( + &temp_for_extraction, + context.triple.versions.version.to_string(), + ); + + + let config = &aws_config::load_from_env().await; + let client = aws_sdk_ecr::Client::new(config); + let repository_uri = crate::aws::ecr::get_repository_uri(&client, &repository).await?; + let tag = format!("{}:{}", repository_uri, context.triple.versions.version); + let _image = crate::aws::ecr::runtime::build_runtime_image( + context.repo_root.tools.ci.docker.clone(), + engine_package, + tag.clone(), + ) + .await?; + + let credentials = crate::aws::ecr::get_credentials(&client).await?; + Docker.while_logged_in(credentials, || async move { Docker.push(&tag).await }).await?; + Ok(()) +} + +pub async fn dispatch_cloud_image_build_action(octocrab: &Octocrab, version: &Version) -> Result { + let input = serde_json::json!({ + "version": version.to_string(), + }); + octocrab + .actions() + .create_workflow_dispatch("enso-org", "cloud-v2", "build-image.yaml", "main") + .inputs(input) + .send() + .await + .context("Failed to dispatch the cloud image build action.") +} diff --git a/build/build/src/repo.rs b/build/build/src/repo.rs new file mode 100644 index 0000000000..1db230ee4a --- /dev/null +++ b/build/build/src/repo.rs @@ -0,0 +1,36 @@ +use crate::prelude::*; + + + +/// Heuristic that checks if given path can be plausibly considered to be the root of the Enso +/// repository. 
+/// +/// Current heuristic is: contains Cargo workspace root and SBT build configuration file. +#[instrument(fields(path = %path.as_ref().display()), ret)] +pub fn looks_like_enso_repository_root(path: impl AsRef) -> bool { + (move || -> Result { + let cargo_toml = path.as_ref().join("Cargo.toml"); + if !ide_ci::fs::read_to_string(cargo_toml)?.contains("[workspace]") { + return Ok(false); + } + + Ok(path.as_ref().join("build.sbt").exists()) + })() + .unwrap_or(false) +} + +#[instrument(ret, err)] +pub fn deduce_repository_path() -> Result { + let candidate_paths = [ + std::env::current_dir().ok(), + std::env::current_dir().ok().and_then(|p| p.parent().map(ToOwned::to_owned)), + std::env::current_dir().ok().and_then(|p| p.parent().map(|p| p.join("enso5"))), + std::env::current_dir().ok().and_then(|p| p.parent().map(|p| p.join("enso"))), + ]; + for candidate in candidate_paths { + if let Some(path) = candidate && looks_like_enso_repository_root(&path) { + return Ok(path) + } + } + bail!("Could not deduce repository path.") +} diff --git a/build/build/src/rust.rs b/build/build/src/rust.rs new file mode 100644 index 0000000000..0620b09d80 --- /dev/null +++ b/build/build/src/rust.rs @@ -0,0 +1,8 @@ +use crate::prelude::*; + + +// ============== +// === Export === +// ============== + +pub mod parser; diff --git a/build/build/src/rust/parser.rs b/build/build/src/rust/parser.rs new file mode 100644 index 0000000000..f0841d9315 --- /dev/null +++ b/build/build/src/rust/parser.rs @@ -0,0 +1,86 @@ +use super::*; + +use crate::paths::generated::RepoRoot; + +use ide_ci::programs::cargo; +use ide_ci::programs::java; +use ide_ci::programs::javac; +use ide_ci::programs::Cargo; +use ide_ci::programs::Java; +use ide_ci::programs::Javac; + + + +const GENERATOR_CRATE_NAME: &str = "enso-parser-generate-java"; +const PARSER_JNI_CRATE_NAME: &str = "enso-parser-jni"; +const GENERATOR_BIN_NAME: &str = GENERATOR_CRATE_NAME; +const TEST_GENERATOR_BIN_NAME: &str = "java-tests"; +const 
GENERATED_CODE_NAMESPACE: [&str; 3] = ["org", "enso", "syntax2"]; +const GENERATED_TEST_CLASS: &str = "GeneratedFormatTests"; +const JAVA_EXTENSION: &str = ".java"; + +pub fn cargo_run_generator_cmd(repo_root: &Path, binary_name: &str) -> Result { + let mut ret = Cargo.cmd()?; + ret.current_dir(repo_root) + .apply(&cargo::Command::Run) + .apply(&cargo::Options::Package(GENERATOR_CRATE_NAME.into())) + .apply(&cargo::RunOption::Bin(binary_name.into())); + Ok(ret) +} + +#[context("Failed to generate Java code of the new parse bindings.")] +pub async fn generate_java_to(repo_root: &Path, output_path: &Path) -> Result { + // Generator requires that output directory exists. + // Also, we remove its previous content so the old artifacts don't pollute the output. + ide_ci::fs::tokio::reset_dir(&output_path).await?; + cargo_run_generator_cmd(repo_root, GENERATOR_BIN_NAME)? + .arg("--") + .arg(output_path) + .run_ok() + .await?; + + Ok(()) +} + +pub async fn generate_java(repo_root: &RepoRoot) -> Result { + let output_path = repo_root.target.generated_java.join_iter(GENERATED_CODE_NAMESPACE); + generate_java_to(repo_root, &output_path).await +} + +fn cargo_build_parser_jni(repo_root: &Path) -> Result { + let mut ret = Cargo.cmd()?; + ret.current_dir(repo_root) + .apply(&cargo::Command::Build) + .apply(&cargo::Options::Package(PARSER_JNI_CRATE_NAME.into())); + Ok(ret) +} + +#[context("Running self-tests for the generated Java sources failed.")] +pub async fn run_self_tests(repo_root: &RepoRoot) -> Result { + cargo_build_parser_jni(repo_root)?.run_ok().await?; + let base = &repo_root.target.generated_java; + let lib = &repo_root.lib.rust.parser.generate_java.java; + let package = repo_root.target.generated_java.join_iter(GENERATED_CODE_NAMESPACE); + let test = package.join(GENERATED_TEST_CLASS).with_extension(JAVA_EXTENSION); + let test_class = + GENERATED_CODE_NAMESPACE.into_iter().chain(Some(GENERATED_TEST_CLASS)).join("."); + + let tests_code = 
cargo_run_generator_cmd(repo_root, TEST_GENERATOR_BIN_NAME)? + .output_ok() + .await? + .into_stdout_string()?; + trace!("Generated test code:\n{tests_code}"); + ide_ci::fs::tokio::write(&test, tests_code).await?; + + Javac + .cmd()? + .apply(&javac::Classpath::new([lib.as_path(), base.as_path()])) + .apply(&javac::Options::Directory(base.into())) + .arg(&test) + .run_ok() + .await?; + + Java.cmd()?.apply(&java::Classpath::new([&base])).arg(&test_class).run_ok().await?; + + Ok(()) +} diff --git a/build/build/src/source.rs b/build/build/src/source.rs new file mode 100644 index 0000000000..47ffd06ca4 --- /dev/null +++ b/build/build/src/source.rs @@ -0,0 +1,99 @@ +use crate::prelude::*; + +use crate::project::IsTarget; +use crate::project::IsWatchable; + +use derivative::Derivative; +use ide_ci::models::config::RepoContext; +use octocrab::models::AssetId; +use octocrab::models::RunId; + + + +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub enum ExternalSource { + #[derivative(Debug = "transparent")] + OngoingCiRun(OngoingCiRunSource), + #[derivative(Debug = "transparent")] + CiRun(CiRunSource), + #[derivative(Debug = "transparent")] + LocalFile(PathBuf), + #[derivative(Debug = "transparent")] + Release(ReleaseSource), +} + +#[derive(Derivative)] +#[derivative(Debug)] +pub enum Source { + #[derivative(Debug = "transparent")] + BuildLocally(Target::BuildInput), + #[derivative(Debug = "transparent")] + External(ExternalSource), +} + +#[derive(Clone, Debug)] +pub struct OngoingCiRunSource { + pub artifact_name: String, +} + +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct CiRunSource { + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub repository: RepoContext, + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub run_id: RunId, + pub artifact_name: String, +} + +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct ReleaseSource { + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub 
repository: RepoContext, + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub asset_id: AssetId, +} + +#[derive(Clone, Debug, derive_more::Deref, derive_more::DerefMut)] +pub struct WithDestination { + #[deref] + #[deref_mut] + pub inner: T, + pub destination: PathBuf, +} + +impl WithDestination> { + pub fn to_external(&self) -> Option { + match &self.inner { + Source::BuildLocally(_) => None, + Source::External(external) => Some(WithDestination { + inner: external.clone(), + destination: self.destination.clone(), + }), + } + } +} + +impl WithDestination { + pub fn map(self, f: impl FnOnce(T) -> U) -> WithDestination { + WithDestination { inner: f(self.inner), destination: self.destination } + } +} + +pub type GetTargetJob = WithDestination>; +pub type FetchTargetJob = WithDestination; +pub type BuildTargetJob = WithDestination<::BuildInput>; + +#[derive(Debug)] +pub struct WatchTargetJob { + pub build: BuildTargetJob, + pub watch_input: Target::WatchInput, +} + +#[derive(Debug)] +pub enum FetchOrWatch { + Fetch(FetchTargetJob), + Watch(WatchTargetJob), +} diff --git a/build/build/src/version.rs b/build/build/src/version.rs new file mode 100644 index 0000000000..9b861ed6bb --- /dev/null +++ b/build/build/src/version.rs @@ -0,0 +1,277 @@ +use crate::prelude::*; + +use anyhow::Context; +use chrono::Datelike; +use derivative::Derivative; +use ide_ci::define_env_var; +use ide_ci::env::new::TypedVariable; +use ide_ci::models::config::RepoContext; +use octocrab::models::repos::Release; +use semver::Prerelease; +use std::collections::BTreeSet; +use strum::EnumIter; +use strum::EnumString; +use strum::IntoEnumIterator; +use tracing::instrument; + + + +// Variable that stores Enso Engine version. +define_env_var! 
{ + ENSO_VERSION, Version; + ENSO_EDITION, String; + ENSO_RELEASE_MODE, bool; +} + +pub const LOCAL_BUILD_PREFIX: &str = "dev"; +pub const NIGHTLY_BUILD_PREFIX: &str = "nightly"; + +pub fn default_dev_version() -> Version { + let mut ret = Version::new(0, 0, 0); + ret.pre = Prerelease::new(LOCAL_BUILD_PREFIX).unwrap(); + ret +} + +pub fn is_nightly_release(release: &Release) -> bool { + !release.draft && release.tag_name.contains(NIGHTLY_BUILD_PREFIX) +} + +pub async fn nightly_releases( + octocrab: &Octocrab, + repo: &RepoContext, +) -> Result> { + Ok(repo.all_releases(octocrab).await?.into_iter().filter(is_nightly_release)) +} + +pub async fn latest_nightly_release(octocrab: &Octocrab, repo: &RepoContext) -> Result { + // TODO: this assumes that releases are returned in date order, to be confirmed + // (but having to download all the pages to see which is latest wouldn't be nice) + nightly_releases(octocrab, repo).await?.next().context("Failed to find any nightly releases.") +} + + +#[derive(Clone, Derivative, Serialize, Deserialize, Shrinkwrap, PartialEq, Eq)] +#[derivative(Debug)] +pub struct Versions { + #[shrinkwrap(main_field)] + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub version: Version, + pub release_mode: bool, +} + +impl Default for Versions { + fn default() -> Self { + Versions { version: default_dev_version(), release_mode: false } + } +} + +impl Versions { + pub fn new(version: Version) -> Self { + let release_mode = !version.pre.as_str().contains(LOCAL_BUILD_PREFIX) + && !version.pre.as_str().contains("SNAPSHOT"); + Versions { version, release_mode } + } + + pub fn edition_name(&self) -> String { + self.version.to_string() + } + + pub fn pretty_name(&self) -> String { + format!("Enso {}", self.version) + } + + pub fn local_prerelease() -> Result { + Prerelease::new(LOCAL_BUILD_PREFIX).anyhow_err() + } + + pub async fn nightly_prerelease(octocrab: &Octocrab, repo: &RepoContext) -> Result { + let date = chrono::Utc::now(); + 
let date = date.format("%F").to_string(); + + let todays_pre_text = format!("{}.{}", NIGHTLY_BUILD_PREFIX, date); + let generate_ith = |index: u32| -> Result { + let pre = if index == 0 { + Prerelease::from_str(&todays_pre_text)? + } else { + Prerelease::from_str(&format!("{}.{}", todays_pre_text, index))? + }; + Ok(pre) + }; + + let relevant_nightly_versions = nightly_releases(octocrab, repo) + .await? + .filter_map(|release| { + if release.tag_name.contains(&todays_pre_text) { + let version = Version::parse(&release.tag_name).ok()?; + Some(version.pre) + } else { + None + } + }) + .collect::>(); + + // Generate subsequent tonight nightly subreleases, until a free one is found. + // Should happen rarely. + for index in 0.. { + let pre = generate_ith(index)?; + if !relevant_nightly_versions.contains(&pre) { + return Ok(pre); + } + } + unreachable!("After infinite loop.") + } + + pub fn tag(&self) -> String { + self.version.to_string() + } + + pub fn publish(&self) -> Result { + let edition = self.edition_name(); + ENSO_VERSION.emit_to_workflow(&self.version)?; + ENSO_EDITION.emit_to_workflow(edition.as_str())?; + ENSO_RELEASE_MODE.emit_to_workflow(&self.release_mode)?; + Ok(()) + } +} + +#[context("Deducing version using changelog file: {}", changelog_path.as_ref().display())] +pub fn base_version(changelog_path: impl AsRef) -> Result { + if let Ok(from_env) = ENSO_VERSION.get() { + return Ok(from_env); + } + + let changelog_contents = ide_ci::fs::read_to_string(changelog_path.as_ref())?; + let mut headers = crate::changelog::Changelog(&changelog_contents) + .iterate_headers() + .map(|h| Version::find_in_text(h.text)); + + let version = match headers.next() { + None => generate_initial_version(), + Some(Ok(top_version)) => top_version, + Some(Err(_top_non_version_thingy)) => match headers.next() { + Some(Ok(version)) => suggest_next_version(&version), + None => generate_initial_version(), + Some(Err(_)) => bail!("Two leading release headers have no version number 
in them."), + }, + }; + Ok(version) +} + +pub fn current_year() -> u64 { + chrono::Utc::today().year() as u64 +} + +pub fn generate_initial_version() -> Version { + Version::new(current_year(), 1, 1) +} + +pub fn suggest_next_version(previous: &Version) -> Version { + let year = current_year(); + if previous.major == year { + Version::new(year, previous.minor + 1, 1) + } else { + generate_initial_version() + } +} + +#[instrument(ret)] +pub fn versions_from_env(expected_build_kind: Option) -> Result> { + if let Ok(version) = ENSO_VERSION.get() { + // The currently adopted version scheme uses same string for version and edition name, + // so we enforce it here. There are no fundamental reasons for this requirement. + if let Ok(edition) = ENSO_EDITION.get() { + ensure!( + version.to_string() == edition, + "Inconsistent {} and {} variable values.", + ENSO_VERSION.name, + ENSO_EDITION.name + ); + } + if let Some(expected_build_kind) = expected_build_kind { + let found_build_kind = BuildKind::deduce(&version)?; + ensure!( + found_build_kind == expected_build_kind, + "Build kind mismatch. Found: {}, expected: {}.", + found_build_kind, + expected_build_kind + ) + } + let versions = Versions::new(version); + Ok(Some(versions)) + } else { + Ok(None) + } +} + +#[instrument(skip_all, ret)] +pub async fn deduce_versions( + octocrab: &Octocrab, + build_kind: BuildKind, + target_repo: Result<&RepoContext>, + root_path: impl AsRef, +) -> Result { + debug!("Deciding on version to target."); + if let Some(versions) = versions_from_env(Some(build_kind))? { + Ok(versions) + } else { + let changelog_path = crate::paths::root_to_changelog(&root_path); + let version = Version { + pre: match build_kind { + BuildKind::Dev => Versions::local_prerelease()?, + BuildKind::Nightly => Versions::nightly_prerelease(octocrab, target_repo?).await?, + }, + ..base_version(&changelog_path)? 
+ }; + Ok(Versions::new(version)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn is_nightly_test() { + let is_nightly = |text: &str| BuildKind::Nightly.matches(&Version::parse(text).unwrap()); + assert!(is_nightly("2022.1.1-nightly.2022.1.1")); + assert!(is_nightly("2022.1.1-nightly")); + assert!(is_nightly("2022.1.1-nightly.2022.1.1")); + assert!(is_nightly("2022.1.1-nightly.2022.1.1")); + + let version = Version::parse("2022.1.1-nightly.2022-06-06.3").unwrap(); + assert!(BuildKind::deduce(&version).contains(&BuildKind::Nightly)); + } + + #[test] + #[ignore] + fn iii() -> Result { + dbg!(base_version(r"H:\nbo\enso\app\gui\changelog.md")?); + Ok(()) + } +} + +#[derive(clap::ArgEnum, Clone, Copy, PartialEq, Eq, Debug, EnumString, EnumIter, strum::Display)] +#[strum(serialize_all = "kebab-case")] +pub enum BuildKind { + Dev, + Nightly, +} + +impl BuildKind { + pub fn prerelease_prefix(self) -> &'static str { + match self { + BuildKind::Dev => LOCAL_BUILD_PREFIX, + BuildKind::Nightly => NIGHTLY_BUILD_PREFIX, + } + } + + pub fn matches(self, version: &Version) -> bool { + version.pre.as_str().starts_with(self.prerelease_prefix()) + } + + pub fn deduce(version: &Version) -> Result { + BuildKind::iter() + .find(|kind| kind.matches(version)) + .context(format!("Failed to deduce build kind for version {version}")) + } +} diff --git a/build/ci_utils/Cargo.toml b/build/ci_utils/Cargo.toml new file mode 100644 index 0000000000..a852960c20 --- /dev/null +++ b/build/ci_utils/Cargo.toml @@ -0,0 +1,91 @@ +[package] +name = "ide-ci" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[dependencies] +anyhow = "1.0.44" +async-compression = { version = "0.3.12", features = ["tokio", "gzip"] } +async-trait = "0.1.51" +bincode = "1.3.3" +byte-unit = "4.0.14" +bytes = "1.0.0" +cached = "0.39.0" +convert_case = "0.6.0" +cfg-if = "1.0.0" +chrono = { version = "0.4.19", features = 
["serde"] } +clap = { version = "3.1.5", features = ["derive", "env"] } +cron = "0.12.0" +data-encoding = "2.3.2" +derivative = "2.2.0" +derive_more = "0.99.17" +dirs = "4.0.0" +filetime = "0.2.15" +flate2 = "1.0.22" +flume = "0.10.10" +fn-error-context = "0.2.0" +fs_extra = "1.2.0" +futures = "0.3.17" +futures-util = "0.3.17" +glob = "0.3.0" +graphql_client = "0.11.0" +headers = "0.3.7" +heck = "0.4.0" +http-serde = "1.1.0" +ifmt = "0.3.3" +indexmap = "1.7.0" +indicatif = "0.17.1" +itertools = "0.10.1" +lazy_static = "1.4.0" +log = "0.4.14" +mime = "0.3.16" +new_mime_guess = "4.0.0" +nix = { workspace = true } +octocrab = { git = "https://github.com/enso-org/octocrab", default-features = false, features = [ + "rustls" +] } +paste = "1.0.7" +path-absolutize = "3.0.11" +pathdiff = "0.2.1" +path-slash = "0.2.1" +platforms = "3.0.0" +pin-project = "1.0.8" +port_check = "0.1.5" +pretty_env_logger = "0.4.0" +proc-macro2 = "1.0.36" +quote = "1.0.15" +rand = "0.8.4" +regex = "1.5.4" +reqwest = { version = "0.11.5", default-features = false, features = [ + "stream" +] } +snafu = "0.7.0" +semver = { version = "1.0.4", features = ["serde"] } +serde = { version = "1.0.130", features = ["derive"] } +serde_json = "1.0.68" +serde_yaml = "0.9.10" +scopeguard = "1.1.0" +sha2 = "0.10.2" +shrinkwraprs = "0.3.0" +strum = { version = "0.24.0", features = ["derive"] } +symlink = "0.1.0" +syn = "1.0.86" +sysinfo = "0.26.2" +tar = "0.4.37" +tempfile = "3.2.0" +tokio = { workspace = true } +tokio-util = { version = "0.7.2", features = ["full"] } +tracing = "0.1.32" +tracing-subscriber = { version = "0.3.11", features = ["env-filter"] } +unicase = "2.6.0" +url = "2.2.2" +uuid = { version = "1.1.0", features = ["v4", "serde"] } +walkdir = "2.3.2" +which = "4.2.2" +whoami = "1.2.1" +zip = "0.6.2" + +[dev-dependencies] +warp = "0.3.2" +wiremock = "0.5.10" diff --git a/build/ci_utils/src/actions.rs b/build/ci_utils/src/actions.rs new file mode 100644 index 0000000000..c136c484a1 --- /dev/null 
+++ b/build/ci_utils/src/actions.rs @@ -0,0 +1,10 @@ +// ============== +// === Export === +// ============== + +pub mod artifacts; +pub mod context; +pub mod env; +pub mod workflow; + +pub use context::Context; diff --git a/build/ci_utils/src/actions/artifacts.rs b/build/ci_utils/src/actions/artifacts.rs new file mode 100644 index 0000000000..ef464f6aea --- /dev/null +++ b/build/ci_utils/src/actions/artifacts.rs @@ -0,0 +1,258 @@ +use crate::prelude::*; + +use crate::actions::artifacts::download::FileToDownload; +use crate::actions::artifacts::run_session::SessionClient; +use crate::actions::artifacts::upload::ArtifactUploader; +use crate::actions::artifacts::upload::FileToUpload; +use crate::actions::artifacts::upload::UploadOptions; + +use anyhow::Context as Trait_anyhow_Context; +use flume::Sender; +use serde::de::DeserializeOwned; +use tempfile::tempdir; + + +// ============== +// === Export === +// ============== + +pub mod artifact; +pub mod context; +pub mod download; +pub mod models; +pub mod raw; +pub mod run_session; +pub mod upload; + + + +pub const API_VERSION: &str = "6.0-preview"; + + +pub async fn execute_dbg( + client: &reqwest::Client, + reqeust: reqwest::RequestBuilder, +) -> Result { + let request = reqeust.build()?; + dbg!(&request); + let response = client.execute(request).await?; + dbg!(&response); + let text = response.text().await?; + debug!("{}", &text); + let deserialized = serde_json::from_str(&text)?; + dbg!(&deserialized); + Ok(deserialized) +} + +pub fn discover_and_feed(root_path: impl AsRef, sender: Sender) -> Result { + walkdir::WalkDir::new(&root_path).into_iter().try_for_each(|entry| { + let entry = entry?; + if entry.file_type().is_file() { + let file = FileToUpload::new_relative(&root_path, entry.path())?; + sender + .send(file) + .context("Stopping discovery in progress, because all listeners were dropped.")?; + }; + Ok(()) + }) +} + +pub fn discover_recursive( + root_path: impl Into, +) -> impl Stream + Send { + let root_path 
= root_path.into(); + + let (tx, rx) = flume::unbounded(); + tokio::task::spawn_blocking(move || discover_and_feed(root_path, tx)); + rx.into_stream() +} + +pub async fn upload( + file_provider: impl Stream + Send + 'static, + artifact_name: impl AsRef, + options: UploadOptions, +) -> Result { + let handler = + ArtifactUploader::new(SessionClient::new_from_env()?, artifact_name.as_ref()).await?; + let result = handler.upload_artifact_to_file_container(file_provider, &options).await; + // We want to patch size even if there were some failures. + handler.patch_artifact_size().await?; + result +} + +pub fn upload_single_file( + file: impl Into, + artifact_name: impl AsRef, +) -> impl Future { + let file = file.into(); + let files = single_file_provider(file); + (async move || -> Result { upload(files?, artifact_name, default()).await })() +} + +pub fn upload_directory( + dir: impl Into, + artifact_name: impl AsRef, +) -> impl Future { + let dir = dir.into(); + info!("Uploading directory {}.", dir.display()); + let files = single_dir_provider(&dir); + (async move || -> Result { upload(files?, artifact_name, default()).await })() +} + +#[tracing::instrument(skip_all , fields(artifact_name = %artifact_name.as_ref(), target = %target.as_ref().display()), err)] +pub async fn download_single_file_artifact( + artifact_name: impl AsRef, + target: impl AsRef, +) -> Result { + let downloader = + download::ArtifactDownloader::new(SessionClient::new_from_env()?, artifact_name.as_ref()) + .await?; + match downloader.file_items().collect_vec().as_slice() { + [item] => { + let file = FileToDownload { + target: target.as_ref().into(), + remote_source_location: item.content_location.clone(), + }; + downloader.download_file_item(&file).await?; + } + _ => bail!("The artifact {} does not contain only a single file.", artifact_name.as_ref()), + }; + Ok(()) +} + +pub fn single_file_provider( + path: impl Into, +) -> Result + 'static> { + let file = FileToUpload::new_in_root(path)?; + 
Ok(futures::stream::iter([file])) +} + +pub fn single_dir_provider(path: &Path) -> Result + 'static> { + // TODO not optimal, could discover files at the same time as handling them. + let files = walkdir::WalkDir::new(path) + .into_iter() + .try_collect_vec()? + .into_iter() + .filter(|entry| !entry.file_type().is_dir()) + .map(|entry| FileToUpload::new_relative(path, entry.path())) + .try_collect_vec()?; + + info!("Discovered {} files under the {}.", files.len(), path.display()); + Ok(futures::stream::iter(files)) +} + +#[tracing::instrument(skip_all , fields(path = %path_to_upload.as_ref().display(), artifact = artifact_name.as_ref()), err)] +pub async fn upload_compressed_directory( + path_to_upload: impl AsRef + Send, + artifact_name: impl AsRef + Send, +) -> Result { + let artifact_name = artifact_name.as_ref(); + let tempdir = tempdir()?; + let archive_path = tempdir.path().join(format!("{artifact_name}.tar.gz")); + + info!("Packing {} to {}", path_to_upload.as_ref().display(), archive_path.display()); + crate::archive::pack_directory_contents(&archive_path, path_to_upload).await?; + + info!("Starting upload of {artifact_name}."); + upload_single_file(&archive_path, artifact_name).await?; + info!("Completed upload of {artifact_name}."); + Ok(()) +} + +#[tracing::instrument(skip_all , fields(path = %path_to_extract.as_ref().display(), artifact = artifact_name.as_ref()), err)] +pub async fn retrieve_compressed_directory( + artifact_name: impl AsRef + Send, + path_to_extract: impl AsRef + Send, +) -> Result { + let artifact_name = artifact_name.as_ref(); + let tempdir = tempdir()?; + let archive_path = tempdir.path().join(format!("{artifact_name}.tar.gz")); + + download_single_file_artifact(&artifact_name, &archive_path).await?; + crate::archive::extract_to(&archive_path, &path_to_extract).await?; + Ok(()) +} + + +#[cfg(test)] +mod tests { + use super::*; + use crate::actions::artifacts::models::CreateArtifactResponse; + use reqwest::StatusCode; + use 
tempfile::TempDir; + use wiremock::matchers::method; + use wiremock::Mock; + use wiremock::MockServer; + use wiremock::ResponseTemplate; + + #[tokio::test(flavor = "multi_thread", worker_threads = 4)] + #[ignore] + async fn test_artifact_upload() -> Result { + let mock_server = MockServer::start().await; + + let text = r#"{"containerId":11099678,"size":-1,"signedContent":null,"fileContainerResourceUrl":"https://pipelines.actions.githubusercontent.com/VYS7uSE1JB12MkavBOHvD6nounefzg1s5vHmQvfbiLmuvFuM6c/_apis/resources/Containers/11099678","type":"actions_storage","name":"SomeFile","url":"https://pipelines.actions.githubusercontent.com/VYS7uSE1JB12MkavBOHvD6nounefzg1s5vHmQvfbiLmuvFuM6c/_apis/pipelines/1/runs/75/artifacts?artifactName=SomeFile","expiresOn":"2022-01-29T04:07:24.5807079Z","items":null}"#; + mock_server + .register( + Mock::given(method("POST")) + .respond_with(ResponseTemplate::new(StatusCode::CREATED).set_body_string(text)), + ) + .await; + + mock_server + .register( + Mock::given(method("PUT")) + .respond_with(ResponseTemplate::new(StatusCode::NOT_FOUND)), + ) + .await; + + std::env::set_var("ACTIONS_RUNTIME_URL", mock_server.uri()); + std::env::set_var("ACTIONS_RUNTIME_TOKEN", "password123"); + std::env::set_var("GITHUB_RUN_ID", "12"); + + let path_to_upload = "Cargo.toml"; + + let file_to_upload = FileToUpload { + local_path: PathBuf::from(path_to_upload), + remote_path: PathBuf::from(path_to_upload), + }; + + upload(futures::stream::once(ready(file_to_upload)), "MyCargoArtifact", default()).await?; + // artifacts::upload_path(path_to_upload).await?; + Ok(()) + //let client = reqwest::Client::builder().default_headers(). 
+ } + + #[tokio::test] + async fn discover_files_in_dir() -> Result { + let dir = TempDir::new()?; + crate::fs::create(dir.join_iter(["file"]))?; + crate::fs::create(dir.join_iter(["subdir/nested_file"]))?; + let stream = single_dir_provider(dir.as_ref())?; + let v = stream.collect::>().await; + dbg!(v); + Ok(()) + } + + #[test] + fn deserialize_response() -> Result { + let text = r#"{"containerId":11099678,"size":-1,"signedContent":null,"fileContainerResourceUrl":"https://pipelines.actions.githubusercontent.com/VYS7uSE1JB12MkavBOHvD6nounefzg1s5vHmQvfbiLmuvFuM6c/_apis/resources/Containers/11099678","type":"actions_storage","name":"SomeFile","url":"https://pipelines.actions.githubusercontent.com/VYS7uSE1JB12MkavBOHvD6nounefzg1s5vHmQvfbiLmuvFuM6c/_apis/pipelines/1/runs/75/artifacts?artifactName=SomeFile","expiresOn":"2022-01-29T04:07:24.5807079Z","items":null}"#; + let response = serde_json::from_str::(text)?; + // + // let patch_request = client.patch(artifact_url.clone()) + // .query(&[("artifactName", artifact_name)]) + // .header(reqwest::header::CONTENT_TYPE, "application/json") + // .json(&PatchArtifactSize {size: file.len()}); + + let path = PathBuf::from("Cargo.toml"); + let artifact_path = path.file_name().unwrap(); // FIXME + + let client = reqwest::ClientBuilder::new().build()?; + dbg!(artifact_path); + client + .patch(response.url) + .query(&[("itemPath", artifact_path.to_str().unwrap())]) + .build()?; + + Ok(()) + } +} diff --git a/build/ci_utils/src/actions/artifacts/artifact.rs b/build/ci_utils/src/actions/artifacts/artifact.rs new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/build/ci_utils/src/actions/artifacts/artifact.rs @@ -0,0 +1 @@ + diff --git a/build/ci_utils/src/actions/artifacts/context.rs b/build/ci_utils/src/actions/artifacts/context.rs new file mode 100644 index 0000000000..9c11181b98 --- /dev/null +++ b/build/ci_utils/src/actions/artifacts/context.rs @@ -0,0 +1,83 @@ +use crate::prelude::*; + +use 
crate::actions::artifacts::API_VERSION; +use crate::env::expect_var; +use crate::extensions::reqwest::ClientBuilderExt; + +use mime::Mime; +use reqwest::header::HeaderMap; +use reqwest::header::HeaderValue; +use reqwest::header::ACCEPT_ENCODING; +use reqwest::Client; +use reqwest::ClientBuilder; + + + +#[derive(Clone, Debug)] +pub struct Context { + pub runtime_url: Url, + pub runtime_token: String, + pub run_id: String, + pub api_version: String, +} + +impl Context { + pub fn new_from_env() -> Result { + let runtime_url = expect_var("ACTIONS_RUNTIME_URL")?.parse()?; + let runtime_token = expect_var("ACTIONS_RUNTIME_TOKEN")?; + let run_id = expect_var("GITHUB_RUN_ID")?; + let api_version = API_VERSION.to_string(); + Ok(Context { runtime_url, runtime_token, run_id, api_version }) + } + + pub fn artifact_url(&self) -> Result { + let Context { runtime_url, run_id, api_version, .. } = self; + let url_text = format!( + "{runtime_url}_apis/pipelines/workflows/{run_id}/artifacts?api-version={api_version}" + ); + Url::parse(&url_text).anyhow_err() + } + + pub fn prepare_client(&self, accept_mime: Mime) -> Result { + let mut headers = HeaderMap::new(); + headers.insert( + reqwest::header::ACCEPT, + iformat!("{accept_mime};api-version={self.api_version}").parse()?, + ); + headers.insert( + reqwest::header::AUTHORIZATION, + format!("Bearer {}", self.runtime_token).parse()?, + ); + + Ok(ClientBuilder::new().default_headers(headers).user_agent(crate::USER_AGENT)) + } + + pub fn json_client(&self) -> Result { + self.prepare_client(mime::APPLICATION_JSON)? + .default_content_type(mime::APPLICATION_JSON) + .build() + .anyhow_err() + } + + pub fn upload_client(&self) -> Result { + let keep_alive_seconds = 3; + + let mut headers = HeaderMap::new(); + headers.insert(reqwest::header::CONNECTION, HeaderValue::from_static("Keep-Alive")); + headers.insert("Keep-Alive", keep_alive_seconds.into()); + self.prepare_client(mime::APPLICATION_OCTET_STREAM)? 
+ .default_content_type(mime::APPLICATION_JSON) + .default_headers(headers) + .build() + .anyhow_err() + } + + pub fn download_client(&self) -> Result { + self.prepare_client(mime::APPLICATION_OCTET_STREAM)? + .default_content_type(mime::APPLICATION_JSON) + .keep_alive(10) + .default_header(ACCEPT_ENCODING, HeaderValue::try_from("gzip").unwrap()) + .build() + .anyhow_err() + } +} diff --git a/build/ci_utils/src/actions/artifacts/download.rs b/build/ci_utils/src/actions/artifacts/download.rs new file mode 100644 index 0000000000..483be8f4b0 --- /dev/null +++ b/build/ci_utils/src/actions/artifacts/download.rs @@ -0,0 +1,107 @@ +use crate::prelude::*; + +use crate::actions::artifacts::models::ArtifactResponse; +use crate::actions::artifacts::models::ContainerEntry; +use crate::actions::artifacts::models::ItemType; +use crate::actions::artifacts::run_session::SessionClient; +use crate::actions::artifacts::API_VERSION; + +use reqwest::header::HeaderMap; +use reqwest::header::HeaderValue; +use reqwest::header::ACCEPT; +use reqwest::header::ACCEPT_ENCODING; +use tokio::fs::create_dir_all; + + + +#[derive(Clone, Debug)] +pub struct ArtifactDownloader { + pub client: SessionClient, + pub artifact_name: String, + pub info: ArtifactResponse, + pub items: Vec, +} + +impl ArtifactDownloader { + pub async fn new(client: SessionClient, artifact_name: impl Into) -> Result { + let artifact_name = artifact_name.into(); + let list = client.list_artifacts().await?; + + let relevant_entry = list + .iter() + .find(|artifact| artifact.name == artifact_name) + .ok_or_else(|| anyhow!("Failed to find artifact by name {artifact_name}."))?; + + let items = client.get_container_items(relevant_entry).await?; + dbg!(&items); + Ok(Self { client, artifact_name, info: relevant_entry.clone(), items }) + } + + pub async fn download_file_item(&self, file: &FileToDownload) -> Result { + let span = info_span!("Downloading file from artifact", url = %file.remote_source_location, target = 
%file.target.display()); + async move { + let stream = + self.client.download_container_item(file.remote_source_location.clone()).await?; + crate::fs::tokio::copy_to_file(stream, &file.target).await?; + Ok(()) + } + .instrument(span) + .await + } + + pub async fn download_all_to(&self, root_path: &Path) -> Result { + for item in &self.items { + match item.item_type { + ItemType::File => { + let file = FileToDownload::new_to_subtree(root_path, item)?; + self.download_file_item(&file).await?; + } + ItemType::Folder => { + create_dir_all(root_path.join(item.relative_path())).await?; + } + } + } + Ok(()) + } + + pub fn file_items(&self) -> impl Iterator { + self.items.iter().filter(|entry| entry.item_type == ItemType::File) + } +} + + +#[derive(Clone, Debug)] +pub struct FileToDownload { + /// Absolute path in the local filesystem. + pub target: PathBuf, + /// Relative path within the artifact container. Does not include the leading segment with the + /// artifact name. + pub remote_source_location: Url, +} + +impl FileToDownload { + #[context("Failed to process entry {} from the artifact container.", entry.path.display())] + pub fn new_to_subtree(target_root: impl AsRef, entry: &ContainerEntry) -> Result { + Ok(Self { + target: target_root.as_ref().join(entry.relative_path()), + remote_source_location: entry.content_location.clone(), + }) + } +} + +pub fn headers() -> HeaderMap { + let mut header = HeaderMap::new(); + // We can safely unwrap, because we know that all mime types are in format that can be used + // as HTTP header value. 
+ header.insert(ACCEPT_ENCODING, HeaderValue::from_static("gzip")); + header.insert( + ACCEPT, + HeaderValue::try_from(format!( + "{};api-version={}", + mime::APPLICATION_OCTET_STREAM, + API_VERSION + )) + .unwrap(), + ); + header +} diff --git a/build/ci_utils/src/actions/artifacts/models.rs b/build/ci_utils/src/actions/artifacts/models.rs new file mode 100644 index 0000000000..c109f2a393 --- /dev/null +++ b/build/ci_utils/src/actions/artifacts/models.rs @@ -0,0 +1,141 @@ +use crate::prelude::*; + +use chrono::DateTime; +use chrono::Utc; + + + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "PascalCase")] // Sic! +pub struct CreateArtifactRequest { + r#type: String, + name: String, + // GH Actions server does not support deserializing optional fields that are described as + // `null`. + #[serde(skip_serializing_if = "Option::is_none")] + retention_days: Option, +} + +impl CreateArtifactRequest { + pub fn new(name: impl Into, retention_days: Option) -> Self { + CreateArtifactRequest { + r#type: "actions_storage".to_string(), + name: name.into(), + retention_days, + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] // Sic! +pub struct CreateArtifactResponse { + pub container_id: u64, + pub size: i64, // must be signed, as -1 is used as a placeholder + pub signed_content: Option, + pub file_container_resource_url: Url, + pub r#type: String, + pub name: String, + pub url: Url, + pub expires_on: String, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] // Sic! +pub struct UploadFileQuery { + pub file: String, + pub resource_url: Url, + pub max_chunk_size: i64, + pub continue_on_error: bool, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +#[serde(rename_all = "PascalCase")] // Sic! +pub struct PatchArtifactSize { + pub size: usize, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] // Sic! 
+pub struct PatchArtifactSizeResponse { + pub container_id: u64, + pub size: i64, + pub signed_content: Option, + pub r#type: String, + pub name: String, + pub url: Url, + // This is not actually present, despite what GH sources say. + // pub upload_url: Url, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ListArtifactsResponse { + pub count: i64, + pub value: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ArtifactResponse { + pub container_id: u64, + pub size: i64, + pub signed_content: Option, + pub file_container_resource_url: Url, + pub r#type: String, + pub name: String, + pub url: Url, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct QueryArtifactResponse { + pub count: i64, + pub value: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ContainerEntry { + pub container_id: u64, + pub scope_identifier: Uuid, + pub path: PathBuf, + pub item_type: ItemType, + pub status: EntryStatus, + pub file_length: Option, + pub file_encoding: Option, + pub file_type: Option, + pub date_created: DateTime, + pub date_last_modified: DateTime, + pub created_by: Uuid, + pub last_modified_by: Uuid, + pub item_location: Url, + pub content_location: Url, + pub file_id: Option, + pub content_id: String, +} + +impl ContainerEntry { + pub fn relative_path(&self) -> PathBuf { + //ensure!(self.path.is_relative(), "Path {} is not relative.", self.path.display()); + // First part is artifact name. + let path_iter = self.path.iter().skip(1); + // ensure!( + // path_iter.next() == Some(&OsStr::new(artifact_name)), + // "Entry path does not start with an artifact name." + // ); + PathBuf::from_iter(path_iter) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum EntryStatus { + Created, + PendingUpload, + // No other values known at this point. 
+} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum ItemType { + File, + Folder, +} diff --git a/build/ci_utils/src/actions/artifacts/raw.rs b/build/ci_utils/src/actions/artifacts/raw.rs new file mode 100644 index 0000000000..06661a0480 --- /dev/null +++ b/build/ci_utils/src/actions/artifacts/raw.rs @@ -0,0 +1,258 @@ +use crate::prelude::*; + +use crate::actions::artifacts::models::ArtifactResponse; +use crate::actions::artifacts::models::CreateArtifactRequest; +use crate::actions::artifacts::models::CreateArtifactResponse; +use crate::actions::artifacts::models::ListArtifactsResponse; +use crate::actions::artifacts::models::PatchArtifactSize; +use crate::actions::artifacts::models::PatchArtifactSizeResponse; +use crate::actions::artifacts::models::QueryArtifactResponse; +use crate::reqwest::ContentRange; + +use anyhow::Context; +use bytes::BytesMut; +use reqwest::header::HeaderMap; +use reqwest::Body; +use reqwest::Response; +use reqwest::StatusCode; +use serde::de::DeserializeOwned; +use tokio::io::AsyncReadExt; + + + +pub mod endpoints { + use super::*; + use reqwest::header::HeaderValue; + use std::pin::Pin; + use tokio::io::AsyncRead; + + /// Creates a file container for the new artifact in the remote blob storage/file service. + /// + /// Returns the response from the Artifact Service if the file container was successfully + /// create. 
+ #[context("Failed to create a file container for the new artifact `{}`.", artifact_name.as_ref())] + pub async fn create_container( + json_client: &reqwest::Client, + artifact_url: Url, + artifact_name: impl AsRef, + ) -> Result { + let body = CreateArtifactRequest::new(artifact_name.as_ref(), None); + // + // dbg!(&self.json_client); + // dbg!(serde_json::to_string(&body)?); + let request = json_client.post(artifact_url).json(&body).build()?; + + // dbg!(&request); + // TODO retry + let response = json_client.execute(request).await?; + // dbg!(&response); + // let status = response.status(); + check_response_json(response, |status, err| match status { + StatusCode::FORBIDDEN => err.context( + "Artifact storage quota has been hit. Unable to upload any new artifacts.", + ), + StatusCode::BAD_REQUEST => err.context(format!( + "Server rejected the request. Is the artifact name {} valid?", + artifact_name.as_ref() + )), + _ => err, + }) + .await + } + + pub async fn upload_file_chunk( + client: &reqwest::Client, + upload_url: Url, + body: impl Into, + range: ContentRange, + remote_path: impl AsRef, + ) -> Result { + use path_slash::PathExt; + let body = body.into(); + let response = client + .put(upload_url) + .query(&[("itemPath", remote_path.as_ref().to_slash_lossy())]) + .header(reqwest::header::CONTENT_LENGTH, range.len()) + .header(reqwest::header::CONTENT_RANGE, &range) + .body(body) + .send() + .await?; + + check_response(response, |_, e| e).await?; + Ok(range.len()) + } + + #[context("Failed to list artifacts for the current run.")] + pub async fn list_artifacts( + json_client: &reqwest::Client, + artifact_url: Url, + ) -> Result> { + Ok(json_client.get(artifact_url).send().await?.json::().await?.value) + } + + #[context("Getting container items of artifact {}.", artifact_name.as_ref())] + pub async fn get_container_items( + json_client: &reqwest::Client, + container_url: Url, + artifact_name: impl AsRef, + ) -> Result { + let body = json_client + 
.get(container_url) + .query(&item_path_query(&artifact_name.as_ref())) + .send() + .await? + .json::() + .await?; + debug!("{}", serde_json::to_string_pretty(&body)?); + serde_json::from_value(body).anyhow_err() + } + + #[context("Failed to finalize upload of the artifact `{}`.", artifact_name.as_ref())] + pub async fn patch_artifact_size( + json_client: &reqwest::Client, + artifact_url: Url, + artifact_name: impl AsRef, + size: usize, + ) -> Result { + debug!("Patching the artifact `{}` size.", artifact_name.as_ref()); + let artifact_url = artifact_url.clone(); + + let patch_request = json_client + .patch(artifact_url.clone()) + .query(&[("artifactName", artifact_name.as_ref())]) // OsStr can be passed here, fails runtime + .json(&PatchArtifactSize { size }); + + // TODO retry + let response = patch_request.send().await?; + Ok(response.json().await?) + } + + pub async fn download_item( + bin_client: &reqwest::Client, + artifact_location: Url, + ) -> Result>> { + // debug!("Downloading {} to {}.", artifact_location, destination.as_ref().display()); + // let file = tokio::fs::File::create(destination); + + let response = crate::io::web::execute(bin_client.get(artifact_location)).await?; + // let expected_size = decode_content_length(response.headers()); + let is_gzipped = response + .headers() + .get(reqwest::header::ACCEPT_ENCODING) + .contains(&HeaderValue::from_static("gzip")); + + let reader = crate::io::web::async_reader(response); + if is_gzipped { + let decoded_stream = async_compression::tokio::bufread::GzipDecoder::new(reader); + Ok(Box::pin(decoded_stream) as Pin>) + // tokio::io::copy(&mut decoded_stream, &mut file.await?).await?; + } else { + Ok(Box::pin(reader) as Pin>) + // tokio::io::copy(&mut reader, &mut destination).await?; + } + } +} + +pub fn decode_content_length(headers: &HeaderMap) -> Option { + let value = headers.get(reqwest::header::CONTENT_LENGTH)?; + let text = value.to_str().ok()?; + text.parse::().ok() +} + +#[context("Failed to upload 
the file '{}' to path '{}'.", local_path.as_ref().display(), remote_path.as_ref().display())] +#[instrument(skip_all, err, fields(local_path = %local_path.as_ref().display(), remote_path = %remote_path.as_ref().display(), %upload_url))] +pub async fn upload_file( + client: &reqwest::Client, + chunk_size: usize, + upload_url: Url, + local_path: impl AsRef, + remote_path: impl AsRef, +) -> Result { + let file = tokio::fs::File::open(local_path.as_ref()).await?; + // TODO [mwu] note that metadata can lie about file size, e.g. named pipes on Linux + let len = file.metadata().await?.len() as usize; + trace!( + "Will upload file {} of size {} to remote path {}", + local_path.as_ref().display(), + len, + remote_path.as_ref().display() + ); + if len < chunk_size && len > 0 { + let range = ContentRange::whole(len); + endpoints::upload_file_chunk(client, upload_url.clone(), file, range, &remote_path).await + } else { + let mut chunks = stream_file_in_chunks(file, chunk_size).boxed(); + let mut current_position = 0; + loop { + let chunk = match chunks.try_next().await? { + Some(chunk) => chunk, + None => break, + }; + + let read_bytes = chunk.len(); + let range = ContentRange { + range: current_position..=current_position + read_bytes.saturating_sub(1), + total: Some(len), + }; + endpoints::upload_file_chunk(client, upload_url.clone(), chunk, range, &remote_path) + .await?; + current_position += read_bytes; + } + Ok(current_position) + } +} + +pub async fn check_response_json( + response: Response, + additional_context: impl FnOnce(StatusCode, anyhow::Error) -> anyhow::Error, +) -> Result { + let data = check_response(response, additional_context).await?; + serde_json::from_slice(data.as_ref()).context(anyhow!( + "Failed to deserialize response body as {}. 
Body was: {:?}", + std::any::type_name::(), + data, + )) +} +pub async fn check_response( + response: Response, + additional_context: impl FnOnce(StatusCode, anyhow::Error) -> anyhow::Error, +) -> Result { + // dbg!(&response); + let status = response.status(); + if !status.is_success() { + let mut err = anyhow!("Server replied with status {}.", status); + + let body = response + .bytes() + .await + .map_err(|e| anyhow!("Also failed to obtain the response body: {}", e))?; + + if let Ok(body_text) = std::str::from_utf8(body.as_ref()) { + err = err.context(format!("Error response body was: {}", body_text)); + } + + let err = additional_context(status, err); + Err(err) + } else { + response.bytes().await.context("Failed to read the response body.") + } +} + +pub fn stream_file_in_chunks( + file: tokio::fs::File, + chunk_size: usize, +) -> impl Stream> + Send { + futures::stream::try_unfold(file, async move |mut file| { + let mut buffer = BytesMut::with_capacity(chunk_size); + while file.read_buf(&mut buffer).await? 
> 0 && buffer.len() < chunk_size {} + if buffer.is_empty() { + Ok::<_, anyhow::Error>(None) + } else { + Ok(Some((buffer.freeze(), file))) + } + }) +} + +pub fn item_path_query(artifact_name: impl Serialize) -> impl Serialize { + [("itemPath", artifact_name)] +} diff --git a/build/ci_utils/src/actions/artifacts/run_session.rs b/build/ci_utils/src/actions/artifacts/run_session.rs new file mode 100644 index 0000000000..0580d5d206 --- /dev/null +++ b/build/ci_utils/src/actions/artifacts/run_session.rs @@ -0,0 +1,86 @@ +use crate::prelude::*; + +use crate::actions::artifacts::context::Context; +use crate::actions::artifacts::models::ArtifactResponse; +use crate::actions::artifacts::models::ContainerEntry; +use crate::actions::artifacts::models::CreateArtifactResponse; +use crate::actions::artifacts::models::PatchArtifactSizeResponse; +use crate::actions::artifacts::raw; + +use reqwest::Client; +use tokio::io::AsyncRead; + + + +#[derive(Clone, Debug)] +pub struct SessionClient { + pub json_client: Client, + pub upload_client: Client, + pub download_client: Client, + pub artifact_url: Url, +} + +impl SessionClient { + pub async fn create_container( + &self, + artifact_name: impl AsRef, + ) -> Result { + raw::endpoints::create_container( + &self.json_client, + self.artifact_url.clone(), + artifact_name, + ) + .await + } + + pub async fn list_artifacts(&self) -> Result> { + raw::endpoints::list_artifacts(&self.json_client, self.artifact_url.clone()).await + } + + pub fn new(context: &Context) -> Result { + Ok(Self { + json_client: context.json_client()?, + upload_client: context.upload_client()?, + artifact_url: context.artifact_url()?, + download_client: context.download_client()?, + }) + } + + pub fn new_from_env() -> Result { + Self::new(&Context::new_from_env()?) 
+ } + + pub async fn patch_artifact_size( + &self, + artifact_name: &str, + total_size: usize, + ) -> Result { + raw::endpoints::patch_artifact_size( + &self.json_client, + self.artifact_url.clone(), + artifact_name, + total_size, + ) + .await + } + + pub async fn get_container_items( + &self, + artifact: &ArtifactResponse, + ) -> Result> { + Ok(crate::actions::artifacts::raw::endpoints::get_container_items( + &self.json_client, + artifact.file_container_resource_url.clone(), + &artifact.name, + ) + .await? + .value) + } + + pub async fn download_container_item( + &self, + content_location: Url, + ) -> Result { + raw::endpoints::download_item(&self.download_client, content_location).await + } +} diff --git a/build/ci_utils/src/actions/artifacts/upload.rs b/build/ci_utils/src/actions/artifacts/upload.rs new file mode 100644 index 0000000000..e15707ae45 --- /dev/null +++ b/build/ci_utils/src/actions/artifacts/upload.rs @@ -0,0 +1,288 @@ +use crate::prelude::*; + +use crate::actions::artifacts::models::PatchArtifactSizeResponse; +use crate::actions::artifacts::raw; +use crate::actions::artifacts::run_session::SessionClient; +use crate::global; + +use anyhow::Context; +use reqwest::Client; +use std::sync::atomic::Ordering; + + + +#[derive(Clone, Copy, Debug)] +pub struct UploadOptions { + pub file_concurrency: usize, + pub chunk_size: usize, + // by default, file uploads will continue if there is an error unless specified differently in + // the options + pub continue_on_error: bool, +} + +impl Default for UploadOptions { + fn default() -> Self { + UploadOptions { + chunk_size: 8 * 1024 * 1024, + file_concurrency: 10, + continue_on_error: true, + } + } +} + +#[derive(Debug)] +pub struct ArtifactUploader { + pub client: SessionClient, + pub artifact_name: String, + pub upload_url: Url, + pub total_size: std::sync::atomic::AtomicUsize, + pub cancel: tokio_util::sync::CancellationToken, +} + +impl ArtifactUploader { + pub async fn new(client: SessionClient, artifact_name: 
impl Into) -> Result { + let artifact_name = artifact_name.into(); + let container = client.create_container(&artifact_name).await?; + info!("Created a container {} for artifact '{}'.", container.container_id, artifact_name); + Ok(Self { + client, + artifact_name, + upload_url: container.file_container_resource_url, + total_size: default(), + cancel: default(), + }) + } + + + pub fn uploader(&self, options: &UploadOptions) -> FileUploader { + FileUploader { + url: self.upload_url.clone(), + client: self.client.upload_client.clone(), + artifact_name: PathBuf::from(&self.artifact_name), + chunk_size: options.chunk_size, + } + } + + /// Concurrently upload all of the files in chunks. + pub async fn upload_artifact_to_file_container( + &self, + files_to_upload: impl Stream + Send + 'static, + options: &UploadOptions, + ) -> Result { + debug!( + "File Concurrency: {}, and Chunk Size: {}. URL: {}", + options.file_concurrency, options.chunk_size, self.upload_url + ); + + let (work_tx, work_rx) = flume::unbounded(); + let (result_tx, result_rx) = flume::unbounded(); + + tokio::task::spawn(async move { + debug!("Spawned the file discovery worker."); + files_to_upload + .inspect(|f| debug!("File {} discovered for upload.", f.local_path.display())) + .map(Ok) + .forward(work_tx.into_sink()) + .await + .unwrap(); + debug!("File discovery complete."); + }); + + for index in 0..options.file_concurrency { + let span = debug_span!("Upload worker", index).entered(); + let worker_task = upload_worker( + self.cancel.clone(), + work_rx.clone(), + self.uploader(options), + result_tx.clone(), + ) + .map(Result::Ok); + debug!("Spawning the worker task."); + global::spawn(format!("uploader {index}"), worker_task.instrument(span.exit())); + } + + drop(result_tx); + + let results = result_rx.into_stream().collect::>().await; + let uploaded_size = results.iter().fold(0, |acc, r| acc + r.total_size); + debug!("Uploaded in total {} bytes.", uploaded_size); + 
self.total_size.fetch_add(uploaded_size, Ordering::SeqCst); + let errors = results.into_iter().filter_map(|r| r.result.err()).collect_vec(); + if !errors.is_empty() { + let mut error = anyhow!( + "Not all file uploads were successful. Encountered {} errors: {:#?}", + errors.len(), + errors + ); + for cause in errors { + error = error.context(cause); + } + Err(error) + } else { + Ok(()) + } + } + + pub async fn patch_artifact_size(&self) -> Result { + let total_size = self.total_size.load(Ordering::SeqCst); + self.client.patch_artifact_size(&self.artifact_name, total_size).await + } +} + +pub async fn upload_worker( + cancellation_token: tokio_util::sync::CancellationToken, + job_receiver: flume::Receiver, + uploader: FileUploader, + result_sender: flume::Sender, +) { + debug!("Upload worker spawned."); + let mut job_receiver = job_receiver.into_stream(); + loop { + trace!("Waiting for input."); + let mut on_cancelled = pin!(cancellation_token.cancelled().fuse()); + select! { + _ = on_cancelled => { + debug!("Upload worker has been cancelled."); + break; + }, + (job, tail) = job_receiver.into_future() => { + job_receiver = tail; + trace!("Got job: {job:?}."); + match job { + Some(job) => { + let result = uploader.upload_file(&job).await; + result_sender.send(result).unwrap(); + } + None => { + debug!("Upload worker completed all available work."); + break; + } + } + trace!("Job complete."); + } + complete => { + trace!("Complete."); + break; + }, + } + } + debug!("Upload worker finished."); +} + +#[derive(Derivative)] +#[derivative(Debug)] +pub struct FileUploader { + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub url: Url, + #[derivative(Debug = "ignore")] + pub client: Client, + pub artifact_name: PathBuf, + pub chunk_size: usize, +} + +impl FileUploader { + pub async fn upload_file(&self, file_to_upload: &FileToUpload) -> UploadResult { + let uploading_res = raw::upload_file( + &self.client, + self.chunk_size, + self.url.clone(), + 
&file_to_upload.local_path, + self.artifact_name.join(&file_to_upload.remote_path), + ) + .await; + match uploading_res { + Ok(len) => UploadResult { + result: Ok(()), + total_size: len, + successful_upload_size: len, + }, + Err(e) => UploadResult { + result: Err(e), + total_size: 0, + successful_upload_size: 0, + }, + } + } +} + +#[derive(Clone, Debug)] +pub struct FileToUpload { + /// Absolute path in the local filesystem. + pub local_path: PathBuf, + /// Relative path within the artifact container. Does not include the leading segment with the + /// artifact name. + pub remote_path: PathBuf, +} + +impl FileToUpload { + pub fn new_in_root(path: impl Into) -> Result { + let local_path = path.into(); + let remote_path = local_path.file_name().map(into).ok_or_else(|| { + anyhow!("Path {} does not contain a valid filename.", local_path.display()) + })?; + Ok(Self { local_path, remote_path }) + } + + pub fn new_relative( + root_path: impl AsRef, + local_path: impl Into, + ) -> Result { + let local_path = local_path.into(); + Ok(FileToUpload { + remote_path: local_path + .strip_prefix(&root_path) + .context(format!( + "Failed to strip prefix {} from path {}.", + root_path.as_ref().display(), + local_path.display() + ))? 
+ .to_path_buf(), + local_path, + }) + } +} + +#[derive(Debug)] +pub struct UploadResult { + pub result: Result, + pub successful_upload_size: usize, + pub total_size: usize, +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::actions::artifacts; + use crate::actions::artifacts::models::CreateArtifactResponse; + use crate::log::setup_logging; + + #[tokio::test] + #[ignore] + async fn test_upload() -> Result { + use warp::Filter; + setup_logging()?; + + let response1 = CreateArtifactResponse { + name: "test-artifact".to_string(), + url: "http://localhost:8080/artifacts/test-artifact".try_into()?, + container_id: 1, + size: 0, + file_container_resource_url: "http://localhost:8080/artifacts/test-artifact/files" + .try_into()?, + r#type: "file".to_string(), + expires_on: default(), + signed_content: None, + }; + + let routes = warp::any().map(move || serde_json::to_string(&response1).unwrap()); + tokio::spawn(warp::serve(routes).run(([127, 0, 0, 1], 8080))); + + debug!("Hello!"); + std::env::set_var("ACTIONS_RUNTIME_URL", "http://localhost:8080"); + std::env::set_var("ACTIONS_RUNTIME_TOKEN", "test-token"); + std::env::set_var("GITHUB_RUN_ID", "123"); + let result = artifacts::upload_single_file("file", "name").await; + dbg!(result)?; + Ok(()) + } +} diff --git a/build/ci_utils/src/actions/context.rs b/build/ci_utils/src/actions/context.rs new file mode 100644 index 0000000000..764f9f8beb --- /dev/null +++ b/build/ci_utils/src/actions/context.rs @@ -0,0 +1,83 @@ +#[allow(unused_imports)] +use crate::prelude::*; + +use octocrab::models; + + + +/// Corresponds to https://github.com/actions/toolkit/blob/main/packages/github/src/interfaces.ts +#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] +pub struct WebhookPayload { + pub repository: Option, + pub issue: Option, + pub pull_request: Option, + pub sender: Option, + pub action: Option, + pub installation: Option, + pub comment: Option, +} + +/// Corresponds to 
https://github.com/actions/toolkit/blob/main/packages/github/src/context.ts +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct Context { + pub payload: WebhookPayload, + pub event_name: String, + pub sha: String, + pub r#ref: String, + pub workflow: String, + pub action: String, + pub actor: String, + pub job: String, + pub run_number: usize, + pub run_id: models::RunId, + pub api_url: Url, + pub server_url: Url, + pub graphql_url: Url, +} + +impl Context { + /// Creates a new context from the environment. + pub fn from_env() -> Result { + let payload: WebhookPayload = + if let Ok(event_path) = crate::actions::env::GITHUB_EVENT_PATH.get() { + event_path.read_to_json()? + } else { + default() + }; + let event_name = crate::actions::env::GITHUB_EVENT_NAME.get()?; + let sha = crate::actions::env::GITHUB_SHA.get()?; + let r#ref = crate::actions::env::GITHUB_REF.get()?; + let workflow = crate::actions::env::GITHUB_WORKFLOW.get()?; + let action = crate::actions::env::GITHUB_ACTION.get()?; + let actor = crate::actions::env::GITHUB_ACTOR.get()?; + let job = crate::actions::env::GITHUB_JOB.get()?; + // GitHub Actions defaults run_number and run_id to 10 if they are not set. + // I am not sure why, for now I chose not to follow this pattern. 
+ let run_number = crate::actions::env::GITHUB_RUN_NUMBER.get()?; + let run_id = crate::actions::env::GITHUB_RUN_ID.get()?; + let api_url = crate::actions::env::GITHUB_API_URL + .get() + .or_else(|_| Url::from_str("https://api.github.com"))?; + let server_url = crate::actions::env::GITHUB_SERVER_URL + .get() + .or_else(|_| Url::from_str("https://github.com"))?; + let graphql_url = crate::actions::env::GITHUB_GRAPHQL_URL + .get() + .or_else(|_| Url::from_str("https://api.github.com/graphql"))?; + Ok(Self { + payload, + event_name, + sha, + r#ref, + workflow, + action, + actor, + job, + run_number, + run_id, + api_url, + server_url, + graphql_url, + }) + } +} diff --git a/build/ci_utils/src/actions/env.rs b/build/ci_utils/src/actions/env.rs new file mode 100644 index 0000000000..3e4a9a3024 --- /dev/null +++ b/build/ci_utils/src/actions/env.rs @@ -0,0 +1,168 @@ +//! See: https://docs.github.com/en/actions/learn-github-actions/environment-variables + +use crate::prelude::*; + +use crate::define_env_var; +use crate::models::config::RepoContext; + + + +define_env_var! { + /// Always set to true when being run under GitHub Actions runner. Also, this is often set on + /// other CI systems. + CI, bool; + + /// The name of the action currently running, or the id of a step. For example, for an action, + /// `__repo-owner_name-of-action-repo`. + GITHUB_ACTION, String; + + /// The path where an action is located. This property is only supported in composite actions. + /// You can use this path to access files located in the same repository as the action. + /// For example, `/home/runner/work/_actions/repo-owner/name-of-action-repo/v1`. + GITHUB_ACTION_PATH, PathBuf; + + /// For a step executing an action, this is the owner and repository name of the action. + /// For example, `actions/checkout`. + GITHUB_ACTION_REPOSITORY, RepoContext; + + /// Always set to true when GitHub Actions is running the workflow. 
You can use this variable + /// to differentiate when tests are being run locally or by GitHub Actions. + GITHUB_ACTIONS, bool; + + /// The name of the person or app that initiated the workflow. For example, `octocat`. + GITHUB_ACTOR, String; + + /// Returns the API URL. For example: https://api.github.com. + GITHUB_API_URL, Url; + + /// The name of the base ref or target branch of the pull request in a workflow run. This is + /// only set when the event that triggers a workflow run is either `pull_request` or + /// `pull_request_target`. For example, `main`. + GITHUB_BASE_REF, String; + + /// The path on the runner to the file that sets environment variables from workflow commands. + /// This file is unique to the current step and changes for each step in a job. For example, + /// `/home/runner/work/_temp/_runner_file_commands/set_env_87406d6e-4979-4d42-98e1-3dab1f48b13a`. + GITHUB_ENV, PathBuf; + + /// The name of the event that triggered the workflow. For example, `workflow_dispatch`. + GITHUB_EVENT_NAME, String; + + /// The path to the file on the runner that contains the full event webhook payload. + /// For example, `/github/workflow/event.json`. + GITHUB_EVENT_PATH, PathBuf; + + /// Returns the GraphQL API URL. For example: https://api.github.com/graphql. + GITHUB_GRAPHQL_URL, Url; + + /// The head ref or source branch of the pull request in a workflow run. This property is only + /// set when the event that triggers a workflow run is either `pull_request` or + /// `pull_request_target`. For example, `feature-branch-1`. + GITHUB_HEAD_REF, String; + + /// The job_id of the current job. For example, greeting_job. + GITHUB_JOB, String; + + /// The path on the runner to the file that sets system PATH variables from workflow commands. + /// This file is unique to the current step and changes for each step in a job. For example, + /// /home/runner/work/_temp/_runner_file_commands/add_path_899b9445-ad4a-400c-aa89-249f18632cf5. 
+ GITHUB_PATH, PathBuf; + + /// The fully-formed ref of the branch or tag that triggered the workflow run. For workflows + /// triggered by push, this is the branch or tag ref that was pushed. For workflows triggered + /// by pull_request, this is the pull request merge branch. For workflows triggered by release, + /// this is the release tag created. For other triggers, this is the branch or tag ref that + /// triggered the workflow run. This is only set if a branch or tag is available for the event + /// type. The ref given is fully-formed, meaning that for branches the format is + /// `refs/heads/`, for pull requests it is `refs/pull//merge`, and for + /// tags it is `refs/tags/`. For example, `refs/heads/feature-branch-1`. + GITHUB_REF, String; + + /// The short ref name of the branch or tag that triggered the workflow run. This value matches + /// the branch or tag name shown on GitHub. For example, `feature-branch-1`. + GITHUB_REF_NAME, String; + + /// true if branch protections are configured for the ref that triggered the workflow run. + GITHUB_REF_PROTECTED, bool; + + /// The type of ref that triggered the workflow run. Valid values are `branch` or `tag`. + GITHUB_REF_TYPE, String; + + /// The owner and repository name. For example, octocat/Hello-World. + GITHUB_REPOSITORY, RepoContext; + + /// The repository owner's name. For example, octocat. + GITHUB_REPOSITORY_OWNER, String; + + /// The number of days that workflow run logs and artifacts are kept. For example, 90. + GITHUB_RETENTION_DAYS, usize; + + /// A unique number for each attempt of a particular workflow run in a repository. This number + /// begins at 1 for the workflow run's first attempt, and increments with each re-run. For + /// example, 3. + GITHUB_RUN_ATTEMPT, usize; + + /// A unique number for each workflow run within a repository. This number does not change if + /// you re-run the workflow run. For example, 1658821493. 
+ GITHUB_RUN_ID, octocrab::models::RunId; + + /// A unique number for each run of a particular workflow in a repository. This number begins + /// at 1 for the workflow's first run, and increments with each new run. This number does not + /// change if you re-run the workflow run. For example, 3. + GITHUB_RUN_NUMBER, usize; + + /// The URL of the GitHub server. For example: https://github.com. + GITHUB_SERVER_URL, Url; + + /// The commit SHA that triggered the workflow. The value of this commit SHA depends on the + /// event that triggered the workflow. For more information, see "Events that trigger + /// workflows." For example, `ffac537e6cbbf934b08745a378932722df287a53`. + GITHUB_SHA, String; + + /// The path on the runner to the file that contains job summaries from workflow commands. + /// This file is unique to the current step and changes for each step in a job. For example, + /// `/home/rob/runner/_layout/_work/_temp/_runner_file_commands/step_summary_1cb22d7f-5663-41a8-9ffc-13472605c76c`. + /// For more information, see "Workflow commands for GitHub Actions." + GITHUB_STEP_SUMMARY, String; + + /// The name of the workflow. For example, `My test workflow`. If the workflow file doesn't + /// specify a name, the value of this variable is the full path of the workflow file in the + /// repository. + GITHUB_WORKFLOW, String; + + /// The default working directory on the runner for steps, and the default location of your + /// repository when using the checkout action. For example, + /// `/home/runner/work/my-repo-name/my-repo-name`. + GITHUB_WORKSPACE, PathBuf; + + /// The architecture of the runner executing the job. + /// Possible values are `X86`, `X64`, `ARM`, or `ARM64`. + RUNNER_ARCH, String; + + /// This is set only if debug logging is enabled, and always has the value of 1. It can be + /// useful as an indicator to enable additional debugging or verbose logging in your own job + /// steps. 
+ RUNNER_DEBUG, usize; + + /// The name of the runner executing the job. For example, `Hosted Agent` + RUNNER_NAME, String; + + /// The operating system of the runner executing the job. Possible values are `Linux`, + /// `Windows`, or `macOS`. For example, `Windows`. + RUNNER_OS, String; + + /// The path to a temporary directory on the runner. This directory is emptied at the beginning + /// and end of each job. Note that files will not be removed if the runner's user account does + /// not have permission to delete them. For example, `D:\a\_temp` + RUNNER_TEMP, PathBuf; + + /// The path to the directory containing preinstalled tools for GitHub-hosted runners. + /// For example, `C:\hostedtoolcache\windows` + RUNNER_TOOL_CACHE, PathBuf; +} + +/// Fails when called outside of GitHub Actions environment, +pub fn is_self_hosted() -> Result { + let name = RUNNER_NAME.get_raw()?; + Ok(!name.starts_with("GitHub Actions")) +} diff --git a/build/ci_utils/src/actions/workflow.rs b/build/ci_utils/src/actions/workflow.rs new file mode 100644 index 0000000000..c007ec7ca1 --- /dev/null +++ b/build/ci_utils/src/actions/workflow.rs @@ -0,0 +1,104 @@ +use crate::prelude::*; + +use crate::actions::env; + +use std::io::Write; + + +// ============== +// === Export === +// ============== + +pub mod definition; + + + +/// Check if we are running in an environment that looks like being spawned by GitHub Actions +/// workflow. +pub fn is_in_env() -> bool { + env::GITHUB_ACTIONS.get().contains(&true) +} + +/// Sets an action's output parameter. +/// +/// See: +pub fn set_output(name: &str, value: &impl ToString) { + let value = value.to_string(); + debug!("Setting GitHub Actions step output {name} to {value}"); + println!("::set-output name={name}::{value}"); +} + +/// Prints a debug message to the log. +/// +/// You must create a secret named `ACTIONS_STEP_DEBUG` with the value `true` to see the debug +/// messages set by this command in the log. 
+/// +/// See: +pub fn debug(message: &str) { + println!("::debug::{message}") +} + +/// Creates or updates an environment variable for any steps running next in a job. +/// +/// This step and all subsequent steps in a job will have access to the variable. Environment +/// variables are case-sensitive and you can include punctuation. +/// +/// Just logs and sets variable locally if used under non-GH CI. +pub fn set_env(name: &str, value: &impl ToString) -> Result { + let value_string = value.to_string(); + debug!("Will try writing Github Actions environment variable: {name}={value_string}"); + std::env::set_var(name, value.to_string()); + if is_in_env() { + let env_file = env::GITHUB_ENV.get()?; + let mut file = std::fs::OpenOptions::new().create_new(false).append(true).open(env_file)?; + writeln!(file, "{name}={value_string}")?; + } + Ok(()) +} + +pub fn mask_text(text: impl AsRef) { + if is_in_env() { + println!("::add-mask::{}", text.as_ref()) + } +} + +pub fn mask_value(value: impl Display) { + if is_in_env() { + println!("::add-mask::{value}") + } +} + +pub fn mask_environment_variable(variable_name: impl AsRef) -> Result { + mask_value(std::env::var(variable_name)?); + Ok(()) +} + +#[derive(Clone, Copy, Debug, strum::Display)] +#[strum(serialize_all = "snake_case")] +pub enum MessageLevel { + Debug, + Notice, + Warning, + Error, +} + +#[derive(Clone, Debug)] +pub struct Message { + pub level: MessageLevel, + pub text: String, + // TODO title, line, column +} + +impl Message { + pub fn notice(text: impl AsRef) { + Message { level: MessageLevel::Notice, text: text.as_ref().into() }.send() + } + + pub fn send(&self) { + println!("::{} ::{}", self.level, self.text); + } +} + +pub fn message(level: MessageLevel, text: impl AsRef) { + Message { level, text: text.as_ref().into() }.send() +} diff --git a/build/ci_utils/src/actions/workflow/definition.rs b/build/ci_utils/src/actions/workflow/definition.rs new file mode 100644 index 0000000000..117c10c52a --- /dev/null 
+++ b/build/ci_utils/src/actions/workflow/definition.rs @@ -0,0 +1,766 @@ +use crate::prelude::*; + +use crate::env::new::RawVariable; + +use heck::ToKebabCase; +use std::collections::BTreeMap; +use std::collections::BTreeSet; + + + +pub fn wrap_expression(expression: impl AsRef) -> String { + format!("${{{{ {} }}}}", expression.as_ref()) +} + +pub fn env_expression(environment_variable: &impl RawVariable) -> String { + wrap_expression(format!("env.{}", environment_variable.name())) +} + + +pub fn is_github_hosted() -> String { + "startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')".into() +} + +pub fn setup_conda() -> Step { + // use crate::actions::workflow::definition::step::CondaChannel; + Step { + name: Some("Setup conda (GH runners only)".into()), + uses: Some("s-weigand/setup-conda@v1.0.5".into()), + r#if: Some(is_github_hosted()), + with: Some(step::Argument::SetupConda { + update_conda: Some(false), + conda_channels: Some("anaconda, conda-forge".into()), + }), + ..default() + } +} + +pub fn setup_wasm_pack_step() -> Step { + Step { + name: Some("Installing wasm-pack".into()), + uses: Some("jetli/wasm-pack-action@v0.3.0".into()), + with: Some(step::Argument::Other(BTreeMap::from_iter([( + "version".into(), + "v0.10.2".into(), + )]))), + r#if: Some(is_github_hosted()), + ..default() + } +} + +pub fn github_script_step(name: impl Into, script: impl Into) -> Step { + Step { + name: Some(name.into()), + uses: Some("actions/github-script@v6".into()), + with: Some(step::Argument::GitHubScript { script: script.into() }), + ..default() + } +} + +pub fn setup_artifact_api() -> Step { + let script = r#" + core.exportVariable("ACTIONS_RUNTIME_TOKEN", process.env["ACTIONS_RUNTIME_TOKEN"]) + core.exportVariable("ACTIONS_RUNTIME_URL", process.env["ACTIONS_RUNTIME_URL"]) + core.exportVariable("GITHUB_RETENTION_DAYS", process.env["GITHUB_RETENTION_DAYS"]) + console.log(context) + "#; + github_script_step("Expose Artifact API and context 
information.", script) +} + +pub fn is_windows_runner() -> String { + "runner.os == 'Windows'".into() +} + +pub fn is_non_windows_runner() -> String { + "runner.os != 'Windows'".into() +} + +pub fn shell_os(os: OS, command_line: impl Into) -> Step { + Step { + run: Some(command_line.into()), + env: once(github_token_env()).collect(), + r#if: Some(format!("runner.os {} 'Windows'", if os == OS::Windows { "==" } else { "!=" })), + shell: Some(if os == OS::Windows { Shell::Pwsh } else { Shell::Bash }), + ..default() + } +} + +pub fn shell(command_line: impl Into) -> Step { + Step { run: Some(command_line.into()), env: once(github_token_env()).collect(), ..default() } +} + +/// Invoke our entry point to the build scripts, i.e. the `./run` script. +pub fn run(run_args: impl AsRef) -> Step { + shell(format!("./run {}", run_args.as_ref())) +} + +pub fn cancel_workflow_action() -> Step { + Step { + name: Some("Cancel Previous Runs".into()), + uses: Some("styfle/cancel-workflow-action@0.9.1".into()), + with: Some(step::Argument::Other(BTreeMap::from_iter([( + "access_token".into(), + "${{ github.token }}".into(), + )]))), + ..default() + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct JobId(String); + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case", untagged)] +pub enum Concurrency { + Plain(String), + Map { group: String, cancel_in_progress: bool }, +} + +impl Concurrency { + pub fn new(group_name: impl Into) -> Self { + Self::Plain(group_name.into()) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct Workflow { + pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + pub on: Event, + pub jobs: BTreeMap, + #[serde(skip_serializing_if = "BTreeMap::is_empty")] + pub env: BTreeMap, + #[serde(skip_serializing_if = "Option::is_none")] + pub concurrency: Option, +} + +impl Default for Workflow { + fn default() -> Self { + let 
mut ret = Self { + name: default(), + description: default(), + on: default(), + jobs: default(), + env: default(), + concurrency: default(), + }; + // By default CI should never check program versions. + ret.env("ENSO_BUILD_SKIP_VERSION_CHECK", "true"); + ret + } +} + +impl Workflow { + pub fn new(name: impl Into) -> Self { + Self { name: name.into(), ..Default::default() } + } + + pub fn expose_outputs(&self, source_job_id: impl AsRef, consumer_job: &mut Job) { + let source_job = self.jobs.get(source_job_id.as_ref()).unwrap(); + consumer_job.use_job_outputs(source_job_id.as_ref(), source_job); + } +} + +impl Workflow { + pub fn add_job(&mut self, job: Job) -> String { + let key = job.name.to_kebab_case(); + self.jobs.insert(key.clone(), job); + key + } + + pub fn add(&mut self, os: OS) -> String { + self.add_customized::(os, |_| {}) + } + + pub fn add_customized(&mut self, os: OS, f: impl FnOnce(&mut Job)) -> String { + let (key, mut job) = J::entry(os); + f(&mut job); + self.jobs.insert(key.clone(), job); + key + } + + pub fn add_dependent( + &mut self, + os: OS, + needed: impl IntoIterator>, + ) -> String { + let (key, mut job) = J::entry(os); + for needed in needed { + self.expose_outputs(needed.as_ref(), &mut job); + } + self.jobs.insert(key.clone(), job); + key + } + + pub fn env(&mut self, var_name: impl Into, var_value: impl Into) { + self.env.insert(var_name.into(), var_value.into()); + } +} + +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct Push { + #[serde(flatten)] + pub inner_branches: Branches, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub tags: Vec, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub tags_ignore: Vec, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub paths: Vec, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub paths_ignore: Vec, +} + +/// Common branch-related fields between some event triggers. 
+/// +/// See: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onpull_requestpull_request_targetbranchesbranches-ignore +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct Branches { + #[serde(skip_serializing_if = "Vec::is_empty")] + pub branches: Vec, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub branches_ignore: Vec, +} + +impl Branches { + pub fn new(branches: impl IntoIterator>) -> Self { + Self { branches: branches.into_iter().map(Into::into).collect(), ..default() } + } +} + +/// See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum PullRequestActivityType { + Assigned, + Unassigned, + Labeled, + Unlabeled, + Opened, + Edited, + Closed, + Reopened, + Synchronize, + ConvertedToDraft, + ReadyForReview, + Locked, + Unlocked, + ReviewRequested, + ReviewRequestRemoved, + AutoMergeEnabled, + AutoMergeDisabled, +} + +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct PullRequest { + #[serde(flatten)] + pub inner_branches: Branches, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub types: Vec, +} + +impl PullRequest { + pub fn with_types( + mut self, + types: impl IntoIterator>, + ) -> Self { + self.types.extend(types.into_iter().map(Into::into)); + self + } +} + +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct Schedule { + pub cron: String, +} + +impl Schedule { + pub fn new(cron_text: impl Into) -> Result { + let cron = cron_text.into(); + // Check if the given string is a valid cron expression. 
+ // let _ = cron::Schedule::from_str(cron_text.as_str())?; + Ok(Self { cron }) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +#[serde(tag = "type")] +pub enum WorkflowDispatchInputType { + String { + #[serde(skip_serializing_if = "Option::is_none")] + default: Option, + }, + Choice { + #[serde(skip_serializing_if = "Option::is_none")] + default: Option, + choices: Vec, // should be non-empty + }, + Boolean { + #[serde(skip_serializing_if = "Option::is_none")] + default: Option, + }, + Environment { + #[serde(skip_serializing_if = "Option::is_none")] + default: Option, + }, +} + +impl Default for WorkflowDispatchInputType { + fn default() -> Self { + Self::String { default: None } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WorkflowDispatchInput { + /// A string description of the input parameter. + pub description: String, + /// A string shown to users using the deprecated input. + #[serde(skip_serializing_if = "Option::is_none")] + pub deprecation_message: Option, + /// A boolean to indicate whether the action requires the input parameter. Set to true when the + /// parameter is required. + pub required: bool, + /// A string representing the type of the input. 
+ #[serde(flatten)] + pub r#type: WorkflowDispatchInputType, +} + +impl WorkflowDispatchInput { + pub fn new(description: impl Into, required: bool) -> Self { + Self { + description: description.into(), + deprecation_message: None, + required, + r#type: Default::default(), + } + } + + pub fn new_string( + description: impl Into, + required: bool, + default: impl Into, + ) -> Self { + Self { + r#type: WorkflowDispatchInputType::String { default: Some(default.into()) }, + ..Self::new(description, required) + } + } + + pub fn new_boolean(description: impl Into, required: bool, default: bool) -> Self { + Self { + r#type: WorkflowDispatchInputType::Boolean { default: Some(default) }, + ..Self::new(description, required) + } + } +} + +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct WorkflowDispatch { + #[serde(skip_serializing_if = "BTreeMap::is_empty")] + pub inputs: BTreeMap, +} + +impl WorkflowDispatch { + pub fn add_input( + &mut self, + name: impl Into, + input: WorkflowDispatchInput, + ) -> &mut Self { + self.inputs.insert(name.into(), input); + self + } + + pub fn with_input>(mut self, name: S, input: WorkflowDispatchInput) -> Self { + self.inputs.insert(name.into(), input); + self + } +} + +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub struct Event { + #[serde(skip_serializing_if = "Option::is_none")] + pub push: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub pull_request: Option, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub schedule: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub workflow_dispatch: Option, +} + +impl Event { + pub fn new() -> Self { + Self::default() + } + + pub fn push(&mut self, push: Push) -> &mut Self { + self.push = Some(push); + self + } + + pub fn pull_request(&mut self, pull_request: PullRequest) -> &mut Self { + self.pull_request = Some(pull_request); + self + } + + pub fn 
schedule(&mut self, schedule: Schedule) -> &mut Self { + self.schedule.push(schedule); + self + } + + pub fn workflow_dispatch(&mut self, workflow_dispatch: WorkflowDispatch) -> &mut Self { + self.workflow_dispatch = Some(workflow_dispatch); + self + } +} + +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct Job { + pub name: String, + #[serde(skip_serializing_if = "BTreeSet::is_empty")] + pub needs: BTreeSet, + pub runs_on: Vec, + pub steps: Vec, + #[serde(skip_serializing_if = "BTreeMap::is_empty")] + pub outputs: BTreeMap, + #[serde(skip_serializing_if = "Option::is_none")] + pub strategy: Option, + #[serde(skip_serializing_if = "BTreeMap::is_empty")] + pub env: BTreeMap, +} + +impl Job { + pub fn new(name: impl Into) -> Self { + Self { name: name.into(), ..default() } + } + + pub fn expose_output(&mut self, step_id: impl AsRef, output_name: impl Into) { + let step = step_id.as_ref(); + let output = output_name.into(); + let value = format!("${{{{ steps.{step}.outputs.{output} }}}}"); + self.outputs.insert(output, value); + } + + pub fn env(&mut self, name: impl Into, value: impl Into) { + self.env.insert(name.into(), value.into()); + } + + pub fn expose_secret_as(&mut self, secret: impl AsRef, given_name: impl Into) { + self.env(given_name, format!("${{{{ secrets.{} }}}}", secret.as_ref())); + } + + pub fn use_job_outputs(&mut self, job_id: impl Into, job: &Job) { + let job_id = job_id.into(); + for output_name in job.outputs.keys() { + let reference = format!("${{{{needs.{}.outputs.{}}}}}", job_id, output_name); + self.env.insert(output_name.into(), reference); + } + self.needs(job_id); + } + + pub fn needs(&mut self, job_id: impl Into) { + self.needs.insert(job_id.into()); + } +} + +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct Strategy { + #[serde(skip_serializing_if = "BTreeMap::is_empty")] + pub matrix: BTreeMap, + #[serde(skip_serializing_if 
= "Option::is_none")] + pub fail_fast: Option, +} + +impl Strategy { + pub fn new( + matrix_entries: impl IntoIterator< + Item = (impl Into, impl IntoIterator), + >, + ) -> Result { + let mut ret = Self::default(); + for (key, value) in matrix_entries { + ret.insert_to_matrix(key, value)?; + } + Ok(ret) + } + + pub fn fail_fast(mut self, fail_fast: bool) -> Self { + self.fail_fast = Some(fail_fast); + self + } + + pub fn insert_to_matrix( + &mut self, + name: impl Into, + values: impl IntoIterator, + ) -> Result<&mut Self> { + let values = values.into_iter().map(serde_json::to_value).try_collect_vec()?; + self.matrix.insert(name.into(), serde_json::Value::Array(values)); + Ok(self) + } + + pub fn new_os(labels: impl Serialize) -> Strategy { + let oses = serde_json::to_value(labels).unwrap(); + Strategy { + fail_fast: Some(false), + matrix: [("os".to_string(), oses)].into_iter().collect(), + } + } +} + +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct Step { + #[serde(skip_serializing_if = "Option::is_none")] + pub id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub uses: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub run: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub r#if: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub with: Option, + #[serde(skip_serializing_if = "BTreeMap::is_empty")] + pub env: BTreeMap, + #[serde(skip_serializing_if = "Option::is_none")] + pub shell: Option, +} + +impl Step { + pub fn with_secret_exposed_as( + self, + secret: impl AsRef, + given_name: impl Into, + ) -> Self { + let secret_expr = wrap_expression(format!("secrets.{}", secret.as_ref())); + self.with_env(given_name, secret_expr) + } + + pub fn with_env(mut self, name: impl Into, value: impl Into) -> Self { + self.env.insert(name.into(), value.into()); + self + } + + pub fn 
with_if(mut self, condition: impl Into) -> Self { + self.r#if = Some(condition.into()); + self + } + + pub fn with_id(mut self, id: impl Into) -> Self { + self.id = Some(id.into()); + self + } + + pub fn with_name(mut self, name: impl Into) -> Self { + self.name = Some(name.into()); + self + } + + pub fn with_custom_argument( + mut self, + name: impl Into, + value: impl Into, + ) -> Self { + match &mut self.with { + Some(step::Argument::Other(map)) => { + map.insert(name.into(), value.into()); + } + _ => { + if let Some(previous) = self.with { + warn!("Dropping previous step argument: {:?}", previous); + } + self.with = Some(step::Argument::new_other(name, value)); + } + } + self + } +} + +pub fn github_token_env() -> (String, String) { + ("GITHUB_TOKEN".into(), "${{ secrets.GITHUB_TOKEN }}".into()) +} + +impl IntoIterator for Step { + type Item = Step; + type IntoIter = std::iter::Once; + fn into_iter(self) -> Self::IntoIter { + once(self) + } +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum Shell { + /// Command Prompt. + Cmd, + Bash, + /// Power Shell. 
+ Pwsh, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum CheckoutArgumentSubmodules { + True, + False, + Recursive, +} + +pub mod step { + use super::*; + + + #[derive(Clone, Debug, Serialize, Deserialize)] + #[serde(rename_all = "kebab-case")] + #[serde(untagged)] + pub enum Argument { + #[serde(rename_all = "kebab-case")] + Checkout { + #[serde(skip_serializing_if = "Option::is_none")] + clean: Option, + #[serde(skip_serializing_if = "Option::is_none")] + submodules: Option, + }, + #[serde(rename_all = "kebab-case")] + SetupConda { + #[serde(skip_serializing_if = "Option::is_none")] + update_conda: Option, + #[serde(skip_serializing_if = "Option::is_none")] + conda_channels: Option, // conda_channels: Vec + }, + #[serde(rename_all = "kebab-case")] + GitHubScript { + script: String, + }, + Other(BTreeMap), + } + + impl Argument { + pub fn new_other(name: impl Into, value: impl Into) -> Self { + Argument::Other(BTreeMap::from_iter([(name.into(), value.into())])) + } + } +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum RunnerLabel { + #[serde(rename = "self-hosted")] + SelfHosted, + #[serde(rename = "macOS")] + MacOS, + #[serde(rename = "Linux")] + Linux, + #[serde(rename = "Windows")] + Windows, + #[serde(rename = "engine")] + Engine, + #[serde(rename = "macos-latest")] + MacOSLatest, + #[serde(rename = "linux-latest")] + LinuxLatest, + #[serde(rename = "windows-latest")] + WindowsLatest, + #[serde(rename = "X64")] + X64, + #[serde(rename = "mwu-deluxe")] + MwuDeluxe, + #[serde(rename = "benchmark")] + Benchmark, + #[serde(rename = "${{ matrix.os }}")] + MatrixOs, +} + +pub fn checkout_repo_step() -> impl IntoIterator { + // This is a workaround for a bug in GH actions/checkout. If a submodule is added and removed, + // it effectively breaks any future builds of this repository on a given self-hosted runner. 
+ // The workaround step below comes from: + // https://github.com/actions/checkout/issues/590#issuecomment-970586842 + // + // As an exception to general rule, we use here bash even on Windows. As the bash us the one + // coming from a git installation, we can assume that git works nicely with it. + // Having this rewritten to github-script might have been nicer but it does not seem + // effort-worthy. + // + // See: + // https://github.com/actions/checkout/issues/590 + // https://github.com/actions/checkout/issues/788 + // and many other duplicate reports. + let git_bash_command = "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"; + let submodules_workaround_win = Step { + // We can't add git-bash to PATH because this would break the Rust build. + // Instead we manually spawn the bash with a given command from CMD shell. + run: Some(format!(r#""c:\Program Files\Git\bin\bash.exe" -c "{}""#, git_bash_command)), + shell: Some(Shell::Cmd), + r#if: Some(is_windows_runner()), + name: Some( + "Workaround for https://github.com/actions/checkout/issues/590 (Windows)".into(), + ), + ..default() + }; + let submodules_workaround_linux = Step { + run: Some(git_bash_command.into()), + shell: Some(Shell::Bash), + r#if: Some(is_non_windows_runner()), + name: Some( + "Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)".into(), + ), + ..default() + }; + let actual_checkout = Step { + name: Some("Checking out the repository".into()), + // FIXME: Check what is wrong with v3. Seemingly Engine Tests fail because there's only a + // shallow copy of the repo. 
+ uses: Some("actions/checkout@v2".into()), + with: Some(step::Argument::Checkout { + clean: Some(false), + submodules: Some(CheckoutArgumentSubmodules::Recursive), + }), + ..default() + }; + [submodules_workaround_win, submodules_workaround_linux, actual_checkout] +} + +pub trait JobArchetype { + fn id_key_base() -> String { + std::any::type_name::().to_kebab_case() + } + + fn key(os: OS) -> String { + format!("{}-{}", Self::id_key_base(), os) + } + + fn job(os: OS) -> Job; + + fn entry(os: OS) -> (String, Job) { + (Self::key(os), Self::job(os)) + } + + // [Step ID] => [variable names] + fn outputs() -> BTreeMap> { + default() + } + + fn expose_outputs(job: &mut Job) { + for (step_id, outputs) in Self::outputs() { + for output in outputs { + job.expose_output(&step_id, output); + } + } + } +} diff --git a/build/ci_utils/src/anyhow.rs b/build/ci_utils/src/anyhow.rs new file mode 100644 index 0000000000..ff5d665a05 --- /dev/null +++ b/build/ci_utils/src/anyhow.rs @@ -0,0 +1,46 @@ +use crate::prelude::*; + +use anyhow::Error; + + + +pub trait ResultExt { + fn anyhow_err(self) -> Result; + + #[allow(clippy::type_complexity)] + fn flatten_fut( + self, + ) -> futures::future::Either< + std::future::Ready>, + futures::future::IntoFuture, + > + where T: TryFuture>; + + // fn flatten_fut(self) -> impl Future> + // where T: TryFuture> { + // async move { fut?.into_future().await } + // } + // fn flatten_fut(self) + // where T: TryFuture; +} + +impl ResultExt for std::result::Result +where E: Into +{ + fn anyhow_err(self) -> Result { + self.map_err(E::into) + } + + fn flatten_fut( + self, + ) -> futures::future::Either< + std::future::Ready>, + futures::future::IntoFuture, + > + where T: TryFuture> { + match self { + Ok(fut) => fut.into_future().right_future(), + Err(e) => ready(Err(T::Error::from(e))).left_future(), + } + } +} diff --git a/build/ci_utils/src/archive.rs b/build/ci_utils/src/archive.rs new file mode 100644 index 0000000000..fea76b0731 --- /dev/null +++ 
b/build/ci_utils/src/archive.rs @@ -0,0 +1,205 @@ +use crate::prelude::*; + +use crate::fs::create_dir_if_missing; +use crate::programs::tar::Compression; +use crate::programs::tar::Tar; +use crate::programs::SevenZip; + +use tracing::Span; + + +// ============== +// === Export === +// ============== + +pub mod tar; +pub mod zip; + + + +/// Archive formats that we handle. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum Format { + Zip, + SevenZip, + Tar(Option), +} + +impl Format { + /// Deduce the archive format from a given filename. + #[context("Deducing archive format from a filename {}.", filename.as_ref().display())] + pub fn from_filename(filename: impl AsRef) -> Result { + let filename = filename.as_ref(); + let extension = + filename.extension().ok_or_else(|| anyhow!("The path had no extension."))?; + match extension.to_str().unwrap() { + "zip" => Ok(Format::Zip), + "7z" => Ok(Format::SevenZip), + "tgz" => Ok(Format::Tar(Some(Compression::Gzip))), + "txz" => Ok(Format::Tar(Some(Compression::Xz))), + other => + if let Ok(compression) = Compression::deduce_from_extension(other) { + let secondary_extension = + filename.file_stem().map(Path::new).and_then(Path::extension); + if secondary_extension == Some(OsStr::new("tar")) { + Ok(Format::Tar(Some(compression))) + } else { + bail!("Extension `.{}` looks like a tar compression, but there is no `.tar.` component in the name", other) + } + } else { + bail!("Unrecognized archive extension `{}`.", other) + }, + } + } + + /// Extract an archive of this format into a given output directory. 
+ #[tracing::instrument( + name="Unpacking archive.", + skip_all, + fields(self, dest=%output_dir.as_ref().display()), + err)] + pub fn extract( + self, + compressed_data: impl Read + Seek, + output_dir: impl AsRef, + ) -> anyhow::Result<()> { + create_dir_if_missing(&output_dir)?; + match self { + Format::Zip => { + let mut archive = zip::ZipArchive::new(compressed_data)?; + archive.extract(output_dir)?; + } + Format::Tar(Some(Compression::Gzip)) => { + let tar_stream = flate2::read::GzDecoder::new(compressed_data); + let mut archive = ::tar::Archive::new(tar_stream); + archive.unpack(output_dir)?; + } + // Format::SevenZip => { + // let mut cmd = SevenZip.unpack_from_stdin_cmd(output_dir)?; + // cmd.stdin(Stdio::piped()); + // let mut child = cmd.as_std().clone().spawn()?; + // //let child = cmd.spawn_nicer()?; + // let mut stdin = + // child.stdin.ok_or_else(|| anyhow!("Failed to get 7z stdin handle"))?; + // std::io::copy(&mut compressed_data, &mut stdin)?; + // drop(stdin); + // child.wait()?.exit_ok()?; + // } + _ => todo!("Not supported!"), + } + Ok(()) + } +} + + +pub async fn create( + output_archive: impl AsRef, + paths_to_pack: impl IntoIterator>, +) -> Result { + let span = info_span!("Creating an archive", target = output_archive.as_ref().as_str()); + let format = Format::from_filename(&output_archive)?; + match format { + Format::Zip | Format::SevenZip => + SevenZip.pack(output_archive, paths_to_pack).instrument(span).await, + Format::Tar(_) => Tar.pack(output_archive, paths_to_pack).instrument(span).await, + } +} + + +pub fn is_archive_name(path: impl AsRef) -> bool { + Format::from_filename(path).is_ok() +} + +#[tracing::instrument( + name="Packing directory.", + skip_all, + fields(src=%root_directory.as_ref().display(), dest=%output_archive.as_ref().display()), + err)] +pub async fn pack_directory_contents( + output_archive: impl AsRef, + root_directory: impl AsRef, +) -> Result { + let format = Format::from_filename(&output_archive)?; + match 
format { + Format::Zip | Format::SevenZip => + SevenZip.pack_directory_contents(output_archive, root_directory).await, + Format::Tar(compression) => + Tar.pack_directory_contents(compression, output_archive, root_directory).await, + } +} + +#[tracing::instrument( + name="Extracting item from archive.", + skip(archive_path, item_path, output_path), + fields( + src = %archive_path.as_ref().display(), + item = %item_path.as_ref().display(), + dest = %output_path.as_ref().display()), + err)] +pub async fn extract_item( + archive_path: impl AsRef, + item_path: impl AsRef, + output_path: impl AsRef, +) -> Result { + let format = Format::from_filename(&archive_path)?; + let archive_path = archive_path.as_ref().to_path_buf(); + let item_path = item_path.as_ref().to_path_buf(); + let output_path = output_path.as_ref().to_path_buf(); + + let extract_task = match format { + Format::Zip => { + let mut archive = zip::open(&archive_path)?; + tokio::task::spawn_blocking(move || { + zip::extract_subtree(&mut archive, item_path, output_path) + }) + } + Format::Tar(Some(Compression::Gzip)) => { + let mut archive = tar::open_tar_gz(&archive_path)?; + tokio::task::spawn_blocking(move || { + tar::extract_subtree(&mut archive, item_path, output_path) + }) + } + _ => todo!(), + }; + extract_task.instrument(Span::current()).await??; + Ok(()) +} + +#[tracing::instrument(name="Extracting the archive to a directory.", skip(archive_path,output_directory), fields(src=%archive_path.as_ref().display(), dest=%output_directory.as_ref().display()), err)] +pub async fn extract_to( + archive_path: impl AsRef, + output_directory: impl AsRef, +) -> Result { + // Don't clean the output directory. Perhaps even the archive lives there. 
+ let span = info_span!( + "Extracting the archive.", + source = archive_path.as_ref().as_str(), + target = output_directory.as_ref().as_str() + ); + let format = Format::from_filename(&archive_path)?; + match format { + Format::Zip | Format::SevenZip => + SevenZip.unpack_cmd(archive_path, output_directory)?.run_ok().instrument(span).await, + Format::Tar(_) => Tar.unpack(archive_path, output_directory).instrument(span).await, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn format_from_filename() -> Result { + assert_eq!( + Format::from_filename("/tmp/.tmpnejBKd/gui_wasm.tar.gz")?, + Format::Tar(Some(Compression::Gzip)) + ); + Ok(()) + } + + #[test] + fn archive_checker() { + assert!(is_archive_name("enso-project-manager-0.2.31-linux-amd64.tar.gz")); + assert!(is_archive_name("enso-project-manager-0.2.31-windows-amd64.zip")); + } +} diff --git a/build/ci_utils/src/archive/tar.rs b/build/ci_utils/src/archive/tar.rs new file mode 100644 index 0000000000..d9ace77802 --- /dev/null +++ b/build/ci_utils/src/archive/tar.rs @@ -0,0 +1,30 @@ +use crate::prelude::*; + +use flate2::read::GzDecoder; +use std::fs::File; +use tar::Archive; + + + +pub fn open_tar_gz(path: impl AsRef) -> Result>> { + let file = crate::fs::open(&path)?; + let tar_stream = flate2::read::GzDecoder::new(file); + Ok(tar::Archive::new(tar_stream)) +} + +pub fn extract_subtree( + archive: &mut Archive, + prefix: impl AsRef, + output: impl AsRef, +) -> Result { + for entry in archive.entries()? 
{ + let mut entry = entry?; + let path_in_archive = entry.path()?; + if let Ok(relative_path) = path_in_archive.strip_prefix(&prefix) { + let output = output.as_ref().join(relative_path); + trace!("Extracting {}", output.display()); + entry.unpack(output)?; + } + } + Ok(()) +} diff --git a/build/ci_utils/src/archive/zip.rs b/build/ci_utils/src/archive/zip.rs new file mode 100644 index 0000000000..0836e5a061 --- /dev/null +++ b/build/ci_utils/src/archive/zip.rs @@ -0,0 +1,70 @@ +use crate::prelude::*; + +use anyhow::Context; +use std::io::Cursor; +use zip::read::ZipFile; + + + +pub use ::zip::*; + +pub fn open(path: impl AsRef) -> Result> { + ZipArchive::new(crate::fs::open(path)?).anyhow_err() +} + +#[context("Failed to extract in-memory archive to {}.", output_dir.as_ref().display())] +pub fn extract_bytes(bytes: Bytes, output_dir: impl AsRef) -> Result { + let mut archive = zip::ZipArchive::new(Cursor::new(&bytes))?; + archive.extract(&output_dir)?; + Ok(()) +} + +pub fn extract_file(file: &mut ZipFile, output: impl AsRef) -> Result { + if file.is_dir() { + crate::fs::create_dir_if_missing(&output)?; + } else { + let mut output_file = crate::fs::create(&output)?; + std::io::copy(file, &mut output_file)?; + } + + // We could consider setting file modification time, but the header data is not really reliable. + // Leaving as-is for now. 
See: https://github.com/zip-rs/zip/issues/156#issuecomment-652981904 + + // Get and Set permissions + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + if let Some(mode) = file.unix_mode() { + std::fs::set_permissions(&output, std::fs::Permissions::from_mode(mode))?; + } + } + Ok(()) +} + + +#[tracing::instrument( + name="Extracting subtree from archive.", + skip_all, + fields( + prefix = %prefix.as_ref().display(), + dest = %output.as_ref().display()), + err)] +pub fn extract_subtree( + archive: &mut ZipArchive, + prefix: impl AsRef, + output: impl AsRef, +) -> Result { + // let bar = crate::global::new_spinner("Extracting archive."); + for index in 0..archive.len() { + let mut file = archive.by_index(index)?; + let path_in_archive = file + .enclosed_name() + .context(format!("Illegal path in the archive: {}", file.name()))?; + if let Ok(relative_path) = path_in_archive.strip_prefix(&prefix) { + let output = output.as_ref().join(relative_path); + trace!("Extracting {}", output.display()); + extract_file(&mut file, output)?; + } + } + Ok(()) +} diff --git a/build/ci_utils/src/buffer.rs b/build/ci_utils/src/buffer.rs new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/build/ci_utils/src/buffer.rs @@ -0,0 +1 @@ + diff --git a/build/ci_utils/src/cache.rs b/build/ci_utils/src/cache.rs new file mode 100644 index 0000000000..002ee78bc7 --- /dev/null +++ b/build/ci_utils/src/cache.rs @@ -0,0 +1,199 @@ +use crate::prelude::*; + +use anyhow::Context; +use serde::de::DeserializeOwned; +use sha2::Digest; +use std::hash::Hasher; + + +// ============== +// === Export === +// ============== + +pub mod archive; +pub mod artifact; +pub mod asset; +pub mod download; +pub mod goodie; + +pub use goodie::Goodie; + + + +/// Format of the hashing scheme. +/// +/// This value can be bumped to invalidate all the hashes. +pub const VERSION: u8 = 2; + +/// Default location of the cache root. 
+pub fn default_path() -> Result { + Ok(dirs::data_local_dir() + .context("Cannot locate user's local data directory.")? + .join_iter([".enso-ci", "cache"])) +} + +/// Description of the entity that can be cached. +pub trait Storable: Debug + Send + Sync + 'static { + /// Data necessary to construct output from a disk storage. + type Metadata: Clone + Debug + Serialize + DeserializeOwned + Send + Sync + 'static; + /// An instance of the cached entity. + type Output: Clone + Send + Sync + 'static; + /// A key used to generate a hash. + type Key: Clone + Debug + Serialize + DeserializeOwned + Send + Sync + 'static; + + /// Fill the cache store with this entity. + /// + /// The cache `store` parameter is an existing, writable, empty directory. The store path should + /// not be assumed to be constant for this entry, metadata should not include it in any way. + fn generate(&self, cache: Cache, store: PathBuf) -> BoxFuture<'static, Result>; + + fn adapt( + &self, + cache: PathBuf, + metadata: Self::Metadata, + ) -> BoxFuture<'static, Result>; + + fn key(&self) -> Self::Key; +} + +/// The required metadata for a cache entry. Used when reading the cache. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct EntryIndexRequired { + pub metadata: S::Metadata, +} + +/// The metadata for a cache entry with additional information to help debugging. 
+#[derive(Clone, Debug, Serialize, Deserialize, derive_more::Deref, derive_more::DerefMut)] +#[serde(bound = "S:")] +pub struct EntryIndexExtended { + #[serde(flatten)] + #[deref] + #[deref_mut] + pub inner: EntryIndexRequired, + pub key: Option, + pub r#type: Option, + pub key_type: Option, + pub schema_version: Option, +} + +impl EntryIndexExtended { + pub fn new(metadata: S::Metadata, key: S::Key) -> Self { + Self { + inner: EntryIndexRequired { metadata }, + key: Some(key), + r#type: Some(std::any::type_name::().into()), + key_type: Some(std::any::type_name::().into()), + schema_version: Some(VERSION), + } + } +} + +#[derive(Debug)] +pub struct HashToDigest<'a, D: Digest>(&'a mut D); +impl<'a, D: Digest> Hasher for HashToDigest<'a, D> { + fn finish(&self) -> u64 { + todo!() + } + + fn write(&mut self, bytes: &[u8]) { + self.0.update(bytes) + } +} + +pub fn digest(storable: &S) -> Result { + let key = storable.key(); + let key_serialized = bincode::serialize(&key)?; + + let mut digest = sha2::Sha224::default(); + sha2::Digest::update(&mut digest, [VERSION]); + sha2::Digest::update(&mut digest, &key_serialized); + std::any::TypeId::of::().hash(&mut HashToDigest(&mut digest)); + std::any::TypeId::of::().hash(&mut HashToDigest(&mut digest)); + let digest = digest.finalize(); + Ok(data_encoding::BASE64URL_NOPAD.encode(&digest)) +} + +#[derive(Clone, Debug)] +pub struct Cache { + root: PathBuf, +} + +impl Cache { + pub async fn new_default() -> Result { + Self::new(default_path()?).await + } + + /// Path to the cache root. 
+ pub fn path(&self) -> &Path { + &self.root + } + + pub async fn new(path: impl Into) -> Result { + let root = path.into(); + crate::fs::tokio::create_dir_if_missing(&root).await?; + debug!("Prepared cache in {}", root.display()); + Ok(Self { root }) + } + + pub fn get(&self, storable: S) -> BoxFuture<'static, Result> + where S: Storable { + let this = self.clone(); + async move { + let digest = digest(&storable)?; + tracing::Span::current().record("digest", digest.as_str()); + let entry_dir = this.root.join(&digest); + let entry_meta = entry_dir.with_appended_extension("json"); + + let retrieve = async { + let info = entry_meta.read_to_json::>()?; + crate::fs::require_exist(&entry_dir)?; + storable.adapt(entry_dir.clone(), info.metadata).await + }; + + match retrieve.await { + Ok(out) => { + trace!("Found in cache, skipping generation."); + Ok(out) + } + Err(e) => { + trace!("Value cannot be retrieved from cache because: {e}"); + crate::fs::reset_dir(&entry_dir)?; + let key = storable.key(); + tracing::Span::current().record("key", &tracing::field::debug(&key)); + let metadata = storable + .generate(this, entry_dir.clone()) + .instrument(info_span!("Generating value to fill the cache.")) + .await?; + let info = EntryIndexExtended::::new(metadata, key); + entry_meta.write_as_json(&info)?; + storable.adapt(entry_dir, info.inner.metadata).await + } + } + } + .instrument(trace_span!( + "Getting a value from cache.", + digest = tracing::field::Empty, + key = tracing::field::Empty + )) + .boxed() + } +} + + +#[cfg(test)] +mod tests { + use super::*; + use crate::cache::download::DownloadFile; + use crate::log::setup_logging; + + #[tokio::test] + #[ignore] + async fn cache_test() -> Result { + setup_logging()?; + let download_task = DownloadFile::new("https://store.akamai.steamstatic.com/public/shared/images/header/logo_steam.svg?t=962016")?; + + let cache = Cache::new("C:/temp/enso-cache").await?; + cache.get(download_task).await?; + Ok(()) + } +} diff --git 
a/build/ci_utils/src/cache/archive.rs b/build/ci_utils/src/cache/archive.rs new file mode 100644 index 0000000000..247717d800 --- /dev/null +++ b/build/ci_utils/src/cache/archive.rs @@ -0,0 +1,49 @@ +use crate::prelude::*; + +use crate::cache::Cache; +use crate::cache::Storable; + + + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Key { + pub archive_source_key: S, + pub path_to_extract: Option, +} + +#[derive(Clone, Debug)] +pub struct ExtractedArchive { + pub archive_source: S, + pub path_to_extract: Option, +} + +impl + Clone> Storable for ExtractedArchive { + type Metadata = (); + type Output = PathBuf; + type Key = Key; + + fn generate(&self, cache: Cache, store: PathBuf) -> BoxFuture<'static, Result> { + let Self { path_to_extract, archive_source } = self.clone(); + let get_archive_job = cache.get(archive_source); + async move { + let archive_path = get_archive_job.await?; + if let Some(path_to_extract) = path_to_extract { + crate::archive::extract_item(&archive_path, path_to_extract, &store).await + } else { + crate::archive::extract_to(&archive_path, &store).await + } + } + .boxed() + } + + fn adapt(&self, cache: PathBuf, _: Self::Metadata) -> BoxFuture<'static, Result> { + async move { Ok(cache) }.boxed() + } + + fn key(&self) -> Self::Key { + Key { + archive_source_key: self.archive_source.key(), + path_to_extract: self.path_to_extract.clone(), + } + } +} diff --git a/build/ci_utils/src/cache/artifact.rs b/build/ci_utils/src/cache/artifact.rs new file mode 100644 index 0000000000..4f0b4c5123 --- /dev/null +++ b/build/ci_utils/src/cache/artifact.rs @@ -0,0 +1,54 @@ +use crate::prelude::*; + +use crate::cache::Cache; +use crate::cache::Storable; +use crate::models::config::RepoContext; + +use octocrab::models::ArtifactId; + + + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Key { + pub repository: RepoContext, + pub artifact_id: ArtifactId, +} + +#[derive(Clone, Debug)] +pub struct ExtractedArtifact { + pub key: Key, + pub 
client: Octocrab, +} + +impl Storable for ExtractedArtifact { + type Metadata = (); + type Output = PathBuf; + type Key = Key; + + fn generate( + &self, + _cache: Cache, + store: PathBuf, + ) -> BoxFuture<'static, Result> { + let this = self.clone(); + async move { + let ExtractedArtifact { client, key } = this; + let Key { artifact_id, repository } = key; + repository.download_and_unpack_artifact(&client, artifact_id, &store).await?; + Ok(()) + } + .boxed() + } + + fn adapt( + &self, + cache: PathBuf, + _metadata: Self::Metadata, + ) -> BoxFuture<'static, Result> { + ready(Result::Ok(cache)).boxed() + } + + fn key(&self) -> Self::Key { + self.key.clone() + } +} diff --git a/build/ci_utils/src/cache/asset.rs b/build/ci_utils/src/cache/asset.rs new file mode 100644 index 0000000000..7571af89a3 --- /dev/null +++ b/build/ci_utils/src/cache/asset.rs @@ -0,0 +1,40 @@ +// use crate::prelude::*; +// +// use crate::models::config::RepoContext; +// use octocrab::repos::RepoHandler; +// use reqwest::RequestBuilder; +// +// pub struct DownloadAsset { +// pub octocrab: Octocrab, +// pub repo: RepoContext, +// } +// +// impl DownloadAsset { +// fn download_asset_request(&self) -> RequestBuilder { +// self.octocrab +// .client +// .get(self.url.clone()) +// .header(reqwest::header::ACCEPT, mime::APPLICATION_OCTET_STREAM.as_ref()) +// } +// +// async fn cached(&self, cache: &Cache) -> Result { +// let job = crate::cache::download::DownloadFile { +// client: self.octocrab.client.clone(), +// key: crate::cache::download::Key { +// url: self.url.clone(), +// additional_headers: HeaderMap::from_iter([( +// reqwest::header::ACCEPT, +// HeaderValue::from_static(mime::APPLICATION_OCTET_STREAM.as_ref()), +// )]), +// }, +// }; +// cache.get(job).await +// } +// +// async fn get(&self) -> Result { +// let request = self.download_asset_request(); +// let response = request.send().await?; +// let response = crate::io::web::handle_error_response(response).await?; +// Ok(response) +// } +// } 
diff --git a/build/ci_utils/src/cache/download.rs b/build/ci_utils/src/cache/download.rs new file mode 100644 index 0000000000..52c014a49f --- /dev/null +++ b/build/ci_utils/src/cache/download.rs @@ -0,0 +1,95 @@ +use crate::prelude::*; + +use crate::cache::Cache; +use crate::cache::Storable; +use crate::io::filename_from_url; +use crate::io::web::filename_from_response; +use crate::io::web::handle_error_response; +use crate::io::web::stream_response_to_file; + +use derivative::Derivative; +use headers::HeaderMap; +use reqwest::Client; +use reqwest::IntoUrl; +use reqwest::Response; + + + +#[derive(Clone, Derivative, Serialize, Deserialize)] +#[derivative(Debug)] +pub struct Key { + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub url: Url, + + /// We keep this as part of the key, as some GitHub API endpoints change their meaning based on + /// the headers set. + #[serde(with = "http_serde::header_map")] + pub additional_headers: HeaderMap, +} + +#[derive(Clone, Debug)] +pub struct DownloadFile { + pub key: Key, + pub client: Client, +} + +impl DownloadFile { + pub fn new(url: impl IntoUrl) -> Result { + Ok(Self { + key: Key { url: url.into_url()?, additional_headers: default() }, + client: default(), + }) + } + + + pub fn send_request(&self) -> BoxFuture<'static, Result> { + let response = self + .client + .get(self.key.url.clone()) + .headers(self.key.additional_headers.clone()) + .send(); + + let span = info_span!("Downloading a file.", url = %self.key.url); + async move { handle_error_response(response.await?).await }.instrument(span).boxed() + } +} + +impl Storable for DownloadFile { + type Metadata = PathBuf; + type Output = PathBuf; + type Key = Key; + + fn generate( + &self, + _cache: Cache, + store: PathBuf, + ) -> BoxFuture<'static, Result> { + // FIXME use `download_to_dir` + let response = self.send_request(); + let filename = filename_from_url(&self.key.url); + async move { + let response = response.await?; + let last_fallback_name = 
PathBuf::from("data"); + let filename = filename_from_response(&response) + .map(ToOwned::to_owned) + .or(filename) + .unwrap_or(last_fallback_name); + let output = store.join(&filename); + stream_response_to_file(response, &output).await?; + Ok(filename) // We don't store absolute paths to keep cache relocatable. + } + .boxed() + } + + fn adapt( + &self, + store: PathBuf, + metadata: Self::Metadata, + ) -> BoxFuture<'static, Result> { + ready(Ok(store.join(metadata))).boxed() + } + + fn key(&self) -> Self::Key { + self.key.clone() + } +} diff --git a/build/ci_utils/src/cache/goodie.rs b/build/ci_utils/src/cache/goodie.rs new file mode 100644 index 0000000000..a002f12ff6 --- /dev/null +++ b/build/ci_utils/src/cache/goodie.rs @@ -0,0 +1,84 @@ +use crate::prelude::*; + +use crate::cache; +use crate::cache::Cache; + + +// ============== +// === Export === +// ============== + +pub mod binaryen; +pub mod graalvm; +pub mod sbt; + + + +/// Something that can be downloaded and, after that, enabled by modifying global state. 
+pub trait Goodie: Debug + Clone + Send + Sync + 'static { + fn url(&self) -> BoxFuture<'static, Result>; + fn is_active(&self) -> BoxFuture<'static, Result>; + fn activate(&self, package_path: PathBuf) -> Result; +} + +pub trait GoodieExt: Goodie { + fn install_if_missing(&self, cache: &Cache) -> BoxFuture<'static, Result> { + let this = self.clone(); + let cache = cache.clone(); + async move { + if this.is_active().await.unwrap_or(false) { + trace!("Skipping activation of {this:?} because it already present.",); + } else { + let package = this.download(&cache).await?; + this.activate(package)?; + } + Result::Ok(()) + } + .boxed() + } + + + fn package( + &self, + ) -> BoxFuture<'static, Result>> + { + let url_fut = self.url(); + async move { + let url = url_fut.await?; + let archive_source = cache::download::DownloadFile::new(url)?; + let path_to_extract = None; + Ok(cache::archive::ExtractedArchive { archive_source, path_to_extract }) + } + .boxed() + } + + fn download(&self, cache: &Cache) -> BoxFuture<'static, Result> { + let package = self.package(); + let cache = cache.clone(); + async move { cache.get(package.await?).await }.boxed() + } +} + +impl GoodieExt for T {} +// +// /// Whoever owns a token, can assume that the Goodie is available. 
+// #[derive(Clone, Debug, Display)] +// pub struct Token(G); +// +// #[derive(Clone, Debug, Display)] +// pub struct PotentialFutureGoodie(Box BoxFuture<'static, Result>>>); +// +// impl PotentialFutureGoodie { +// pub fn new(f: F) -> Self +// where +// F: FnOnce() -> Fut + 'static, +// Fut: Future>> + Send + 'static, { +// Self(Box::new(move || f().boxed())) +// } +// } +// +// // pub type GoodieGenerator = +// // dyn FnOnce(Cache, G) -> BoxFuture<'static, Result>> + Send + Sync + 'static; +// // +// // pub type PotentialFutureGoodie = +// // dyn FnOnce(Cache) -> BoxFuture<'static, Result>> + Send + Sync + 'static; diff --git a/build/ci_utils/src/cache/goodie/binaryen.rs b/build/ci_utils/src/cache/goodie/binaryen.rs new file mode 100644 index 0000000000..aa18ba91ca --- /dev/null +++ b/build/ci_utils/src/cache/goodie/binaryen.rs @@ -0,0 +1,77 @@ +use crate::prelude::*; + +use crate::cache; +use crate::env::prepend_to_path; +use crate::program::version::IsVersionPredicate; +use crate::programs::wasm_opt; +use crate::programs::wasm_opt::WasmOpt; + + + +#[derive(Clone, Copy, Debug, Display)] +pub struct Binaryen { + pub version: u32, +} + +impl IsVersionPredicate for Binaryen { + type Version = wasm_opt::Version; + fn matches(&self, version: &Self::Version) -> bool { + version.0 >= self.version + } +} + +impl Binaryen {} + +impl cache::Goodie for Binaryen { + fn url(&self) -> BoxFuture<'static, Result> { + let version = format!("version_{}", self.version); + async move { + let target = match (TARGET_OS, TARGET_ARCH) { + (OS::Windows, Arch::X86_64) => "x86_64-windows", + (OS::Linux, Arch::X86_64) => "x86_64-linux", + (OS::MacOS, Arch::X86_64) => "x86_64-macos", + (OS::MacOS, Arch::AArch64) => "arm64-macos", + (os, arch) => bail!("Not supported arch/OS combination: {arch}-{os}."), + }; + let url = format!("https://github.com/WebAssembly/binaryen/releases/download/{version}/binaryen-{version}-{target}.tar.gz"); + url.parse2() + }.boxed() + } + + fn is_active(&self) -> 
BoxFuture<'static, Result> { + let this = *self; + async move { + WasmOpt.require_present_that(this).await?; + Ok(true) + } + .boxed() + } + + fn activate(&self, package_path: PathBuf) -> Result { + let bin_dir = package_path.join(format!("binaryen-version_{}", self.version)).join("bin"); + crate::fs::expect_dir(&bin_dir)?; + prepend_to_path(bin_dir) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::cache; + use crate::cache::Goodie; + use crate::log::setup_logging; + + #[tokio::test] + #[ignore] + async fn install_wasm_opt() -> Result { + setup_logging()?; + let cache = cache::Cache::new_default().await?; + let binaryen = Binaryen { version: 108 }; + binaryen.install_if_missing(&cache).await?; + dbg!(WasmOpt.lookup())?; + + assert!(binaryen.is_active().await?); + + Ok(()) + } +} diff --git a/build/ci_utils/src/cache/goodie/graalvm.rs b/build/ci_utils/src/cache/goodie/graalvm.rs new file mode 100644 index 0000000000..d1d8e22bf5 --- /dev/null +++ b/build/ci_utils/src/cache/goodie/graalvm.rs @@ -0,0 +1,164 @@ +use crate::prelude::*; + +use crate::cache::goodie::Goodie; +use crate::models::config::RepoContext; +use crate::programs::java; +use crate::programs::java::JAVA_HOME; +use crate::programs::Java; + + + +const PACKAGE_PREFIX: &str = "graalvm-ce"; + +const GITHUB_ORGANIZATION: &str = "graalvm"; + +const CE_BUILDS_REPOSITORY: &str = "graalvm-ce-builds"; + + +crate::define_env_var! { + /// Should be the same as `JAVA_HOME` for Graal-based Java distribution. + /// + /// Note that this is not the root directory of the GraalVM installation (at least on macOS), + /// but the directory where the `bin` directory is located. 
+ GRAALVM_HOME, PathBuf; +} + +pub fn graal_version_from_version_string(version_string: &str) -> Result { + let line = version_string.lines().find(|line| line.contains("GraalVM")).context( + "There is a Java environment available but it is not recognizable as GraalVM one.", + )?; + Version::find_in_text(line) +} + +pub async fn find_graal_version() -> Result { + let text = Java.version_string().await?; + graal_version_from_version_string(&text) +} + +/// The repository that contains the GraalVM CE releases for download. +pub fn ce_build_repository() -> RepoContext { + RepoContext { owner: GITHUB_ORGANIZATION.into(), name: CE_BUILDS_REPOSITORY.into() } +} + +/// Description necessary to download and install GraalVM. +#[derive(Clone, Debug)] +pub struct GraalVM { + /// Used to query GitHub about releases. + pub client: Octocrab, + pub graal_version: Version, + pub java_version: java::LanguageVersion, + pub os: OS, + pub arch: Arch, +} + +impl Goodie for GraalVM { + fn url(&self) -> BoxFuture<'static, Result> { + let platform_string = self.platform_string(); + let graal_version = self.graal_version.clone(); + let client = self.client.clone(); + let repo = ce_build_repository(); + async move { + let release = repo.find_release_by_text(&client, &graal_version.to_string()).await?; + crate::github::find_asset_url_by_text(&release, &platform_string).cloned() + } + .boxed() + } + + fn is_active(&self) -> BoxFuture<'static, Result> { + let expected_graal_version = self.graal_version.clone(); + let expected_java_language_version = self.java_version; + async move { + let found_version = find_graal_version().await?; + ensure!(found_version == expected_graal_version, "GraalVM version mismatch. Expected {expected_graal_version}, found {found_version}."); + + let found_java_version = Java.check_language_version().await?; + ensure!( + found_java_version == expected_java_language_version, + "Java language version mismatch. 
Expected {expected_java_language_version}, found {found_java_version}." + ); + + Result::Ok(true) + } + .boxed() + } + + fn activate(&self, package_path: PathBuf) -> Result { + let package_path = package_path.join(self.root_directory_name()); + let root = match TARGET_OS { + OS::MacOS => package_path.join_iter(["Contents", "Home"]), + _ => package_path, + }; + + JAVA_HOME.set(&root)?; + GRAALVM_HOME.set(&root)?; + crate::env::prepend_to_path(root.join("bin"))?; + Ok(()) + } +} + +impl GraalVM { + pub fn platform_string(&self) -> String { + let Self { graal_version: _graal_version, java_version, arch, os, client: _client } = &self; + let os_name = match *os { + OS::Linux => "linux", + OS::Windows => "windows", + OS::MacOS => "darwin", + other_os => unimplemented!("System `{}` is not supported!", other_os), + }; + let arch_name = match *arch { + Arch::X86_64 => "amd64", + // No Graal packages for Apple Silicon. + Arch::AArch64 if TARGET_OS == OS::MacOS => "amd64", + Arch::AArch64 => "aarch64", + other_arch => unimplemented!("Architecture `{}` is not supported!", other_arch), + }; + let java_version = format!("java{}", java_version.0); + format!("{}-{}-{}-{}", PACKAGE_PREFIX, java_version, os_name, arch_name) + } + + pub fn root_directory_name(&self) -> PathBuf { + PathBuf::from(format!("{}-{}-{}", PACKAGE_PREFIX, self.java_version, self.graal_version)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::cache; + use crate::log::setup_logging; + use crate::programs::graal::Gu; + use crate::programs::Java; + + #[tokio::test] + #[ignore] + async fn test_is_enabled() -> Result { + setup_logging()?; + let graal_version = Version::parse("21.3.0").unwrap(); + let java_version = java::LanguageVersion(11); + let os = TARGET_OS; + let arch = Arch::X86_64; + let client = Octocrab::default(); + let graalvm = GraalVM { graal_version, java_version, os, arch, client }; + + graalvm.install_if_missing(&cache::Cache::new_default().await?).await?; + + 
Gu.require_present().await?; + // let graalvm = graalvm.is_active().await?; + // assert!(graalvm); + Ok(()) + } + + /// Check that we correctly recognize both the GraalVM version and the Java version. + #[test] + fn version_recognize() { + let version_string = r"openjdk 11.0.13 2021-10-19 +OpenJDK Runtime Environment GraalVM CE 21.3.0 (build 11.0.13+7-jvmci-21.3-b05) +OpenJDK 64-Bit Server VM GraalVM CE 21.3.0 (build 11.0.13+7-jvmci-21.3-b05, mixed mode, sharing)"; + + let found_graal = graal_version_from_version_string(version_string).unwrap(); + assert_eq!(found_graal, Version::new(21, 3, 0)); + + let found_java = Java.parse_version(version_string).unwrap(); + assert_eq!(found_java, Version::new(11, 0, 13)); + } +} diff --git a/build/ci_utils/src/cache/goodie/sbt.rs b/build/ci_utils/src/cache/goodie/sbt.rs new file mode 100644 index 0000000000..6fc478d195 --- /dev/null +++ b/build/ci_utils/src/cache/goodie/sbt.rs @@ -0,0 +1,33 @@ +use crate::prelude::*; + +use crate::cache; +use crate::programs; + + + +const DOWNLOAD_URL_TEXT: &str = "https://github.com/sbt/sbt/releases/download/v1.5.5/sbt-1.5.5.tgz"; + +crate::define_env_var! { + SBT_HOME, PathBuf; +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Display)] +pub struct Sbt; + +impl cache::Goodie for Sbt { + fn url(&self) -> BoxFuture<'static, Result> { + ready(Url::parse(DOWNLOAD_URL_TEXT).anyhow_err()).boxed() + } + + fn is_active(&self) -> BoxFuture<'static, Result> { + ready(Ok(programs::Sbt.lookup().is_ok())).boxed() + } + + fn activate(&self, package_path: PathBuf) -> Result { + let sbt_home = package_path.join("sbt"); + // Yeah, it is needed. Sbt will fail, if not set. 
+ SBT_HOME.set(&sbt_home)?; + crate::env::prepend_to_path(sbt_home.join("bin"))?; + Ok(()) + } +} diff --git a/build/ci_utils/src/ci.rs b/build/ci_utils/src/ci.rs new file mode 100644 index 0000000000..84fbe2f653 --- /dev/null +++ b/build/ci_utils/src/ci.rs @@ -0,0 +1,15 @@ +use crate::prelude::*; + + + +pub mod env { + crate::define_env_var! { + /// An environment variable set commonly by most of popular CI systems. + CI, String; + } +} + +/// Check if the environment suggests that we are being run in a CI. +pub fn run_in_ci() -> bool { + env::CI.is_set() +} diff --git a/build/ci_utils/src/deploy.rs b/build/ci_utils/src/deploy.rs new file mode 100644 index 0000000000..c1928c581e --- /dev/null +++ b/build/ci_utils/src/deploy.rs @@ -0,0 +1,8 @@ +//! Module with utilities that support programmatic deployment of GitHub Actions Runner. + + +// ============== +// === Export === +// ============== + +pub mod runner; diff --git a/build/ci_utils/src/deploy/runner.rs b/build/ci_utils/src/deploy/runner.rs new file mode 100644 index 0000000000..1fd5a0f400 --- /dev/null +++ b/build/ci_utils/src/deploy/runner.rs @@ -0,0 +1,122 @@ +use crate::prelude::*; + +use crate::models::config::Runner; +use crate::models::config::RunnerLocation; + +use platforms::target::OS; +use std::collections::BTreeSet; + + + +/// Name of the directory with a runner that is placed in runner's container image build context. +/// +/// Must be in sync with relevant entries in `Dockerfile`s (the `ADD` commands). +pub const DIRECTORY_WITH_RUNNER_PACKAGE: &str = "runner"; + +pub const DIRECTORY_WITH_CI_CRATE: &str = "ci"; + +/// Full runner configuration. +#[derive(Clone, Debug)] +pub struct Config { + /// Repository where this runner is registered. + pub location: RunnerLocation, + /// Runner's name. + pub runner: Runner, + /// Operating system of the runner's image. It is possible to have Linux on Windows or macOS, + /// so we don't assume this to be always equal to `TARGET_OS`. 
+ pub os: OS, + pub server_name: String, + pub index: usize, +} + +impl Config { + /// Pretty printed triple with repository owner, repository name and runner name. + pub fn qualified_name(&self) -> String { + let location_prefix = match &self.location { + RunnerLocation::Organization(org) => iformat!("{org.name}"), + RunnerLocation::Repository(repo) => iformat!("{repo.owner}-{repo.name}"), + }; + iformat!("{location_prefix}-{self.runner.name}-{self.server_name}-{self.index}") + } + + /// The custom labels that the runner will be registered with. + /// + /// Apart from them, the GH-defined labels are always used. + pub fn custom_labels(&self) -> BTreeSet { + once(self.runner.name.clone()) + .chain(once(self.server_name.clone())) + .chain(once(self.qualified_name())) + .chain(once(format!("index-{}", self.index))) + .chain(self.runner.labels.as_ref().into_iter().flatten().cloned()) + .collect() + } + + /// The list of custom labels pretty printed in the format expected by the `--labels` argument + /// of the runner's configure script. 
+ pub fn registered_labels_arg(&self) -> OsString { + self.custom_labels().into_iter().join(",").into() + } + + pub fn registered_name(&self) -> String { + format!("{}-{}-{}", &self.runner.name, self.server_name, self.index) + } + + pub fn register_script_call_args( + &self, + token: impl AsRef, + ) -> Result> { + let url = self.location.url()?; + let name = self.registered_name(); + let labels = self.registered_labels_arg(); + Ok([ + "--unattended", + "--replace", + "--name", + name.as_str(), + "--url", + url.as_str(), + "--token", + token.as_ref(), + "--labels", + labels.as_str(), + ] + .map(into)) + } + + pub fn guest_root_path(&self) -> PathBuf { + if self.os == OS::Windows { r"C:\" } else { "/" }.into() + } + + pub fn guest_runner_dir(&self) -> PathBuf { + self.guest_root_path().join(DIRECTORY_WITH_RUNNER_PACKAGE) + } + + pub fn guest_ci_dir(&self) -> PathBuf { + self.guest_root_path().join(DIRECTORY_WITH_CI_CRATE) + } + + pub fn guest_config_script_path(&self) -> PathBuf { + self.guest_runner_dir().join(self.config_script_filename()) + } + + pub fn guest_run_script_path(&self) -> PathBuf { + let mut ret = self.guest_runner_dir().join("run"); + ret.set_extension(script_extension(self.os)); + ret + } + + pub fn config_script_filename(&self) -> PathBuf { + let mut ret = PathBuf::from("config"); + ret.set_extension(script_extension(self.os)); + ret + } +} + +/// The extension used by the scripts that are part of GitHub Actions Runner distribution. 
+pub fn script_extension(os: OS) -> &'static str { + if os == OS::Windows { + "cmd" + } else { + "sh" + } +} diff --git a/build/ci_utils/src/env.rs b/build/ci_utils/src/env.rs new file mode 100644 index 0000000000..a7622b4143 --- /dev/null +++ b/build/ci_utils/src/env.rs @@ -0,0 +1,444 @@ +use crate::prelude::*; + +use anyhow::Context; +use std::collections::BTreeSet; +use unicase::UniCase; + + + +pub fn current_dir() -> Result { + std::env::current_dir().context("Failed to get current directory.") +} + +#[context("Failed to set current directory to {}.", path.as_ref().display())] +pub fn set_current_dir(path: impl AsRef) -> Result { + debug!("Changing working directory to {}.", path.as_ref().display()); + std::env::set_current_dir(&path).anyhow_err() +} + +#[macro_export] +macro_rules! define_env_var { + () => {}; + ($(#[$attr:meta])* $name: ident, PathBuf; $($tail:tt)*) => { + #[allow(non_upper_case_globals)] + $(#[$attr])* + pub const $name: $crate::env::new::PathBufVariable = + $crate::env::new::PathBufVariable(stringify!($name)); + $crate::define_env_var!($($tail)*); + }; + ($(#[$attr:meta])* $name: ident, String; $($tail:tt)*) => { + #[allow(non_upper_case_globals)] + $(#[$attr])* + pub const $name: $crate::env::new::SimpleVariable = + $crate::env::new::SimpleVariable::new(stringify!($name)); + $crate::define_env_var!($($tail)*); + }; + ($(#[$attr:meta])* $name: ident, $ty_name: ty; $($tail:tt)*) => { + #[allow(non_upper_case_globals)] + $(#[$attr])* + pub const $name: $crate::env::new::SimpleVariable<$ty_name> = + $crate::env::new::SimpleVariable::new(stringify!($name)); + $crate::define_env_var!($($tail)*); + }; +} + + + +pub mod known; + +pub mod new { + use super::*; + use crate::program::command::FallibleManipulator; + + /// An environment variable of known name. + /// + /// "raw" means that we do not know its "real" type and deal only with strings. When more type + /// safety is needed, implement `TypedVariable` as well. 
+ pub trait RawVariable { + /// The name of this environment variable. + fn name(&self) -> &str; + + /// Has this variable been set? + /// + /// Note that a variable may be set to the empty string. This can lead to unexpected + /// results, because in some environments variables can be unset by setting them to the + /// empty string. + fn is_set(&self) -> bool { + std::env::var(self.name()) != Err(std::env::VarError::NotPresent) + } + + fn get_raw(&self) -> Result { + expect_var(self.name()) + } + + fn get_raw_os(&self) -> Result { + expect_var_os(self.name()) + } + + fn set_raw(&self, value: impl AsRef) { + std::env::set_var(self.name(), value); + } + + fn remove(&self) { + std::env::remove_var(self.name()); + } + } + + pub trait TypedVariable: RawVariable { + type Value; + type Borrowed: ?Sized = Self::Value; + + fn parse(&self, value: &str) -> Result; + fn generate(&self, value: &Self::Borrowed) -> Result; + + fn get(&self) -> Result { + self.parse(self.get_raw()?.as_str()) + } + + fn set(&self, value: impl AsRef) -> Result { + let value = self.generate(value.as_ref())?; + self.set_raw(value); + Ok(()) + } + + fn set_workflow_output(&self, value: impl Borrow) -> Result { + crate::actions::workflow::set_output(self.name(), &self.generate(value.borrow())?); + Ok(()) + } + fn set_workflow_env(&self, value: impl Borrow) -> Result { + crate::actions::workflow::set_env(self.name(), &self.generate(value.borrow())?) 
+ } + fn emit_to_workflow(&self, value: impl Borrow) -> Result { + self.set_workflow_output(value.borrow())?; + self.set_workflow_env(value.borrow()) + } + } + + impl> FallibleManipulator + for (Variable, Value) + { + fn try_applying(&self, command: &mut C) -> Result { + let value = self.0.generate(self.1.as_ref())?; + command.env(self.0.name(), value); + Ok(()) + } + } + + #[derive(Clone, Copy, Debug, Display, Ord, PartialOrd, Eq, PartialEq)] + pub struct PathBufVariable(pub &'static str); + + impl const From<&'static str> for PathBufVariable { + fn from(value: &'static str) -> Self { + PathBufVariable(value) + } + } + + impl RawVariable for PathBufVariable { + fn name(&self) -> &str { + self.0 + } + } + + impl TypedVariable for PathBufVariable { + type Value = PathBuf; + type Borrowed = Path; + fn parse(&self, value: &str) -> Result { + PathBuf::from_str(value) + } + fn generate(&self, value: &Self::Borrowed) -> Result { + value.to_str().context("Path is not a valid string.").map(ToString::to_string) + } + } + + #[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] + pub struct SimpleVariable { + pub name: Cow<'static, str>, + pub phantom_data: PhantomData, + pub phantom_data2: PhantomData, + } + + impl From<&'static str> for SimpleVariable { + fn from(value: &'static str) -> Self { + SimpleVariable::new(value) + } + } + + impl AsRef for SimpleVariable { + fn as_ref(&self) -> &str { + &self.name + } + } + + impl From<&SimpleVariable> for String { + fn from(value: &SimpleVariable) -> Self { + value.name.to_string() + } + } + + impl From> for String { + fn from(value: SimpleVariable) -> Self { + value.name.to_string() + } + } + + impl SimpleVariable { + pub const fn new(name: &'static str) -> Self { + Self { + name: Cow::Borrowed(name), + phantom_data: PhantomData, + phantom_data2: PhantomData, + } + } + } + + impl RawVariable for SimpleVariable { + fn name(&self) -> &str { + &self.name + } + } + + impl TypedVariable + for SimpleVariable + { + type Value = Value; 
+ type Borrowed = Borrowed; + fn parse(&self, value: &str) -> Result { + Value::from_str(value) + } + fn generate(&self, value: &Self::Borrowed) -> Result { + Ok(Borrowed::to_string(value)) + } + } + + #[derive(Clone, Copy, Debug, Display, Ord, PartialOrd, Eq, PartialEq)] + pub struct PathLike(pub &'static str); + + impl RawVariable for PathLike { + fn name(&self) -> &str { + self.0 + } + } + + impl TypedVariable for PathLike { + type Value = Vec; + fn parse(&self, value: &str) -> Result { + Ok(std::env::split_paths(value).collect()) + } + + fn generate(&self, value: &Self::Value) -> Result { + std::env::join_paths(value)? + .into_string() + .map_err(|e| anyhow!("Not a valid UTF-8 string: '{}'.", e.to_string_lossy())) + } + } + + impl PathLike { + pub fn prepend(&self, value: impl Into) -> Result { + let mut paths = self.get()?; + paths.insert(0, value.into()); + self.set(&paths) + } + } + + /// Environment variable consisting of string separated by a given separator. + #[derive(Clone, Copy, Debug, Ord, PartialOrd, Eq, PartialEq)] + pub struct Separated { + pub name: &'static str, + pub separator: &'static str, + } + + impl RawVariable for Separated { + fn name(&self) -> &str { + self.name + } + } + + impl TypedVariable for Separated { + type Value = Vec; + + fn parse(&self, value: &str) -> Result { + Ok(value.split(self.separator).map(ToString::to_string).collect()) + } + + fn generate(&self, value: &Self::Borrowed) -> Result { + Ok(value.join(self.separator)) + } + } +} + +// +// +// impl<'a, T> SpecFromIter for std::slice::Iter<'a, T> { +// fn f(&self) {} +// } + +#[derive(Clone, Copy, Debug, Display, Ord, PartialOrd, Eq, PartialEq)] +pub struct StrLikeVariable { + pub name: &'static str, +} + +impl StrLikeVariable { + pub const fn new(name: &'static str) -> Self { + Self { name } + } +} + +impl Variable for StrLikeVariable { + const NAME: &'static str = ""; + fn name(&self) -> &str { + self.name + } +} + +pub trait Variable { + const NAME: &'static str; + type 
Value: FromString = String; + + fn format(&self, value: &Self::Value) -> String + where Self::Value: ToString { + value.to_string() + } + + fn name(&self) -> &str { + Self::NAME + } + + fn fetch(&self) -> Result { + self.fetch_as() + } + + fn fetch_as(&self) -> Result { + self.fetch_string()?.parse2() + } + + fn fetch_string(&self) -> Result { + expect_var(self.name()) + } + + fn fetch_os_string(&self) -> Result { + expect_var_os(self.name()) + } + + fn set(&self, value: &Self::Value) + where Self::Value: ToString { + debug!("Setting env {}={}", self.name(), self.format(value)); + std::env::set_var(self.name(), self.format(value)) + } + + fn set_os(&self, value: &Self::Value) + where Self::Value: AsRef { + std::env::set_var(self.name(), value) + } + + fn set_path

(&self, value: &P) + where + Self::Value: AsRef, + P: AsRef, { + std::env::set_var(self.name(), value.as_ref()) + } + + fn emit_env(&self, value: &Self::Value) -> Result + where Self::Value: ToString { + crate::actions::workflow::set_env(self.name(), value) + } + + fn emit(&self, value: &Self::Value) -> Result + where Self::Value: ToString { + self.emit_env(value)?; + crate::actions::workflow::set_output(self.name(), value); + Ok(()) + } + + fn is_set(&self) -> bool { + self.fetch_os_string().is_ok() + } + + fn remove(&self) { + std::env::remove_var(self.name()) + } +} + +const PATH_ENVIRONMENT_NAME: &str = "PATH"; + + +pub fn expect_var(name: impl AsRef) -> Result { + let name = name.as_ref(); + std::env::var(name).context(anyhow!("Missing environment variable {}.", name)) +} + +pub fn expect_var_os(name: impl AsRef) -> Result { + let name = name.as_ref(); + std::env::var_os(name) + .ok_or_else(|| anyhow!("Missing environment variable {}.", name.to_string_lossy())) +} + +pub fn prepend_to_path(path: impl Into) -> Result { + let path = path.into(); + trace!("Prepending {} to {PATH_ENVIRONMENT_NAME}.", path.display()); + let old_value = std::env::var_os(PATH_ENVIRONMENT_NAME); + let old_pieces = old_value.iter().flat_map(std::env::split_paths); + let new_pieces = once(path).chain(old_pieces); + let new_value = std::env::join_paths(new_pieces)?; + std::env::set_var(PATH_ENVIRONMENT_NAME, new_value); + Ok(()) +} + +pub async fn fix_duplicated_env_var(var_name: impl AsRef) -> Result { + let var_name = var_name.as_ref(); + + let mut paths = indexmap::IndexSet::new(); + while let Ok(path) = std::env::var(var_name) { + paths.extend(std::env::split_paths(&path)); + std::env::remove_var(var_name); + } + std::env::set_var(var_name, std::env::join_paths(paths)?); + Ok(()) +} + +#[derive(Clone, Debug)] +pub enum Action { + Remove, + Set(String), + PrependPaths(Vec), +} + +#[derive(Clone, Debug)] +pub struct Modification { + pub variable_name: UniCase, + pub action: Action, +} 
+ +impl Modification { + pub fn apply(&self) -> Result { + let normalized_name = &*self.variable_name; + match &self.action { + Action::Remove => { + debug!("Removing {}", self.variable_name); + std::env::remove_var(normalized_name) + } + Action::Set(value) => { + debug!("Setting {}={}", self.variable_name, value); + std::env::set_var(normalized_name, value); + } + Action::PrependPaths(paths_to_prepend) => + if let Ok(old_value) = std::env::var(normalized_name) { + debug!( + "Prepending to {} the following paths: {:?}", + self.variable_name, paths_to_prepend + ); + let new_paths_set = paths_to_prepend.iter().collect::>(); + let old_paths = std::env::split_paths(&old_value).collect_vec(); + + let old_paths_filtered = + old_paths.iter().filter(|old_path| !new_paths_set.contains(old_path)); + let new_value = + std::env::join_paths(paths_to_prepend.iter().chain(old_paths_filtered))?; + std::env::set_var(&*self.variable_name, new_value); + } else { + let new_value = std::env::join_paths(paths_to_prepend)?; + std::env::set_var(&*self.variable_name, new_value); + }, + }; + Ok(()) + } +} diff --git a/build/ci_utils/src/env/known.rs b/build/ci_utils/src/env/known.rs new file mode 100644 index 0000000000..6234b12074 --- /dev/null +++ b/build/ci_utils/src/env/known.rs @@ -0,0 +1,7 @@ +// use super::*; + +use crate::env::new::PathLike; + + + +pub const PATH: PathLike = PathLike("PATH"); diff --git a/build/ci_utils/src/extensions.rs b/build/ci_utils/src/extensions.rs new file mode 100644 index 0000000000..6360b7313c --- /dev/null +++ b/build/ci_utils/src/extensions.rs @@ -0,0 +1,19 @@ +// ============== +// === Export === +// ============== + +pub mod child; +pub mod clap; +pub mod command; +pub mod from_string; +pub mod future; +pub mod iterator; +pub mod maps; +pub mod octocrab; +pub mod os; +pub mod output; +pub mod path; +pub mod reqwest; +pub mod result; +pub mod str; +pub mod version; diff --git a/build/ci_utils/src/extensions/child.rs 
b/build/ci_utils/src/extensions/child.rs new file mode 100644 index 0000000000..e0a18f71f4 --- /dev/null +++ b/build/ci_utils/src/extensions/child.rs @@ -0,0 +1,13 @@ +use crate::prelude::*; + + + +pub trait ChildExt { + fn wait_ok(&mut self) -> BoxFuture; +} + +impl ChildExt for tokio::process::Child { + fn wait_ok(&mut self) -> BoxFuture { + async move { self.wait().await?.exit_ok().anyhow_err() }.boxed() + } +} diff --git a/build/ci_utils/src/extensions/clap.rs b/build/ci_utils/src/extensions/clap.rs new file mode 100644 index 0000000000..204822a380 --- /dev/null +++ b/build/ci_utils/src/extensions/clap.rs @@ -0,0 +1,46 @@ +use crate::prelude::*; + +use crate::global::store_static_text; + +use clap::Arg; + + + +/// Extensions to the `clap::Arg`, intended to be used as argument attributes. +pub trait ArgExt<'h>: Sized + 'h { + /// If the given value is `Some`, set it as a default. + /// + /// Useful primarily when presence of default value on a CLI argument depends on runtime + /// conditions. + fn maybe_default>(self, f: impl Borrow>) -> Self; + + fn maybe_default_os>(self, f: Option) -> Self { + let f = f.as_ref().map(AsRef::as_ref); + self.maybe_default(f.as_ref().map(|s| s.as_str())) + } + + fn maybe_default_t(self, f: impl Borrow> + 'h) -> Self { + let printed = f.borrow().as_ref().map(|v| v.to_string()); + self.maybe_default(printed) + } + + /// Like `env` but prefixes the generated environment variable name with + /// `ENVIRONMENT_VARIABLE_NAME_PREFIX`. 
+ fn prefixed_env(self, prefix: impl AsRef) -> Self; +} + +impl<'h> ArgExt<'h> for Arg<'h> { + fn maybe_default>(self, f: impl Borrow>) -> Self { + if let Some(default) = f.borrow().as_ref() { + self.default_value(store_static_text(default)).required(false) + } else { + self + } + } + + fn prefixed_env(self, prefix: impl AsRef) -> Self { + use heck::ToShoutySnakeCase; + let var_name = format!("{}_{}", prefix.as_ref(), self.get_id().to_shouty_snake_case()); + self.env(store_static_text(var_name)) + } +} diff --git a/build/ci_utils/src/extensions/command.rs b/build/ci_utils/src/extensions/command.rs new file mode 100644 index 0000000000..0c428cce73 --- /dev/null +++ b/build/ci_utils/src/extensions/command.rs @@ -0,0 +1,78 @@ +use crate::prelude::*; + +use std::fmt::Write; + + + +pub trait CommandExt { + // fn run_ok(&mut self, program: &impl Program) -> BoxFuture<'static, Result<()>>; + // + // fn output_ok(&mut self) -> BoxFuture<'static, Result>; + // // TODO: `spawn` but does logs like some other methods. They all need a naming unification + // pass. 
fn spawn_nicer(&mut self) -> Result; + + fn as_std(&self) -> &std::process::Command; + + fn describe(&self) -> String { + let mut ret = String::new(); + let pretty_printed = format!("{:?}", self.as_std()); + let _ = write!(ret, "Command:\n\t{}", pretty_printed); + if let Some(cwd) = self.as_std().get_current_dir() { + let _ = write!(ret, "\n\twith working directory: {}", cwd.display()); + }; + let env = self.as_std().get_envs(); + if !env.is_empty() { + let _ = write!(ret, "\n\twith environment overrides:"); + } + for (name, val) in self.as_std().get_envs() { + let _ = write!( + ret, + "\n\t\t{}={}", + name.to_string_lossy(), + val.map_or(default(), OsStr::to_string_lossy) + ); + } + ret + // ?self.as_std().get_program() + } +} + +#[allow(unused_qualifications)] +impl CommandExt for crate::program::command::Command { + fn as_std(&self) -> &std::process::Command { + self.inner.as_std() + } +} + + +impl CommandExt for std::process::Command { + fn as_std(&self) -> &std::process::Command { + self + } +} + +impl CommandExt for tokio::process::Command { + fn as_std(&self) -> &std::process::Command { + self.as_std() + } + // fn run_ok(&mut self) -> BoxFuture<'static, Result<()>> { + // let pretty = self.describe(); + // debug!("Will run: {}", pretty); + // let status = self.status(); + // async move { status.await?.exit_ok().context(format!("Command failed: {}", pretty)) } + // .boxed() + // } + // + // fn output_ok(&mut self) -> BoxFuture<'static, Result> { + // let pretty = self.describe(); + // debug!("Will run: {}", pretty); + // let output = self.output(); + // async move { output.await.context(format!("Command failed: {}", pretty)) }.boxed() + // } + // + // fn spawn_nicer(&mut self) -> Result { + // let pretty = self.describe(); + // debug!("Spawning {}", pretty); + // self.spawn().context(format!("Failed to spawn: {}", pretty)) + // } +} diff --git a/build/ci_utils/src/extensions/from_string.rs b/build/ci_utils/src/extensions/from_string.rs new file mode 100644 
index 0000000000..cf2b814a1f --- /dev/null +++ b/build/ci_utils/src/extensions/from_string.rs @@ -0,0 +1,36 @@ +use crate::prelude::*; + +use anyhow::Context; +use std::any::type_name; + + + +pub trait FromString: Sized { + fn from_str(s: &str) -> Result; + + fn parse_into(text: impl AsRef) -> Result + where + Self: TryInto, + >::Error: Into, { + let value = Self::from_str(text.as_ref())?; + value.try_into().anyhow_err().context(format!( + "Failed to convert {} => {}.", + type_name::(), + type_name::(), + )) + } +} + +impl FromString for T +where + T: std::str::FromStr, + T::Err: Into, +{ + fn from_str(text: &str) -> Result { + text.parse::().anyhow_err().context(format!( + r#"Failed to parse "{}" as {}."#, + text, + type_name::() + )) + } +} diff --git a/build/ci_utils/src/extensions/future.rs b/build/ci_utils/src/extensions/future.rs new file mode 100644 index 0000000000..9cee51f554 --- /dev/null +++ b/build/ci_utils/src/extensions/future.rs @@ -0,0 +1,75 @@ +use crate::prelude::*; + +use futures_util::future::ErrInto; +use futures_util::future::Map; +use futures_util::future::MapErr; +use futures_util::future::MapOk; +use futures_util::stream; +use futures_util::FutureExt as _; +use futures_util::TryFutureExt as _; + + + +fn void(_t: T) {} + +pub trait FutureExt: Future { + fn void(self) -> Map ()> + where Self: Sized { + self.map(void) + } +} + +impl FutureExt for T where T: Future {} + +type FlattenResultFn = + fn(std::result::Result, E>) -> std::result::Result; + +pub trait TryFutureExt: TryFuture { + fn void_ok(self) -> MapOk ()> + where Self: Sized { + self.map_ok(void) + } + + fn anyhow_err(self) -> MapErr anyhow::Error> + where + Self: Sized, + // TODO: we should rely on `into` rather than `from` + anyhow::Error: From, { + self.map_err(anyhow::Error::from) + } + + fn and_then_sync( + self, + f: F, + ) -> Map, F>, FlattenResultFn> + where + Self: Sized, + F: FnOnce(Self::Ok) -> std::result::Result, + Self::Error: Into, + { + 
self.err_into().map_ok(f).map(std::result::Result::flatten) + } +} + +impl TryFutureExt for T where T: TryFuture {} + + +pub fn receiver_to_stream( + mut receiver: tokio::sync::mpsc::Receiver, +) -> impl Stream { + futures::stream::poll_fn(move |ctx| receiver.poll_recv(ctx)) +} + + + +pub trait TryStreamExt: TryStream { + fn anyhow_err(self) -> stream::MapErr anyhow::Error> + where + Self: Sized, + // TODO: we should rely on `into` rather than `from` + anyhow::Error: From, { + self.map_err(anyhow::Error::from) + } +} + +impl TryStreamExt for T where T: TryStream {} diff --git a/build/ci_utils/src/extensions/iterator.rs b/build/ci_utils/src/extensions/iterator.rs new file mode 100644 index 0000000000..2d9bacbf06 --- /dev/null +++ b/build/ci_utils/src/extensions/iterator.rs @@ -0,0 +1,54 @@ +use crate::prelude::*; + +use std::iter::Rev; +use std::iter::Take; + + + +pub trait IteratorExt: Iterator { + fn try_filter(mut self, mut f: impl FnMut(&Self::Item) -> Result) -> Result + where + Self: Sized, + R: Default + Extend + Sized, { + self.try_fold(default(), |mut acc: R, item| { + acc.extend(f(&item)?.then_some(item)); + Ok(acc) + }) + } + + fn try_map(mut self, mut f: impl FnMut(Self::Item) -> Result) -> Result + where + Self: Sized, + R: Default + Extend + Sized, { + self.try_fold(default(), |mut acc: R, item| { + acc.extend_one(f(item)?); + Ok(acc) + }) + } +} + +impl IteratorExt for I {} + +pub trait TryIteratorExt: Iterator { + type Ok; + fn try_collect_vec(self) -> Result>; +} + +impl TryIteratorExt for T +where + T: Iterator>, + E: Into, +{ + type Ok = U; + fn try_collect_vec(self) -> Result> { + self.map(|i| i.anyhow_err()).collect::>>() + } +} + +pub trait ExactDoubleEndedIteratorExt: ExactSizeIterator + DoubleEndedIterator + Sized { + fn take_last_n(self, n: usize) -> Rev>> { + self.rev().take(n).rev() + } +} + +impl ExactDoubleEndedIteratorExt for T where T: ExactSizeIterator + DoubleEndedIterator {} diff --git a/build/ci_utils/src/extensions/maps.rs 
b/build/ci_utils/src/extensions/maps.rs new file mode 100644 index 0000000000..0894c868af --- /dev/null +++ b/build/ci_utils/src/extensions/maps.rs @@ -0,0 +1,24 @@ +use crate::prelude::*; + +use std::collections::HashMap; + + + +// trait Foo<'a, K, V> = FnOnce(&'a K) -> Future>; + +pub async fn get_or_insert(map: &mut HashMap, key: K, f: F) -> Result<&V> +where + K: Eq + Hash, + // TODO [mwu] It would be better if R would be allowed to live only for 'a lifetime. + // No idea how to express this. + for<'a> F: FnOnce(&'a K) -> R, + R: Future>, { + use std::collections::hash_map::Entry; + match map.entry(key) { + Entry::Occupied(occupied) => Ok(occupied.into_mut()), + Entry::Vacant(vacant) => { + let value = f(vacant.key()).await?; + Ok(vacant.insert(value)) + } + } +} diff --git a/build/ci_utils/src/extensions/octocrab.rs b/build/ci_utils/src/extensions/octocrab.rs new file mode 100644 index 0000000000..6a036ee3e0 --- /dev/null +++ b/build/ci_utils/src/extensions/octocrab.rs @@ -0,0 +1,9 @@ +use crate::prelude::*; + + + +#[async_trait] +pub trait OctocrabExt {} + +#[async_trait] +impl OctocrabExt for Octocrab {} diff --git a/build/ci_utils/src/extensions/os.rs b/build/ci_utils/src/extensions/os.rs new file mode 100644 index 0000000000..3abce4d97e --- /dev/null +++ b/build/ci_utils/src/extensions/os.rs @@ -0,0 +1,74 @@ +use crate::prelude::*; + + + +/// A bunch of constant literals associated with a given OS. Follows the convention of constants +/// defined in [`std::env::consts`] module. 
+pub trait OsExt: Copy { + fn exe_suffix(self) -> &'static str; + fn exe_extension(self) -> &'static str; + fn dll_prefix(self) -> &'static str; + fn dll_extension(self) -> &'static str; + fn dll_suffix(self) -> &'static str; +} + +impl const OsExt for OS { + fn exe_suffix(self) -> &'static str { + match self { + OS::Windows => ".exe", + OS::Linux => "", + OS::MacOS => "", + _ => todo!(), + } + } + + fn exe_extension(self) -> &'static str { + match self { + OS::Windows => "exe", + OS::Linux => "", + OS::MacOS => "", + _ => todo!(), + } + } + + fn dll_prefix(self) -> &'static str { + match self { + OS::Windows => "", + OS::Linux => "lib", + OS::MacOS => "lib", + _ => todo!(), + } + } + + fn dll_extension(self) -> &'static str { + match self { + OS::Windows => "dll", + OS::Linux => "so", + OS::MacOS => "dylib", + _ => todo!(), + } + } + + fn dll_suffix(self) -> &'static str { + match self { + OS::Windows => ".dll", + OS::Linux => ".so", + OS::MacOS => ".dylib", + _ => todo!(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn same_on_target() { + assert_eq!(std::env::consts::DLL_EXTENSION, TARGET_OS.dll_extension()); + assert_eq!(std::env::consts::DLL_PREFIX, TARGET_OS.dll_prefix()); + assert_eq!(std::env::consts::DLL_SUFFIX, TARGET_OS.dll_suffix()); + assert_eq!(std::env::consts::EXE_EXTENSION, TARGET_OS.exe_extension()); + assert_eq!(std::env::consts::EXE_SUFFIX, TARGET_OS.exe_suffix()); + } +} diff --git a/build/ci_utils/src/extensions/output.rs b/build/ci_utils/src/extensions/output.rs new file mode 100644 index 0000000000..a0b5df6e89 --- /dev/null +++ b/build/ci_utils/src/extensions/output.rs @@ -0,0 +1,48 @@ +use crate::prelude::*; + +use anyhow::Context; + + + +pub trait OutputExt { + fn single_line_stdout(&self) -> Result; + //fn run_ok(&self) -> Result; + fn describe(&self) -> String; + + fn stdout_as_str(&self) -> Result<&str>; + fn into_stdout_string(self) -> Result; +} + +impl OutputExt for std::process::Output { + fn 
single_line_stdout(&self) -> Result { + // self.run_ok()?; + let lines = non_empty_lines(&self.stdout)?.collect_vec(); + match lines.as_slice() { + [line] => Ok(line.to_string()), + other => bail!("Expected exactly 1 non-empty line. Found: {:?}", other), + } + } + + // fn run_ok(&self) -> Result { + // self.status.exit_ok().with_context(|| self.describe()) + // } + fn describe(&self) -> String { + format!( + "Stdout:\n{:?}\n\nStderr:\n{:?}\n", + std::str::from_utf8(&self.stdout).unwrap_or(""), + std::str::from_utf8(&self.stderr).unwrap_or(""), + ) + } + + fn stdout_as_str(&self) -> Result<&str> { + std::str::from_utf8(&self.stdout).context("The command stdout is not a valid text.") + } + + fn into_stdout_string(self) -> Result { + String::from_utf8(self.stdout).context("The command stdout is not a valid text.") + } +} + +pub fn non_empty_lines(bytes: &[u8]) -> Result> { + Ok(std::str::from_utf8(bytes)?.lines().map(str::trim).filter(|line| !line.is_empty())) +} diff --git a/build/ci_utils/src/extensions/path.rs b/build/ci_utils/src/extensions/path.rs new file mode 100644 index 0000000000..e1adc358c9 --- /dev/null +++ b/build/ci_utils/src/extensions/path.rs @@ -0,0 +1,100 @@ +use crate::prelude::*; + +use serde::de::DeserializeOwned; + + + +pub trait PathExt: AsRef { + fn join_iter>(&self, segments: impl IntoIterator) -> PathBuf { + let mut ret = self.as_ref().to_path_buf(); + ret.extend(segments); + ret + } + + /// Strips the leading `\\?\` prefix from Windows paths if present. + fn without_verbatim_prefix(&self) -> &Path { + self.as_str().strip_prefix(r"\\?\").map_or(self.as_ref(), Path::new) + } + + /// Appends a new extension to the file. + /// + /// Does not try to replace previous extension, unlike `set_extension`. + /// Does nothing when given extension string is empty. 
+ /// + /// ``` + /// use ide_ci::extensions::path::PathExt; + /// use std::path::PathBuf; + /// + /// let path = PathBuf::from("foo.tar").with_appended_extension("gz"); + /// assert_eq!(path, PathBuf::from("foo.tar.gz")); + /// + /// let path = PathBuf::from("foo").with_appended_extension("zip"); + /// assert_eq!(path, PathBuf::from("foo.zip")); + /// ``` + fn with_appended_extension(&self, extension: impl AsRef) -> PathBuf { + if extension.as_ref().is_empty() { + return self.as_ref().into(); + } else { + let mut ret = self.as_ref().to_path_buf().into_os_string(); + ret.push("."); + ret.push(extension.as_ref()); + ret.into() + } + } + + #[context("Failed to deserialize file `{}` as type `{}`.", self.as_ref().display(), std::any::type_name::())] + fn read_to_json(&self) -> Result { + let content = crate::fs::read_to_string(self)?; + serde_json::from_str(&content).anyhow_err() + } + + fn write_as_json(&self, value: &T) -> Result { + trace!("Writing JSON to {}.", self.as_ref().display()); + let file = crate::fs::create(self)?; + serde_json::to_writer(file, value).anyhow_err() + } + + fn write_as_yaml(&self, value: &T) -> Result { + trace!("Writing YAML to {}.", self.as_ref().display()); + let file = crate::fs::create(self)?; + serde_yaml::to_writer(file, value).anyhow_err() + } + + fn as_str(&self) -> &str { + self.as_ref().to_str().unwrap() + } + + /// Split path to components and collect them into a new PathBuf. + /// + /// This is useful for `/` -> native separator conversion. + fn normalize(&self) -> PathBuf { + self.as_ref().components().collect() + } + + /// Like `parent` but provides a sensible error message if the path has no parent. 
+ fn try_parent(&self) -> Result<&Path> { + self.as_ref() + .parent() + .with_context(|| format!("Failed to get parent of path `{}`.", self.as_ref().display())) + } +} + +impl> PathExt for T {} + +pub fn display_fmt(path: &Path, f: &mut Formatter) -> std::fmt::Result { + std::fmt::Display::fmt(&path.display(), f) +} + +#[cfg(test)] +mod tests { + #[allow(unused_imports)] + use super::*; + + #[test] + fn stripping_unc_prefix() { + let path_with_unc = Path::new(r"\\?\H:\NBO\ci-build\target\debug\enso-build2.exe"); + let path_without_unc = Path::new(r"H:\NBO\ci-build\target\debug\enso-build2.exe"); + assert_eq!(path_with_unc.without_verbatim_prefix(), path_without_unc); + assert_eq!(path_without_unc.without_verbatim_prefix(), path_without_unc); + } +} diff --git a/build/ci_utils/src/extensions/reqwest.rs b/build/ci_utils/src/extensions/reqwest.rs new file mode 100644 index 0000000000..b43ae94668 --- /dev/null +++ b/build/ci_utils/src/extensions/reqwest.rs @@ -0,0 +1,37 @@ +use crate::prelude::*; + +use mime::Mime; +use reqwest::header::HeaderMap; +use reqwest::header::HeaderName; +use reqwest::header::HeaderValue; +use reqwest::header::CONNECTION; +use reqwest::header::CONTENT_TYPE; + + + +pub trait ClientBuilderExt: Sized { + fn default_content_type(self, mime_type: Mime) -> Self; + fn keep_alive(self, seconds: usize) -> Self; + fn default_header(self, name: impl Into, value: impl Into) -> Self; +} + +impl ClientBuilderExt for reqwest::ClientBuilder { + fn default_content_type(self, mime_type: Mime) -> Self { + // We can safely unwrap, because we know that all mime types are in format that can be used + // as HTTP header value. + self.default_header(CONTENT_TYPE, HeaderValue::try_from(mime_type.as_ref()).unwrap()) + } + + fn keep_alive(self, seconds: usize) -> Self { + let mut header = HeaderMap::new(); + // We can safely unwrap, because we know that all mime types are in format that can be used + // as HTTP header value. 
+ header.insert(CONNECTION, HeaderValue::from_static("Keep-Alive")); + header.insert(HeaderName::from_static("keep-alive"), HeaderValue::from(seconds)); + self.default_headers(header) + } + + fn default_header(self, name: impl Into, value: impl Into) -> Self { + self.default_headers(HeaderMap::from_iter([(name.into(), value.into())])) + } +} diff --git a/build/ci_utils/src/extensions/result.rs b/build/ci_utils/src/extensions/result.rs new file mode 100644 index 0000000000..ef8566dd2f --- /dev/null +++ b/build/ci_utils/src/extensions/result.rs @@ -0,0 +1,65 @@ +use crate::prelude::*; + + + +pub trait ResultExt: Sized { + #[allow(clippy::type_complexity)] + fn map_async<'a, T2, F, Fut>( + self, + f: F, + ) -> futures::future::Either< + futures::future::Map std::result::Result>, + std::future::Ready>, + > + where + F: FnOnce(T) -> Fut, + Fut: Future + 'a; + + fn and_then_async<'a, T2, E2, F, Fut>( + self, + f: F, + ) -> futures::future::Either>> + where + F: FnOnce(T) -> Fut, + Fut: Future> + Send + 'a, + E: Into, + T2: Send + 'a, + E2: Send + 'a; +} + +impl ResultExt for std::result::Result { + fn map_async<'a, T2, F, Fut>( + self, + f: F, + ) -> futures::future::Either< + futures::future::Map std::result::Result>, + std::future::Ready>, + > + where + F: FnOnce(T) -> Fut, + Fut: Future + 'a, + { + match self { + Ok(v) => f(v).map(Ok as fn(T2) -> std::result::Result).left_future(), + Err(e) => ready(Err(e)).right_future(), + } + } + + + fn and_then_async<'a, T2, E2, F, Fut>( + self, + f: F, + ) -> futures::future::Either>> + where + F: FnOnce(T) -> Fut, + Fut: Future> + Send + 'a, + E: Into, + T2: Send + 'a, + E2: Send + 'a, + { + match self { + Ok(v) => f(v).left_future(), + Err(e) => ready(Err(e.into())).right_future(), + } + } +} diff --git a/build/ci_utils/src/extensions/str.rs b/build/ci_utils/src/extensions/str.rs new file mode 100644 index 0000000000..22827cf0d6 --- /dev/null +++ b/build/ci_utils/src/extensions/str.rs @@ -0,0 +1,28 @@ +use crate::prelude::*; + 
+use anyhow::Context; +use std::any::type_name; + + + +pub trait StrLikeExt { + // FIXME: this needs better name! + fn parse2(&self) -> Result; + + fn parse_through(&self) -> Result + where + T: FromString + TryInto, + >::Error: Into, { + self.parse2::()?.try_into().anyhow_err().context(format!( + "Failed to convert {} => {}.", + type_name::(), + type_name::(), + )) + } +} + +impl> StrLikeExt for T { + fn parse2(&self) -> Result { + U::from_str(self.as_ref()) + } +} diff --git a/build/ci_utils/src/extensions/version.rs b/build/ci_utils/src/extensions/version.rs new file mode 100644 index 0000000000..1e261365ce --- /dev/null +++ b/build/ci_utils/src/extensions/version.rs @@ -0,0 +1,16 @@ +use crate::prelude::*; + + + +pub trait VersionExt { + fn triple(&self) -> (u64, u64, u64); + fn same_triple(&self, other: &Self) -> bool { + self.triple() == other.triple() + } +} + +impl VersionExt for Version { + fn triple(&self) -> (u64, u64, u64) { + (self.major, self.minor, self.patch) + } +} diff --git a/build/ci_utils/src/fmt.rs b/build/ci_utils/src/fmt.rs new file mode 100644 index 0000000000..31280a3525 --- /dev/null +++ b/build/ci_utils/src/fmt.rs @@ -0,0 +1,11 @@ +use crate::prelude::*; + + + +/// Formats list itself like a `Debug` but uses `ToString` to format elements. 
+pub fn display_list( + sequence: impl IntoIterator, + f: &mut Formatter, +) -> std::fmt::Result { + f.debug_list().entries(sequence.into_iter().map(|item| item.to_string())).finish() +} diff --git a/build/ci_utils/src/fs.rs b/build/ci_utils/src/fs.rs new file mode 100644 index 0000000000..fd5828248b --- /dev/null +++ b/build/ci_utils/src/fs.rs @@ -0,0 +1,309 @@ +use crate::prelude::*; + +use async_compression::tokio::bufread::GzipEncoder; +use async_compression::Level; +use fs_extra::dir::CopyOptions; +use std::fs::File; + + +// ============== +// === Export === +// ============== + +pub mod tokio; +pub mod wrappers; + +pub use wrappers::*; + + + +///////////////////////////// + +/// Like the standard version but will create any missing parent directories from the path. +#[context("Failed to write path: {}", path.as_ref().display())] +pub fn write(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result { + create_parent_dir_if_missing(&path)?; + wrappers::write(&path, &contents) +} + +/// Serialize the data to JSON text and write it to the file. +/// +/// See [`write`]. +#[context("Failed to write path: {}", path.as_ref().display())] +pub fn write_json(path: impl AsRef, contents: &impl Serialize) -> Result { + let contents = serde_json::to_string(contents)?; + write(&path, &contents) +} + +/// Like the standard version but will create any missing parent directories from the path. +#[context("Failed to open path for writing: {}", path.as_ref().display())] +pub fn create(path: impl AsRef) -> Result { + create_parent_dir_if_missing(&path)?; + wrappers::create(&path) +} + +/////////////////////////// + +#[context("Failed to read the file: {}", path.as_ref().display())] +pub fn read_string_into(path: impl AsRef) -> Result { + read_to_string(&path)?.parse2() +} + +/// Create a directory (and all missing parent directories), +/// +/// Does not fail when a directory already exists. 
+#[context("Failed to create directory {}", path.as_ref().display())] +pub fn create_dir_if_missing(path: impl AsRef) -> Result { + let result = std::fs::create_dir_all(&path); + match result { + Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => Ok(()), + result => result.anyhow_err(), + } +} + +/// Create a parent directory for path (and all missing parent directories), +/// +/// Does not fail when a directory already exists. +#[context("Failed to create parent directory for {}", path.as_ref().display())] +pub fn create_parent_dir_if_missing(path: impl AsRef) -> Result { + if let Some(parent) = path.as_ref().parent() { + create_dir_if_missing(parent)?; + Ok(parent.into()) + } else { + bail!("No parent directory for path {}.", path.as_ref().display()) + } +} + +/// Remove a directory with all its subtree. +/// +/// Does not fail if the directory is not found. +#[tracing::instrument(fields(path = %path.as_ref().display()))] +#[context("Failed to remove directory {}", path.as_ref().display())] +pub fn remove_dir_if_exists(path: impl AsRef) -> Result { + let result = std::fs::remove_dir_all(&path); + match result { + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()), + result => result.anyhow_err(), + } +} + +/// Remove a regular file. +/// +/// Does not fail if the file is not found. +#[tracing::instrument(fields(path = %path.as_ref().display()))] +#[context("Failed to remove file {}", path.as_ref().display())] +pub fn remove_file_if_exists(path: impl AsRef) -> Result<()> { + let result = std::fs::remove_file(&path); + match result { + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()), + result => result.anyhow_err(), + } +} + +/// Remove a file being either directory or regular file.. +/// +/// Does not fail if the file is not found. 
+#[context("Failed to remove entry {} (if exists)", path.as_ref().display())] +pub fn remove_if_exists(path: impl AsRef) -> Result { + let path = path.as_ref(); + if path.is_dir() { + remove_dir_if_exists(path) + } else { + remove_file_if_exists(path) + } +} + +#[context("Failed to create symlink {} => {}", src.as_ref().display(), dst.as_ref().display())] +pub fn symlink_auto(src: impl AsRef, dst: impl AsRef) -> Result { + create_parent_dir_if_missing(&dst)?; + symlink::symlink_auto(&src, &dst).anyhow_err() +} + +/// Recreate directory, so it exists and is empty. +pub fn reset_dir(path: impl AsRef) -> Result { + let path = path.as_ref(); + debug!("Will reset directory {}", path.display()); + remove_dir_if_exists(path)?; + create_dir_if_missing(path)?; + Ok(()) +} + +pub fn require_exist(path: impl AsRef) -> Result { + if path.as_ref().exists() { + trace!("{} does exist.", path.as_ref().display()); + Ok(()) + } else { + bail!("{} does not exist.", path.as_ref().display()) + } +} + +#[tracing::instrument(skip_all, fields( + src = %source_file.as_ref().display(), + dest = %dest_dir.as_ref().display()), + err)] +pub fn copy_to(source_file: impl AsRef, dest_dir: impl AsRef) -> Result { + require_exist(&source_file)?; + create_dir_if_missing(dest_dir.as_ref())?; + debug!("Will copy {} to {}", source_file.as_ref().display(), dest_dir.as_ref().display()); + let mut options = CopyOptions::new(); + options.overwrite = true; + fs_extra::copy_items(&[source_file], dest_dir, &options)?; + Ok(()) +} + + +#[tracing::instrument(skip_all, fields( + src = %source_file.as_ref().display(), + dest = %destination_file.as_ref().display()), + err)] +pub fn copy(source_file: impl AsRef, destination_file: impl AsRef) -> Result { + let source_file = source_file.as_ref(); + let destination_file = destination_file.as_ref(); + debug!("Will copy {} => {}", source_file.display(), destination_file.display()); + if let Some(parent) = destination_file.parent() { + create_dir_if_missing(parent)?; + 
if source_file.is_dir() { + let mut options = fs_extra::dir::CopyOptions::new(); + options.overwrite = true; + options.content_only = true; + fs_extra::dir::copy(source_file, destination_file, &options)?; + } else { + wrappers::copy(source_file, destination_file)?; + } + } else { + bail!("Cannot copy to the root path: {}", destination_file.display()); + } + Ok(()) +} + +pub fn same_existing_path(source: impl AsRef, destination: impl AsRef) -> Result { + Ok(canonicalize(source)? == canonicalize(destination)?) +} + +pub async fn mirror_directory(source: impl AsRef, destination: impl AsRef) -> Result { + create_dir_if_missing(destination.as_ref())?; + + // Robocopy seems to waste much time when running with the same path as source and destination. + if same_existing_path(&source, &destination)? { + return Ok(()); + } + + if TARGET_OS == OS::Windows { + crate::programs::robocopy::mirror_directory(source, destination).await + } else { + crate::programs::rsync::mirror_directory(source, destination).await + } +} + +#[context("Failed because the path does not point to a directory: {}", path.as_ref().display())] +pub fn expect_dir(path: impl AsRef) -> Result { + let filetype = metadata(&path)?.file_type(); + if filetype.is_dir() { + Ok(()) + } else { + bail!("File is not directory, its type is: {filetype:?}") + } +} + + +#[context("Failed because the path does not point to a regular file: {}", path.as_ref().display())] +pub fn expect_file(path: impl AsRef) -> Result { + let filetype = metadata(&path)?.file_type(); + if filetype.is_file() { + Ok(()) + } else { + bail!("File is not a regular file, its type is: {filetype:?}") + } +} + +#[cfg(not(target_os = "windows"))] +#[context("Failed to update permissions on `{}`", path.as_ref().display())] +pub fn allow_owner_execute(path: impl AsRef) -> Result { + use crate::anyhow::ResultExt; + use std::os::unix::prelude::*; + debug!("Setting executable permission on {}", path.as_ref().display()); + let metadata = 
path.as_ref().metadata()?; + let mut permissions = metadata.permissions(); + let mode = permissions.mode(); + let owner_can_execute = 0o0100; + permissions.set_mode(mode | owner_can_execute); + std::fs::set_permissions(path.as_ref(), permissions).anyhow_err() +} + +#[cfg(target_os = "windows")] +#[context("Failed to update permissions on `{}`", path.as_ref().display())] +pub fn allow_owner_execute(path: impl AsRef) -> Result { + // No-op on Windows. + Ok(()) +} + +/// Get the size of a file after gzip compression. +pub async fn compressed_size(path: impl AsRef) -> Result { + let file = ::tokio::io::BufReader::new(crate::fs::tokio::open(&path).await?); + let encoded_stream = GzipEncoder::with_quality(file, Level::Best); + crate::io::read_length(encoded_stream).await.map(into) +} + +pub fn check_if_identical(source: impl AsRef, target: impl AsRef) -> bool { + (|| -> Result { + #[allow(clippy::if_same_then_else)] // should be different after TODO + if metadata(&source)?.len() == metadata(&target)?.len() { + Ok(true) + } else if read(&source)? == read(&target)? { + // TODO: Not good for large files, should process them chunk by chunk. 
+ Ok(true) + } else { + Ok(false) + } + })() + .unwrap_or(false) +} + +pub fn copy_file_if_different(source: impl AsRef, target: impl AsRef) -> Result { + if !check_if_identical(&source, &target) { + trace!( + "Modified, will copy {} to {}.", + source.as_ref().display(), + target.as_ref().display() + ); + copy(&source, &target)?; + } else { + trace!("No changes, skipping {}.", source.as_ref().display()) + } + Ok(()) +} + +#[tracing::instrument(skip_all, fields( + src = %source.as_ref().display(), + dest = %target.as_ref().display()), + err)] +pub async fn copy_if_different(source: impl AsRef, target: impl AsRef) -> Result { + if tokio::metadata(&source).await?.is_file() { + return copy_file_if_different(source, target); + } + + let walkdir = walkdir::WalkDir::new(&source); + let entries = walkdir.into_iter().try_collect_vec()?; + for entry in entries.into_iter().filter(|e| e.file_type().is_file()) { + let entry_path = entry.path(); + let relative_path = pathdiff::diff_paths(entry_path, &source) + .context(format!("Failed to relativize path {}.", entry_path.display()))?; + copy_file_if_different(entry_path, target.as_ref().join(relative_path))?; + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::log::setup_logging; + use ::tokio; + + #[tokio::test] + #[ignore] + async fn copy_if_different_test() -> Result { + setup_logging()?; + copy_if_different("../../..", r"C:\temp\out").await?; + Ok(()) + } +} diff --git a/build/ci_utils/src/fs/tokio.rs b/build/ci_utils/src/fs/tokio.rs new file mode 100644 index 0000000000..544889a587 --- /dev/null +++ b/build/ci_utils/src/fs/tokio.rs @@ -0,0 +1,96 @@ +use crate::prelude::*; + +use tokio::fs::File; +use tokio::io::AsyncRead; + + +// ============== +// === Export === +// ============== + +pub use crate::fs::wrappers::tokio::*; + + + +/// Like the standard version but will create any missing parent directories from the path. 
+#[context("Failed to open path for writing: {}", path.as_ref().display())] +pub async fn create(path: impl AsRef) -> Result { + create_parent_dir_if_missing(&path).await?; + crate::fs::wrappers::tokio::create(&path).await +} + +/// Create a directory (and all missing parent directories), +/// +/// Does not fail when a directory already exists. +#[context("Failed to create directory {}", path.as_ref().display())] +pub async fn create_dir_if_missing(path: impl AsRef) -> Result { + let result = tokio::fs::create_dir_all(&path).await; + match result { + Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => { + trace!("Directory already exists: {}", path.as_ref().display()); + Ok(()) + } + result => { + trace!("Created directory: {}", path.as_ref().display()); + result.anyhow_err() + } + } +} + +/// Create a parent directory for path (and all missing parent directories), +/// +/// Does not fail when a directory already exists. +#[context("Failed to create parent directory for {}", path.as_ref().display())] +pub async fn create_parent_dir_if_missing(path: impl AsRef) -> Result { + if let Some(parent) = path.as_ref().parent() { + create_dir_if_missing(parent).await?; + Ok(parent.into()) + } else { + bail!("No parent directory for path {}.", path.as_ref().display()) + } +} + +pub async fn copy_to_file( + mut content: impl AsyncRead + Unpin, + output_path: impl AsRef, +) -> Result { + let mut output = create(output_path).await?; + tokio::io::copy(&mut content, &mut output).await.anyhow_err() +} + +/// Remove a directory with all its subtree. +/// +/// Does not fail if the directory is not found. 
+#[instrument(fields(path = %path.as_ref().display()), err, level = "trace")] +pub async fn remove_dir_if_exists(path: impl AsRef) -> Result { + let path = path.as_ref(); + let result = tokio::fs::remove_dir_all(&path).await; + match result { + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()), + result => result.context(format!("Failed to remove directory {}.", path.display())), + } +} + +/// Recreate directory, so it exists and is empty. +pub async fn reset_dir(path: impl AsRef) -> Result { + let path = path.as_ref(); + remove_dir_if_exists(&path).await?; + create_dir_if_missing(&path).await?; + Ok(()) +} + +pub async fn write_iter( + path: impl AsRef, + iter: impl IntoIterator>, +) -> Result { + let mut file = create(&path).await?; + for line in iter { + file.write_all(line.as_ref()) + .await + .with_context(|| format!("Failed to write to file {}.", path.as_ref().display()))?; + } + file.flush().await.with_context(|| { + format!("Failed to flush file {} after writing.", path.as_ref().display()) + })?; + Ok(()) +} diff --git a/build/ci_utils/src/fs/wrappers.rs b/build/ci_utils/src/fs/wrappers.rs new file mode 100644 index 0000000000..691cf41c2c --- /dev/null +++ b/build/ci_utils/src/fs/wrappers.rs @@ -0,0 +1,82 @@ +//! Wrappers over [`std::fs`] functions that provide sensible error messages, i.e. explaining what +//! operation was attempted and what was the relevant path. 
+ +use crate::prelude::*; + +use std::fs::File; +use std::fs::Metadata; +use std::io::Write; + + +// ============== +// === Export === +// ============== + +pub mod tokio; + + + +#[context("Failed to obtain metadata for file: {}", path.as_ref().display())] +pub fn metadata>(path: P) -> Result { + std::fs::metadata(&path).anyhow_err() +} + +#[context("Failed to copy file from {} to {}", from.as_ref().display(), to.as_ref().display())] +pub fn copy(from: impl AsRef, to: impl AsRef) -> Result { + std::fs::copy(&from, &to).anyhow_err() +} + +#[context("Failed to rename file from {} to {}", from.as_ref().display(), to.as_ref().display())] +pub fn rename(from: impl AsRef, to: impl AsRef) -> Result { + std::fs::rename(&from, &to).anyhow_err() +} + +#[context("Failed to read the file: {}", path.as_ref().display())] +pub fn read(path: impl AsRef) -> Result> { + std::fs::read(&path).anyhow_err() +} + +#[context("Failed to read the directory: {}", path.as_ref().display())] +pub fn read_dir(path: impl AsRef) -> Result { + std::fs::read_dir(&path).anyhow_err() +} + +#[context("Failed to read the file: {}", path.as_ref().display())] +pub fn read_to_string(path: impl AsRef) -> Result { + std::fs::read_to_string(&path).anyhow_err() +} + +#[context("Failed to write path: {}", path.as_ref().display())] +pub fn write(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result { + std::fs::write(&path, contents).anyhow_err() +} + +pub fn append(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result { + std::fs::OpenOptions::new() + .append(true) + .create(true) + .open(&path) + .context(format!("Failed to open {} for writing.", path.as_ref().display()))? 
+ .write_all(contents.as_ref()) + .context(format!("Failed to write to {}.", path.as_ref().display())) +} + +#[context("Failed to open path for reading: {}", path.as_ref().display())] +pub fn open(path: impl AsRef) -> Result { + File::open(&path).anyhow_err() +} + +#[context("Failed to open path for writing: {}", path.as_ref().display())] +pub fn create(path: impl AsRef) -> Result { + File::create(&path).anyhow_err() +} + +#[context("Failed to canonicalize path: {}", path.as_ref().display())] +pub fn canonicalize(path: impl AsRef) -> Result { + std::fs::canonicalize(&path).anyhow_err() +} + +#[context("Failed to create missing directories no path: {}", path.as_ref().display())] +pub fn create_dir_all(path: impl AsRef) -> Result { + std::fs::create_dir_all(&path).anyhow_err() +} diff --git a/build/ci_utils/src/fs/wrappers/tokio.rs b/build/ci_utils/src/fs/wrappers/tokio.rs new file mode 100644 index 0000000000..2c85d5bf42 --- /dev/null +++ b/build/ci_utils/src/fs/wrappers/tokio.rs @@ -0,0 +1,54 @@ +use crate::prelude::*; + +use tokio::fs::File; +use tokio::io::AsyncReadExt; + + + +#[context("Failed to obtain metadata for file: {}", path.as_ref().display())] +pub async fn metadata>(path: P) -> Result { + tokio::fs::metadata(&path).await.anyhow_err() +} + +#[context("Failed to open path for reading: {}", path.as_ref().display())] +pub async fn open(path: impl AsRef) -> Result { + File::open(&path).await.anyhow_err() +} + +#[context("Failed to open path for writing: {}", path.as_ref().display())] +pub async fn create(path: impl AsRef) -> Result { + File::create(&path).await.anyhow_err() +} + +#[context("Failed to create missing directories no path: {}", path.as_ref().display())] +pub async fn create_dir_all(path: impl AsRef) -> Result { + tokio::fs::create_dir_all(&path).await.anyhow_err() +} + +#[context("Failed to read the directory: {}", path.as_ref().display())] +pub async fn read_dir(path: impl AsRef) -> Result { + tokio::fs::read_dir(&path).await.anyhow_err() +} + 
+#[context("Failed to remove directory with the subtree: {}", path.as_ref().display())] +pub async fn remove_dir_all(path: impl AsRef) -> Result { + tokio::fs::remove_dir_all(&path).await.anyhow_err() +} + +#[context("Failed to write file: {}", path.as_ref().display())] +pub async fn write(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result { + tokio::fs::write(&path, &contents).await.anyhow_err() +} + +#[context("Failed to read file: {}", path.as_ref().display())] +pub async fn read>(path: P) -> Result> { + let mut file = File::open(&path).await?; + let mut contents = Vec::new(); + file.read_to_end(&mut contents).await?; + Ok(contents) +} + +#[context("Failed to read the file: {}", path.as_ref().display())] +pub async fn read_to_string(path: impl AsRef) -> Result { + tokio::fs::read_to_string(&path).await.anyhow_err() +} diff --git a/build/ci_utils/src/future.rs b/build/ci_utils/src/future.rs new file mode 100644 index 0000000000..cf5bc7d4f7 --- /dev/null +++ b/build/ci_utils/src/future.rs @@ -0,0 +1,94 @@ +use crate::prelude::*; + +use futures_util::future::OptionFuture; + + + +#[derive(Copy, Clone, Debug)] +pub enum AsyncPolicy { + Sequential, + FutureParallelism, + TaskParallelism, +} + +pub async fn join_all>, T, E>( + futures: I, + parallel: AsyncPolicy, +) -> Vec> +where + I: IntoIterator, + F: Send + 'static, + T: Send + 'static, + E: Into + Send + 'static, +{ + match parallel { + AsyncPolicy::Sequential => { + let mut ret = Vec::new(); + for future in futures { + ret.push(future.await.anyhow_err()); + } + ret + } + AsyncPolicy::FutureParallelism => + futures::future::join_all(futures).await.into_iter().map(|r| r.anyhow_err()).collect(), + AsyncPolicy::TaskParallelism => { + let tasks = futures + .into_iter() + .map(|future| async move { tokio::task::spawn(future).await?.anyhow_err() }); + futures::future::join_all(tasks).await + } + } +} + +pub async fn try_join_all>, T, E>( + futures: I, + parallel: AsyncPolicy, +) -> Result> +where + I: IntoIterator, 
+ F: Send + 'static, + T: Send + 'static, + E: Into + Send + 'static, +{ + match parallel { + AsyncPolicy::Sequential => { + let mut ret = Vec::new(); + for future in futures { + ret.push(future.await.anyhow_err()?); + } + Ok(ret) + } + AsyncPolicy::FutureParallelism => futures::future::try_join_all(futures).await.anyhow_err(), + AsyncPolicy::TaskParallelism => { + let tasks = futures + .into_iter() + .map(|future| async move { tokio::task::spawn(future).await?.anyhow_err() }); + futures::future::try_join_all(tasks).await + } + } +} + +pub fn perhaps(should_do: bool, f: impl FnOnce() -> F) -> OptionFuture { + should_do.then(f).into() +} + +// pub fn perhaps_spawn_try<'a, F>( +// should_do: bool, +// f: impl FnOnce() -> F + 'a, +// ) -> BoxFuture<'static, Result>> +// where +// F: TryFuture + Send + 'static, +// F::Ok: Send + 'static, +// F::Error: Send + Sync + 'static, +// anyhow::Error: From, +// { +// let job = should_do.then(|| tokio::spawn(f().into_future())); +// async move { +// if let Some(job) = job { +// Ok(Some(job.await??)) +// } else { +// Ok(None) +// } +// } +// .boxed() +// } diff --git a/build/ci_utils/src/github.rs b/build/ci_utils/src/github.rs new file mode 100644 index 0000000000..78e5eed1b1 --- /dev/null +++ b/build/ci_utils/src/github.rs @@ -0,0 +1,284 @@ +use crate::prelude::*; + +use crate::cache::download::DownloadFile; + +use anyhow::Context; +use headers::HeaderMap; +use headers::HeaderValue; +use octocrab::models::repos::Asset; +use octocrab::models::repos::Release; +use octocrab::models::workflows::WorkflowListArtifact; +use octocrab::models::ArtifactId; +use octocrab::models::AssetId; +use octocrab::models::ReleaseId; +use octocrab::models::RunId; +use octocrab::params::actions::ArchiveFormat; +use reqwest::Response; + + + +const MAX_PER_PAGE: u8 = 100; + +pub mod model; +pub mod release; + +/// Goes over all the pages and returns result. 
+/// +/// We prefer taking a future page result rather than page itself to be able to easily wrap both +/// actions with a single Result context. +// TODO [mwu]: Yielding a Stream that fetches pages as-needed would be better. +pub async fn get_all( + client: &Octocrab, + f: impl Future>>, +) -> octocrab::Result> { + let first_page = f.await?; + client.all_pages(first_page).await +} + +/// Entity that uniquely identifies a GitHub-hosted repository. +#[async_trait] +pub trait RepoPointer: Display { + fn owner(&self) -> &str; + fn name(&self) -> &str; + + /// Generate a token that can be used to register a new runner for this repository. + async fn generate_runner_registration_token( + &self, + octocrab: &Octocrab, + ) -> Result { + let path = + iformat!("/repos/{self.owner()}/{self.name()}/actions/runners/registration-token"); + let url = octocrab.absolute_url(path)?; + octocrab.post(url, EMPTY_REQUEST_BODY).await.context(format!( + "Failed to generate a runner registration token for the {self} repository." + )) + } + + /// The repository's URL. 
+ fn url(&self) -> Result { + let url_text = iformat!("https://github.com/{self.owner()}/{self.name()}"); + Url::parse(&url_text) + .context(format!("Failed to generate an URL for the {self} repository.")) + } + + fn repos<'a>(&'a self, client: &'a Octocrab) -> octocrab::repos::RepoHandler<'a> { + client.repos(self.owner(), self.name()) + } + + async fn all_releases(&self, client: &Octocrab) -> Result> { + get_all(client, self.repos(client).releases().list().per_page(MAX_PER_PAGE).send()) + .await + .context(format!("Failed to list all releases in the {self} repository.")) + } + + async fn latest_release(&self, client: &Octocrab) -> Result { + self.repos(client) + .releases() + .get_latest() + .await + .context(format!("Failed to get the latest release in the {self} repository.")) + } + + async fn find_release_by_id( + &self, + client: &Octocrab, + release_id: ReleaseId, + ) -> Result { + let repo_handler = self.repos(client); + let releases_handler = repo_handler.releases(); + releases_handler + .get_by_id(release_id) + .await + .context(format!("Failed to find release by id `{release_id}` in `{self}`.")) + } + + #[tracing::instrument(skip(client), fields(%self, %text), err)] + async fn find_release_by_text(&self, client: &Octocrab, text: &str) -> anyhow::Result { + self.all_releases(client) + .await? + .into_iter() + .find(|release| release.tag_name.contains(text)) + .inspect(|release| info!("Found release at: {} (id={}).", release.html_url, release.id)) + .context(format!("No release with tag matching `{text}` in {self}.")) + } + + #[tracing::instrument(skip(client), fields(%self, %run_id, %name), err, ret)] + async fn find_artifact_by_name( + &self, + client: &Octocrab, + run_id: RunId, + name: &str, + ) -> Result { + let artifacts = client + .actions() + .list_workflow_run_artifacts(self.owner(), self.name(), run_id) + .per_page(100) + .send() + .await + .context(format!("Failed to list artifacts of run {run_id} in {self}."))? 
+ .value + .context("Failed to find any artifacts.")?; + + artifacts + .into_iter() + .find(|artifact| artifact.name == name) + .context(format!("Failed to find artifact by name '{name}'.")) + } + + async fn download_artifact(&self, client: &Octocrab, artifact_id: ArtifactId) -> Result { + client + .actions() + .download_artifact(self.owner(), self.name(), artifact_id, ArchiveFormat::Zip) + .await + .context(format!("Failed to download artifact with ID={artifact_id}.")) + } + + async fn download_and_unpack_artifact( + &self, + client: &Octocrab, + artifact_id: ArtifactId, + output_dir: &Path, + ) -> Result { + let bytes = self.download_artifact(client, artifact_id).await?; + crate::archive::zip::extract_bytes(bytes, output_dir)?; + Ok(()) + } + + #[tracing::instrument(name="Get the asset information.", skip(client), fields(self=%self), err)] + async fn asset(&self, client: &Octocrab, asset_id: AssetId) -> Result { + self.repos(client).releases().get_asset(asset_id).await.anyhow_err() + } + + fn download_asset_job(&self, octocrab: &Octocrab, asset_id: AssetId) -> DownloadFile { + let path = iformat!("/repos/{self.owner()}/{self.name()}/releases/assets/{asset_id}"); + // Unwrap will work, because we are appending relative URL constant. 
+ let url = octocrab.absolute_url(path).unwrap(); + DownloadFile { + client: octocrab.client.clone(), + key: crate::cache::download::Key { + url, + additional_headers: HeaderMap::from_iter([( + reqwest::header::ACCEPT, + HeaderValue::from_static(mime::APPLICATION_OCTET_STREAM.as_ref()), + )]), + }, + } + } + + #[tracing::instrument(name="Download the asset.", skip(client), fields(self=%self), err)] + async fn download_asset(&self, client: &Octocrab, asset_id: AssetId) -> Result { + self.download_asset_job(client, asset_id).send_request().await + } + + #[tracing::instrument(name="Download the asset to a file.", skip(client, output_path), fields(self=%self, dest=%output_path.as_ref().display()), err)] + async fn download_asset_as( + &self, + client: &Octocrab, + asset_id: AssetId, + output_path: impl AsRef + Send + Sync + 'static, + ) -> Result { + let response = self.download_asset(client, asset_id).await?; + crate::io::web::stream_response_to_file(response, &output_path).await + } + + #[tracing::instrument(name="Download the asset to a directory.", + skip(client, output_dir, asset), + fields(self=%self, dest=%output_dir.as_ref().display(), id = %asset.id), + err)] + async fn download_asset_to( + &self, + client: &Octocrab, + asset: &Asset, + output_dir: impl AsRef + Send + Sync + 'static, + ) -> Result { + let output_path = output_dir.as_ref().join(&asset.name); + self.download_asset_as(client, asset.id, output_path.clone()).await?; + Ok(output_path) + } +} + +#[async_trait] +pub trait OrganizationPointer { + /// Organization name. + fn name(&self) -> &str; + + /// Generate a token that can be used to register a new runner for this repository. 
+ async fn generate_runner_registration_token( + &self, + octocrab: &Octocrab, + ) -> anyhow::Result { + let path = iformat!("/orgs/{self.name()}/actions/runners/registration-token"); + let url = octocrab.absolute_url(path)?; + octocrab.post(url, EMPTY_REQUEST_BODY).await.map_err(Into::into) + } + + /// The organization's URL. + fn url(&self) -> Result { + let url_text = iformat!("https://github.com/{self.name()}"); + Url::parse(&url_text).map_err(Into::into) + } +} + +/// Get the biggest asset containing given text. +#[instrument(skip(release), fields(id = %release.id, url = %release.url), err)] +pub fn find_asset_by_text<'a>(release: &'a Release, text: &str) -> anyhow::Result<&'a Asset> { + release + .assets + .iter() + .filter(|asset| asset.name.contains(text)) + .max_by_key(|asset| asset.size) + .ok_or_else(|| { + anyhow!("Cannot find release asset by string {} in the release {}.", text, release.url) + }) + .inspect(|asset| trace!("Found asset: {:#?}", asset)) +} + +/// Get the biggest asset containing given text. +#[instrument(skip(release), fields(id = %release.id, url = %release.url), ret(Display), err)] +pub fn find_asset_url_by_text<'a>(release: &'a Release, text: &str) -> anyhow::Result<&'a Url> { + let matching_asset = find_asset_by_text(release, text)?; + Ok(&matching_asset.browser_download_url) +} + +/// Obtain URL to an archive with the latest runner package for a given system. +/// +/// Octocrab client does not need to bo authorized with a PAT for this. However, being authorized +/// will help with GitHub API query rate limits. 
+pub async fn latest_runner_url(octocrab: &Octocrab, os: OS) -> anyhow::Result { + let latest_release = octocrab.repos("actions", "runner").releases().get_latest().await?; + + let os_name = match os { + OS::Linux => "linux", + OS::Windows => "win", + OS::MacOS => "osx", + other_os => unimplemented!("System `{}` is not yet supported!", other_os), + }; + + let arch_name = match TARGET_ARCH { + Arch::X86_64 => "x64", + Arch::Arm => "arm", + Arch::AArch64 => "arm64", + other_arch => unimplemented!("Architecture `{}` is not yet supported!", other_arch), + }; + + let platform_name = format!("{}-{}", os_name, arch_name); + find_asset_url_by_text(&latest_release, &platform_name).cloned() +} + +pub async fn fetch_runner(octocrab: &Octocrab, os: OS, output_dir: impl AsRef) -> Result { + let url = latest_runner_url(octocrab, os).await?; + crate::io::download_and_extract(url, output_dir).await +} + +/// Sometimes octocrab is just not enough. +/// +/// Client has set the authorization header. +pub fn create_client(pat: impl AsRef) -> Result { + let mut header_map = reqwest::header::HeaderMap::new(); + header_map.append(reqwest::header::AUTHORIZATION, format!("Bearer {}", pat.as_ref()).parse()?); + reqwest::Client::builder() + .user_agent("enso-build") + .default_headers(header_map) + .build() + .anyhow_err() +} diff --git a/build/ci_utils/src/github/model.rs b/build/ci_utils/src/github/model.rs new file mode 100644 index 0000000000..a43f1c53f8 --- /dev/null +++ b/build/ci_utils/src/github/model.rs @@ -0,0 +1,52 @@ +//! Structures in this module model the types used in GitHub REST API. + +use crate::prelude::*; + + + +/// Description of the self-hosted runner, element of the list runners response. +/// +/// See: +/// +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct Runner { + pub id: i32, + pub name: String, + pub os: String, + pub status: String, + pub busy: bool, + pub labels: Vec

(inner); + Self::from(inner) + } + + fn spawn(&mut self) -> Result { + self.borrow_mut().spawn().anyhow_err() + } +} + +pub trait IsCommandWrapper { + fn borrow_mut_command(&mut self) -> &mut tokio::process::Command; + + fn with_applied(mut self, manipulator: &M) -> Self + where Self: Sized { + manipulator.apply(&mut self); + self + } + + fn apply(&mut self, manipulator: &M) -> &mut Self { + manipulator.apply(self); + self + } + + fn apply_iter(&mut self, iter: impl IntoIterator) -> &mut Self { + for manipulator in iter { + self.apply(&manipulator); + } + self + } + + fn apply_opt(&mut self, manipulator: Option<&M>) -> &mut Self { + if let Some(m) = manipulator { + self.apply(m); + } + self + } + + fn try_applying(&mut self, manipulator: &M) -> Result<&mut Self> { + manipulator.try_applying(self).map(|_| self) + } + + fn set_env + ?Sized>( + &mut self, + variable: T, + value: &V, + ) -> Result<&mut Self> { + self.env(variable.name(), variable.generate(value.borrow())?); + Ok(self) + } + + fn set_env_opt + ?Sized>( + &mut self, + variable: T, + value: Option<&V>, + ) -> Result<&mut Self> { + if let Some(value) = value { + self.set_env(variable, value) + } else { + Ok(self) + } + } + + /////////// + + fn arg>(&mut self, arg: S) -> &mut Self { + self.borrow_mut_command().arg(arg); + self + } + + fn args(&mut self, args: I) -> &mut Self + where + I: IntoIterator, + S: AsRef, { + self.borrow_mut_command().args(args); + self + } + + fn env(&mut self, key: K, val: V) -> &mut Self + where + K: AsRef, + V: AsRef, { + self.borrow_mut_command().env(key, val); + self + } + + fn envs(&mut self, vars: I) -> &mut Self + where + I: IntoIterator, + K: AsRef, + V: AsRef, { + self.borrow_mut_command().envs(vars); + self + } + + fn env_remove>(&mut self, key: K) -> &mut Self { + self.borrow_mut_command().env_remove(key); + self + } + + fn env_clear(&mut self) -> &mut Self { + self.borrow_mut_command().env_clear(); + self + } + + fn current_dir>(&mut self, dir: Pa) -> &mut Self { + 
self.borrow_mut_command().current_dir(dir); + self + } + + fn stdin>(&mut self, cfg: T) -> &mut Self { + self.borrow_mut_command().stdin(cfg); + self + } + + fn stdout>(&mut self, cfg: T) -> &mut Self { + self.borrow_mut_command().stdout(cfg); + self + } + + fn stderr>(&mut self, cfg: T) -> &mut Self { + self.borrow_mut_command().stderr(cfg); + self + } + + fn kill_on_drop(&mut self, kill_on_drop: bool) -> &mut Self { + self.borrow_mut_command().kill_on_drop(kill_on_drop); + self + } + + #[cfg(windows)] + #[cfg_attr(docsrs, doc(cfg(windows)))] + fn creation_flags(&mut self, flags: u32) -> &mut Self { + self.borrow_mut_command().creation_flags(flags); + self + } + + #[cfg(unix)] + #[cfg_attr(docsrs, doc(cfg(unix)))] + fn uid(&mut self, id: u32) -> &mut Self { + self.borrow_mut_command().uid(id); + self + } + + #[cfg(unix)] + #[cfg_attr(docsrs, doc(cfg(unix)))] + fn gid(&mut self, id: u32) -> &mut Self { + self.borrow_mut_command().gid(id); + self + } + + // fn spawn(&mut self) -> Result { + // self.borrow_mut_command().spawn().anyhow_err() + // } + // + // + // fn status(&mut self) -> BoxFuture<'static, Result> { + // let fut = self.borrow_mut_command().status(); + // async move { fut.await.anyhow_err() }.boxed() + // } + // + // fn output(&mut self) -> BoxFuture<'static, Result> { + // let fut = self.borrow_mut_command().output(); + // async move { fut.await.anyhow_err() }.boxed() + // } +} + +impl> IsCommandWrapper for T { + fn borrow_mut_command(&mut self) -> &mut tokio::process::Command { + self.borrow_mut() + } +} + +impl MyCommand

for Command { + fn new_program>(program: S) -> Self { + let inner = tokio::process::Command::new(program); + Self::new_over::

(inner) + } +} + +pub trait CommandOption { + fn arg(&self) -> Option<&str> { + None + } + fn args(&self) -> Vec<&str> { + vec![] + } +} + +pub struct Command { + pub inner: tokio::process::Command, + pub status_checker: Arc Result + Send + Sync>, +} + +impl Borrow for Command { + fn borrow(&self) -> &tokio::process::Command { + &self.inner + } +} + +impl BorrowMut for Command { + fn borrow_mut(&mut self) -> &mut tokio::process::Command { + &mut self.inner + } +} + +impl Debug for Command { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.inner) + } +} + +impl Command { + pub fn new>(program: S) -> Command { + let inner = tokio::process::Command::new(program); + let status_checker = Arc::new(|status: ExitStatus| status.exit_ok().anyhow_err()); + Self { inner, status_checker } + } + + pub fn new_over(inner: tokio::process::Command) -> Self { + Command { inner, status_checker: Arc::new(P::handle_exit_status) } + } + + pub fn spawn_intercepting(&mut self) -> Result { + self.stdout(Stdio::piped()); + self.stderr(Stdio::piped()); + + let program = self.inner.as_std().get_program(); + let program = Path::new(program).file_stem().unwrap_or_default().to_os_string(); + let program = program.to_string_lossy(); + + let mut child = self.spawn()?; + + // FIXME unwraps + spawn_log_processor(format!("{program}ℹ️"), child.stdout.take().unwrap()); + spawn_log_processor(format!("{program}⚠️"), child.stderr.take().unwrap()); + Ok(child) + } + + pub fn run_ok(&mut self) -> BoxFuture<'static, Result<()>> { + let pretty = self.describe(); + let span = info_span!( + "Running process.", + status = tracing::field::Empty, + pid = tracing::field::Empty, + command = tracing::field::Empty, + ) + .entered(); + let child = self.spawn_intercepting(); + let status_checker = self.status_checker.clone(); + async move { + let mut child = child?; + let status = child + .wait() + .inspect_ok(|exit_status| { + tracing::Span::current().record("status", 
exit_status.code()); + }) + .await?; + status_checker(status).context(format!("Command failed: {}", pretty)) + } + .instrument(span.exit()) + .boxed() + } + + pub fn output_ok(&mut self) -> BoxFuture<'static, Result> { + let pretty = self.describe(); + let span = info_span!( + "Running process for the output.", + status = tracing::field::Empty, + pid = tracing::field::Empty, + command = tracing::field::Empty, + ) + .entered(); + + self.stdout(Stdio::piped()); + self.stderr(Stdio::piped()); + let child = self.spawn(); + let status_checker = self.status_checker.clone(); + async move { + let child = child?; + let output = + child.wait_with_output().await.context("Failed while waiting for output.")?; + tracing::Span::current().record("status", output.status.code()); + status_checker(output.status).with_context(|| { + format!( + "Stdout:\n{}\n\nStderr:\n{}\n", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr), + ) + })?; + Result::Ok(output) + } + .map_err(move |e| e.context(format!("Failed to get output of the command: {}", pretty))) + .instrument(span.exit()) + .boxed() + } + + pub fn run_stdout(&mut self) -> BoxFuture<'static, Result> { + let output = self.output_ok(); + async move { + output + .await? 
+ .into_stdout_string() + .context("Failed to decode standard output as UTF8 text.") + } + .boxed() + } + + pub fn spawn(&mut self) -> Result { + let pretty = self.describe(); + + let current_span = tracing::Span::current(); + if current_span.field("command").is_some() { + tracing::Span::current().record("command", &field::display(&pretty)); + debug!("Spawning."); + } else { + debug!("Spawning {}.", pretty); + } + + self.inner.spawn().context(format!("Failed to spawn: {}", pretty)).inspect(|child| { + if let Some(pid) = child.id() { + current_span.record("pid", pid); + } + }) + } + + // pub fn status(&mut self) -> BoxFuture<'static, Result> { + // let fut = self.borrow_mut_command().status(); + // async move { fut.await.anyhow_err() }.boxed() + // } + // + // pub fn output(&mut self) -> BoxFuture<'static, Result> { + // let fut = self.borrow_mut_command().output(); + // async move { fut.await.anyhow_err() }.boxed() + // } +} + +impl Command { + pub fn with_arg(self, arg: impl AsRef) -> Self { + let mut this = self; + this.arg(arg); + this + } + + pub fn with_args(self, args: impl IntoIterator>) -> Self { + let mut this = self; + this.args(args); + this + } + + pub fn with_stdin(self, stdin: Stdio) -> Self { + let mut this = self; + this.stdin(stdin); + this + } + + pub fn with_stdout(self, stdout: Stdio) -> Self { + let mut this = self; + this.stdout(stdout); + this + } + + pub fn with_stderr(self, stderr: Stdio) -> Self { + let mut this = self; + this.stderr(stderr); + this + } + + pub fn with_current_dir(self, dir: impl AsRef) -> Self { + let mut this = self; + this.current_dir(dir); + this + } +} + +pub fn spawn_log_processor( + prefix: String, + out: impl AsyncRead + Send + Unpin + 'static, +) -> JoinHandle { + tokio::task::spawn( + async move { + info!("{prefix} "); + let bufread = BufReader::new(out); + let mut lines = bufread.split(b'\n'); + while let Some(line_bytes) = lines.next_segment().await? 
{ + match String::from_utf8(line_bytes) { + Ok(line) => { + let line = line.trim_end_matches('\r'); + info!("{prefix} {line}"); + } + Err(e) => { + error!("{prefix} Failed to decode a line from output: {e}"); + warn!( + "{prefix} Raw buffer: {:?}. Decoded with placeholders: {}", + e.as_bytes(), + String::from_utf8_lossy(e.as_bytes()) + ); + } + } + } + info!("{prefix} "); + Result::Ok(()) + } + .inspect_err(|e| error!("Fatal error while processing process output: {e}")), + ) +} + +pub trait Manipulator { + fn apply(&self, command: &mut C); +} + +pub trait FallibleManipulator { + fn try_applying(&self, command: &mut C) -> Result; +} + + +#[cfg(test)] +mod tests { + // use super::*; + // use crate::global::new_spinner; + // // use crate::global::println; + // use tokio::io::AsyncBufReadExt; + // use tokio::io::AsyncRead; + // use tokio::io::BufReader; + // use tokio::process::ChildStdout; + // use tokio::task::JoinHandle; + + // pub fn spawn_log_processor( + // prefix: String, + // out: impl AsyncRead + Send + Unpin + 'static, + // ) -> JoinHandle { + // tokio::task::spawn(async move { + // let bufread = BufReader::new(out); + // let mut lines = bufread.lines(); + // while let Some(line) = lines.next_line().await? 
{ + // println(format!("{} {}", prefix, line)) + // } + // println(format!("{} {}", prefix, "")); + // Result::Ok(()) + // }) + // }U + // + // pub fn spawn_logged(cmd: &mut Command) { + // cmd.stdout(Stdio::piped()); + // cmd.stderr(Stdio::piped()); + // } + // + // #[tokio::test] + // async fn test_cmd_out_interception() -> Result { + // pretty_env_logger::init(); + // let mut cmd = Command::new("cargo"); + // cmd.arg("update"); + // cmd.stdout(Stdio::piped()); + // cmd.stderr(Stdio::piped()); + // + // let mut child = cmd.spawn()?; + // spawn_log_processor("[out]".into(), child.stdout.take().unwrap()); + // spawn_log_processor("[err]".into(), child.stderr.take().unwrap()); + // let bar = new_spinner(format!("Running {:?}", cmd)); + // child.wait().await?; + // Ok(()) + // } + // + // #[tokio::test] + // async fn spawning() -> Result { + // println!("Start"); + // tokio::process::Command::new("python").spawn()?.wait().await?; + // println!("Finish"); + // Ok(()) + // } +} diff --git a/build/ci_utils/src/program/command/provider.rs b/build/ci_utils/src/program/command/provider.rs new file mode 100644 index 0000000000..9664e2f775 --- /dev/null +++ b/build/ci_utils/src/program/command/provider.rs @@ -0,0 +1,19 @@ +use crate::prelude::*; + + + +pub trait CommandProvider { + fn command(&self) -> Result; +} + +pub trait CommandProviderExt: CommandProvider { + fn call_arg(&self, arg: impl AsRef) -> BoxFuture<'static, Result> { + self.call_args(once(arg)) + } + + fn call_args(&self, args: impl IntoIterator>) -> BoxFuture<'static, Result> { + self.command().and_then_async(|mut cmd| cmd.args(args).run_ok()).boxed() + } +} + +impl CommandProviderExt for T {} diff --git a/build/ci_utils/src/program/location.rs b/build/ci_utils/src/program/location.rs new file mode 100644 index 0000000000..50a15df386 --- /dev/null +++ b/build/ci_utils/src/program/location.rs @@ -0,0 +1,35 @@ +use crate::prelude::*; + +use crate::program::command::MyCommand; + + + +#[derive(Clone, Debug, 
derive_more::Deref, derive_more::DerefMut, PartialEq, Eq)] +pub struct Location

{ + #[deref] + #[deref_mut] + pub executable_path: PathBuf, + pub phantom_data: PhantomData

, +} + +impl

AsRef for Location

{ + fn as_ref(&self) -> &Path { + &self.executable_path + } +} + +impl

AsRef for Location

{ + fn as_ref(&self) -> &OsStr { + self.executable_path.as_ref() + } +} + +impl Location

{ + pub fn new(path: impl Into) -> Self { + Self { executable_path: path.into(), phantom_data: default() } + } + + pub fn cmd(&self) -> P::Command { + P::Command::new_program(self) + } +} diff --git a/build/ci_utils/src/program/resolver.rs b/build/ci_utils/src/program/resolver.rs new file mode 100644 index 0000000000..06cc9dbf12 --- /dev/null +++ b/build/ci_utils/src/program/resolver.rs @@ -0,0 +1,44 @@ +use crate::prelude::*; + + + +#[derive(Debug)] +pub struct Resolver

{ + pub cwd: PathBuf, + pub names: Vec, + pub lookup_dirs: OsString, + pub phantom_data: PhantomData

, +} + +impl

Resolver

{ + pub fn new(names: Vec<&str>, fallback_dirs: Vec) -> Result { + let path = std::env::var_os("PATH").unwrap_or_default(); + let env_path_dirs = std::env::split_paths(&path); + let lookup_dirs = std::env::join_paths(env_path_dirs.chain(fallback_dirs))?; + let names = names.into_iter().map(OsString::from).collect(); + let cwd = std::env::current_dir()?; + let phantom_data = default(); + Ok(Resolver { cwd, names, lookup_dirs, phantom_data }) + } + pub fn lookup_all(self) -> impl Iterator { + let Self { names, lookup_dirs, cwd, phantom_data: _phantom_data } = self; + names + .into_iter() + .filter_map(move |name| { + // We discard this error, as "error finding program" is like "no program available". + which::which_in_all(name, Some(lookup_dirs.clone()), cwd.clone()).ok() + }) + .flatten() + } + + pub fn lookup(self) -> Result { + let empty = Cow::from(""); + let names = self.names.iter().map(|name| name.to_string_lossy()).collect_vec(); + let name = names.first().unwrap_or(&empty).to_string(); + let names = names.join(", "); + let locations = self.lookup_dirs.clone(); + self.lookup_all().next().ok_or_else(|| { + anyhow!("Failed to find a program `{}`. Recognized executable names: {}. Tested locations: {}", name, names, locations.to_string_lossy()) + }) + } +} diff --git a/build/ci_utils/src/program/shell.rs b/build/ci_utils/src/program/shell.rs new file mode 100644 index 0000000000..4a82da4ff9 --- /dev/null +++ b/build/ci_utils/src/program/shell.rs @@ -0,0 +1,48 @@ +use crate::prelude::*; + + + +pub trait Shell: Program { + fn run_command(&self) -> Result; + fn run_script(&self, script_path: impl AsRef) -> Result; + fn run_shell(&self) -> Result; +} + +// Deduces shell from file extension. +pub fn run_script(script_path: impl AsRef) -> Result { + let shell_kind = match script_path.as_ref().extension() { + Some(extension) => Recognized::from_extension(extension), + None => bail!( + "Cannot deduce shell for script {}. 
Missing file extension.", + script_path.as_ref().display() + ), + }?; + shell_kind.run_script(script_path) +} + + +#[derive(Copy, Clone, Debug)] +pub enum Recognized { + Command, + PowerShell, + Bash, +} + +impl Recognized { + pub fn from_extension(extension: impl AsRef) -> Result { + Ok(match extension.as_ref().to_string_lossy().as_ref() { + "cmd" | "bat" => Self::Command, + "ps1" | "pwsh" => Self::PowerShell, + "sh" => Self::Bash, + extension => bail!("Unrecognized shell script extension: {}.", extension), + }) + } + + pub fn run_script(self, script_path: impl AsRef) -> Result { + match self { + Recognized::Command => crate::programs::Cmd.run_script(script_path), + Recognized::PowerShell => crate::programs::PwSh.run_script(script_path), + Recognized::Bash => crate::programs::Bash.run_script(script_path), + } + } +} diff --git a/build/ci_utils/src/program/version.rs b/build/ci_utils/src/program/version.rs new file mode 100644 index 0000000000..862ec265e2 --- /dev/null +++ b/build/ci_utils/src/program/version.rs @@ -0,0 +1,76 @@ +use crate::prelude::*; + +use regex::Regex; +use std::sync::LazyLock; + + + +// Taken from the official semver description: +// https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string +const SEMVER_REGEX_CODE: &str = r"(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?"; + +/// Regular expression that matches a semver within a text. +static SEMVER_REGEX: LazyLock = LazyLock::new(|| + // unwrap safe, as this is covered by test `semver_regex_parses`. 
+ Regex::new(SEMVER_REGEX_CODE).unwrap()); + +pub trait IsVersion: Debug + Display + PartialEq + Eq + Clone + Send + Sync + 'static { + fn find_in_text_internal(text: &str) -> Result; + + fn find_in_text(text: &str) -> Result + where Self: Sized { + Self::find_in_text_internal(text).context(r#"Failed to find semver in the text: "{text}"."#) + } +} + +impl IsVersion for Version { + fn find_in_text_internal(text: &str) -> Result { + let matched = + SEMVER_REGEX.find(text).context("No semver-like substring found within the text.")?; + let version_text = matched.as_str(); + Version::from_str(version_text) + } +} + +pub trait IsVersionPredicate: Display + Send + 'static { + type Version: IsVersion; + fn matches(&self, version: &Self::Version) -> bool; + fn require(&self, version: &Self::Version) -> Result { + ensure!( + self.matches(version), + "Version {} does not match the predicate {}.", + version, + self + ); + Ok(()) + } +} + +impl IsVersionPredicate for semver::VersionReq { + type Version = Version; + fn matches(&self, version: &Self::Version) -> bool { + self.matches(version) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn semver_regex_parses() { + let _ = SEMVER_REGEX.deref(); // Does not panic. + } + + #[test] + fn parse_cargo() -> Result { + let text = "cargo 1.57.0-nightly (c7957a74b 2021-10-11)"; + let version = Version::find_in_text(text)?; + assert_eq!(version.major, 1); + assert_eq!(version.minor, 57); + assert_eq!(version.patch, 0); + assert_eq!(version.pre, semver::Prerelease::new("nightly")?); + assert_eq!(version.build, <_>::default()); + Ok(()) + } +} diff --git a/build/ci_utils/src/program/with_cwd.rs b/build/ci_utils/src/program/with_cwd.rs new file mode 100644 index 0000000000..cfbdeacb0a --- /dev/null +++ b/build/ci_utils/src/program/with_cwd.rs @@ -0,0 +1,26 @@ +use crate::prelude::*; + + + +/// Wrapper over a program that invokes it with a given working directory. 
+#[derive(Clone, Debug, Default)] +pub struct WithCwd { + pub working_directory: Option, + pub underlying_program: T, +} + +impl Program for WithCwd { + fn executable_name(&self) -> &str { + self.underlying_program.executable_name() + } + + fn current_directory(&self) -> Option { + self.working_directory.clone() + } +} + +impl WithCwd { + pub fn new(underlying_program: T, working_directory: impl Into) -> Self { + Self { underlying_program, working_directory: Some(working_directory.into()) } + } +} diff --git a/build/ci_utils/src/programs.rs b/build/ci_utils/src/programs.rs new file mode 100644 index 0000000000..e38129fefb --- /dev/null +++ b/build/ci_utils/src/programs.rs @@ -0,0 +1,49 @@ +use crate::prelude::*; + + +// ============== +// === Export === +// ============== + +pub mod cargo; +pub mod cmd; +pub mod conda; +pub mod docker; +pub mod flatc; +pub mod git; +pub mod go; +pub mod graal; +pub mod java; +pub mod javac; +pub mod node; +pub mod npx; +pub mod pwsh; +pub mod robocopy; +pub mod rsync; +pub mod rustc; +pub mod rustup; +pub mod sbt; +pub mod seven_zip; +pub mod sh; +pub mod tar; +pub mod vs; +pub mod vswhere; +pub mod wasm_opt; +pub mod wasm_pack; + +pub use cargo::Cargo; +pub use cmd::Cmd; +pub use conda::Conda; +pub use docker::Docker; +pub use flatc::Flatc; +pub use git::Git; +pub use go::Go; +pub use java::Java; +pub use javac::Javac; +pub use node::Node; +pub use node::Npm; +pub use pwsh::PwSh; +pub use sbt::Sbt; +pub use seven_zip::SevenZip; +pub use sh::Bash; +pub use wasm_pack::WasmPack; diff --git a/build/ci_utils/src/programs/cargo.rs b/build/ci_utils/src/programs/cargo.rs new file mode 100644 index 0000000000..20ed5c418f --- /dev/null +++ b/build/ci_utils/src/programs/cargo.rs @@ -0,0 +1,137 @@ +use crate::prelude::*; + +use crate::env::new::Separated; +use crate::program::command::Manipulator; + + +// ============== +// === Export === +// ============== + +pub mod build_env; +pub mod clippy; +pub mod fmt; + + + +/// Extra flags that Cargo 
invokes rustc with. +/// +/// See: https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-reads +pub const CARGO_ENCODED_RUSTFLAGS: Separated = + Separated { separator: "\x1F", name: "CARGO_ENCODED_RUSTFLAGS" }; + +pub const RUSTFLAGS: Separated = Separated { separator: " ", name: "RUSTFLAGS" }; + +#[derive(Clone, Copy, Debug, Default)] +pub struct Cargo; + +impl Program for Cargo { + fn init_command<'a>(&self, cmd: &'a mut Self::Command) -> &'a mut Self::Command { + Color::Always.apply(cmd); + cmd + } + fn executable_name(&self) -> &'static str { + "cargo" + } +} + +/// Control when colored output is used. +#[derive(Clone, Copy, PartialEq, Eq, Debug, strum::AsRefStr)] +#[strum(serialize_all = "kebab-case")] +pub enum Command { + /// Compile the current package + Build, + /// Analyze the current package and report errors, but don't build object files + Check, + /// Remove the target directory + Clean, + /// Build this package's and its dependencies' documentation + Doc, + /// Create a new cargo package + New, + /// Create a new cargo package in an existing directory + Init, + /// Run a binary or example of the local package + Run, + /// Run the tests + Test, + /// Run the benchmarks + Bench, + /// Update dependencies listed in Cargo.lock + Update, + /// Search registry for crates + Search, + /// Package and upload this package to the registry + Publish, + /// Install a Rust binary. Default location is $HOME/.cargo/bin + Install, + /// Uninstall a Rust binary + Uninstall, +} + +impl Manipulator for Command { + fn apply(&self, command: &mut C) { + command.arg(self.as_ref()); + } +} + +/// Control when colored output is used. +#[derive(Clone, Copy, PartialEq, Eq, Debug, strum::AsRefStr)] +#[strum(serialize_all = "kebab-case")] +pub enum Color { + /// Never display colors. + None, + /// Always display colors. + Always, + /// Automatically detect if color support is available on the terminal. 
+ Auto, +} + +impl Manipulator for Color { + fn apply(&self, command: &mut C) { + command.args(["--color", self.as_ref()]); + } +} + +#[derive(Clone, PartialEq, Eq, Debug, strum::AsRefStr)] +#[strum(serialize_all = "kebab-case")] +pub enum Options { + Workspace, + Package(String), + AllTargets, +} + +impl Manipulator for Options { + fn apply(&self, command: &mut C) { + let base_arg = format!("--{}", self.as_ref()); + command.arg(base_arg); + use Options::*; + match self { + Workspace | AllTargets => {} + Package(package_name) => { + command.arg(package_name.as_str()); + } + } + } +} + +/// Options for the `cargo run` command. +#[derive(Clone, PartialEq, Eq, Debug, strum::AsRefStr)] +#[strum(serialize_all = "kebab-case")] +pub enum RunOption { + /// Name of the bin target to run. + Bin(String), +} + +impl Manipulator for RunOption { + fn apply(&self, command: &mut C) { + let base_arg = format!("--{}", self.as_ref()); + command.arg(base_arg); + use RunOption::*; + match self { + Bin(binary_name) => { + command.arg(binary_name.as_str()); + } + } + } +} diff --git a/build/ci_utils/src/programs/cargo/build_env.rs b/build/ci_utils/src/programs/cargo/build_env.rs new file mode 100644 index 0000000000..0d72c86464 --- /dev/null +++ b/build/ci_utils/src/programs/cargo/build_env.rs @@ -0,0 +1,23 @@ +//! Environment variables that Cargo sets for build scripts. +//! +//! See: https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts + +use crate::prelude::*; + + + +crate::define_env_var! { + /// Checks if the current build is targeting wasm32. + /// + /// Relies on `TARGET` environment variable set by cargo for build scripts. + TARGET, String; + + /// The folder in which all output and intermediate artifacts should be placed. This folder is + /// inside the build directory for the package being built, and it is unique for the package in + /// question. 
+ OUT_DIR, PathBuf; +} + +pub fn targeting_wasm() -> bool { + TARGET.get().map_or(false, |target| target.contains("wasm32")) +} diff --git a/build/ci_utils/src/programs/cargo/clippy.rs b/build/ci_utils/src/programs/cargo/clippy.rs new file mode 100644 index 0000000000..1514356f90 --- /dev/null +++ b/build/ci_utils/src/programs/cargo/clippy.rs @@ -0,0 +1 @@ +pub const COMMAND: &str = "clippy"; diff --git a/build/ci_utils/src/programs/cargo/fmt.rs b/build/ci_utils/src/programs/cargo/fmt.rs new file mode 100644 index 0000000000..eec3b101ef --- /dev/null +++ b/build/ci_utils/src/programs/cargo/fmt.rs @@ -0,0 +1,17 @@ +use crate::prelude::*; + +use crate::programs::Cargo; + +use std::process::Stdio; + + + +#[context("Failed to run cargo fmt on path '{}'", path.as_ref().display())] +pub async fn format(path: impl AsRef) -> Result { + command()?.with_stdin(Stdio::null()).with_current_dir(&path).run_ok().await +} + +/// Base command invoking cargo-fmt. +pub fn command() -> Result { + Cargo.cmd().map(|c| c.with_arg("fmt")) +} diff --git a/build/ci_utils/src/programs/cmd.rs b/build/ci_utils/src/programs/cmd.rs new file mode 100644 index 0000000000..17111798f5 --- /dev/null +++ b/build/ci_utils/src/programs/cmd.rs @@ -0,0 +1,167 @@ +use crate::prelude::*; + +use crate::env; +use crate::env::Modification; +use crate::programs::cmd::args::RUN_COMMAND; + +use std::process::Stdio; +use unicase::UniCase; + + + +#[derive(Clone, Copy, Debug)] +pub struct Cmd; + +lazy_static::lazy_static! { + static ref COMMAND_GLUE: [OsString; 3] = ["&&", "cls", "&&"].map(<_>::into); +} + +pub mod args { + /// Turns echo off. + pub const ECHO_OFF: &str = "/Q"; + + /// Carries out the command specified by string and then terminates. 
+ pub const RUN_COMMAND: &str = "/C"; +} + + +impl Program for Cmd { + fn executable_name(&self) -> &'static str { + "cmd" + } +} + +impl Shell for Cmd { + fn run_command(&self) -> Result { + let mut cmd = self.cmd()?; + cmd.arg(RUN_COMMAND); + Ok(cmd) + } + + fn run_script(&self, script_path: impl AsRef) -> Result { + let mut command = self.run_command()?; + command.arg(script_path.as_ref()); + Ok(command) + } + + fn run_shell(&self) -> Result { + self.cmd() + } +} + +pub fn run_commands(commands: Cmds) -> anyhow::Result +where + Cmds: IntoIterator>, + Arg: AsRef, { + let mut ret = Cmd.run_command()?; + ret.stdin(Stdio::null()).stdout(Stdio::piped()); + + let mut command_itr = commands.into_iter(); + if let Some(first) = command_itr.next() { + ret.args(first); + } + for following_command in command_itr { + add_next_command(&mut ret, following_command); + } + Ok(ret) +} + +pub fn add_next_command( + cmd: &mut Command, + command: impl IntoIterator>, +) -> &mut Command { + cmd.args(COMMAND_GLUE.iter()).args(command) +} + +pub fn split_command_outputs(output: &[u8]) -> impl Iterator { + const ASCII_FORM_FEED: u8 = 0xC; + output.split(|byte| *byte == ASCII_FORM_FEED) +} + +pub async fn compare_env( + f: impl FnOnce(&mut Command) -> &mut Command, +) -> Result> { + let mut cmd = run_commands([["set"]])?; + cmd.args(COMMAND_GLUE.iter()); + f(&mut cmd); + add_next_command(&mut cmd, ["set"]); + let output = cmd.output_ok().await?; + let outputs = + split_command_outputs(&output.stdout).map(std::str::from_utf8).try_collect_vec()?; + + ensure!(outputs.len() == 3, "Expected outputs from all 3 commands!"); + + let mut environment_before = parse_dumped_env( + outputs.first().ok_or_else(|| anyhow!("Missing initial environment dump!"))?, + )?; + let environment_after = parse_dumped_env( + outputs.last().ok_or_else(|| anyhow!("Missing final environment dump!"))?, + )?; + + // dbg!(&environment_after); + let mut changes = environment_after + .into_iter() + 
.filter_map(|(variable_name, new_value)| { + let path_like = is_path_like(&variable_name); + let action = match environment_before.remove(&variable_name) { + Some(old_value) => + if new_value != old_value { + if path_like { + // Check which elements are new and whether they are prepended. + // todo!(); + env::Action::PrependPaths(std::env::split_paths(&new_value).collect()) + } else { + env::Action::Set(new_value) + } + } else { + return None; + }, + None if path_like => + env::Action::PrependPaths(std::env::split_paths(&new_value).collect()), + None => env::Action::Set(new_value), + }; + Some(Modification { variable_name, action }) + }) + .collect_vec(); + + changes.extend( + environment_before + .into_iter() + .map(|(variable_name, _)| Modification { variable_name, action: env::Action::Remove }), + ); + // dbg!(&changes); + + Ok(changes) +} + +const PATH_LIKE: [&str; 4] = ["INCLUDE", "LIB", "LIBPATH", "PATH"]; + +pub fn is_path_like(variable_name: impl AsRef) -> bool { + let variable_name = UniCase::<&str>::from(variable_name.as_ref()); + PATH_LIKE.iter().any(|pathlike| UniCase::<&str>::from(*pathlike) == variable_name) +} + +pub fn parse_dumped_env(output: &str) -> Result, String>> { + // debug!("Got env:\n{}\n\n\n", output); + let non_empty_lines = output.lines().map(|line| line.trim()).filter(|line| !line.is_empty()); + non_empty_lines + .map(|line| match line.split_once('=') { + Some((name, value)) => Ok((name.into(), value.into())), + _ => Err(anyhow!("Cannot parse line {}!", line)), + }) + .collect() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn path_like() { + assert!(is_path_like("Path")); + assert!(is_path_like("PATH")); + assert!(is_path_like("PaTh")); + assert!(!is_path_like("PETh")); + assert!(!is_path_like("foo")); + } +} diff --git a/build/ci_utils/src/programs/conda.rs b/build/ci_utils/src/programs/conda.rs new file mode 100644 index 0000000000..d186d5d084 --- /dev/null +++ b/build/ci_utils/src/programs/conda.rs @@ -0,0 +1,19 @@ 
+use crate::prelude::*; + + + +#[derive(Clone, Copy, Debug)] +pub struct Conda; + +impl Program for Conda { + fn executable_name(&self) -> &'static str { + "conda" + } + fn default_locations(&self) -> Vec { + if let Some(path) = std::env::var_os("CONDA") { + vec![PathBuf::from(path)] + } else { + default() + } + } +} diff --git a/build/ci_utils/src/programs/docker.rs b/build/ci_utils/src/programs/docker.rs new file mode 100644 index 0000000000..7afa6d8cd7 --- /dev/null +++ b/build/ci_utils/src/programs/docker.rs @@ -0,0 +1,554 @@ +use crate::prelude::*; + +use crate::env::new::TypedVariable; +use crate::extensions::child::ChildExt; + +use shrinkwraprs::Shrinkwrap; +use std::collections::HashMap; +use std::fmt::Formatter; +use std::process::Stdio; +use std::str::FromStr; + + + +#[derive(Clone, Debug, PartialEq, Ord, PartialOrd, Eq, Hash)] +pub enum NetworkDriver { + // Linux + Bridge, + Host, + Overlay, + Ipvlan, + Macvlan, + None, + // Windows + Ics, + Nat, + Transparent, + L2bridge, + Null, + // + Other(String), +} + +impl AsRef for NetworkDriver { + fn as_ref(&self) -> &str { + match self { + NetworkDriver::Bridge => "bridge", + NetworkDriver::Host => "host", + NetworkDriver::Overlay => "overlay", + NetworkDriver::Ipvlan => "ipvlan", + NetworkDriver::Macvlan => "macvlan", + NetworkDriver::None => "none", + NetworkDriver::Ics => "ics", + NetworkDriver::Nat => "nat", + NetworkDriver::Transparent => "transparent", + NetworkDriver::L2bridge => "l2bridge", + NetworkDriver::Null => "null", + NetworkDriver::Other(name) => name.as_str(), + } + } +} + +impl Default for NetworkDriver { + fn default() -> Self { + if TARGET_OS == OS::Windows { + NetworkDriver::Nat + } else { + NetworkDriver::Bridge + } + } +} + + +#[derive(Clone, Debug, PartialEq, Ord, PartialOrd, Eq, Hash)] +pub struct NetworkInfo { + pub id: String, + pub name: String, + pub driver: NetworkDriver, + pub scope: String, +} + +#[derive(Clone, Debug)] +pub struct Credentials { + pub username: String, + pub 
password: String, + pub server: String, +} + +impl Credentials { + pub fn new( + username: impl Into, + password: impl Into, + server: impl Into, + ) -> Self { + Self { username: username.into(), password: password.into(), server: server.into() } + } +} + +#[derive(Clone, Copy, Debug)] +pub struct Docker; + +impl Program for Docker { + fn executable_name(&self) -> &'static str { + "docker" + } +} + +impl Docker { + pub async fn build(&self, options: BuildOptions) -> Result { + let mut command = self.cmd()?; + command.arg("build").args(options.args()); + debug!("{:?}", command); + let output = command.output_ok().await?; + trace!("Output: {:?}", output); + let built_image_id = std::str::from_utf8(&output.stdout)? + .lines() + .inspect(|line| debug!("{}", line)) + .filter(|line| !line.is_empty()) + .last() + .ok_or_else(|| anyhow!("Docker provided no output"))? + .split(' ') + .last() + .ok_or_else(|| anyhow!("The last line has no space!"))?; + debug!("Image {} successfully built!", built_image_id); + Ok(ImageId(built_image_id.into())) + } + + pub fn run_cmd(&self, options: &RunOptions) -> Result { + let mut cmd = self.cmd()?; + cmd.arg("run").args(options.args()); + Ok(cmd) + } + + pub async fn run(&self, options: &RunOptions) -> Result { + self.run_cmd(options)?.run_ok().await + } + + pub async fn create(&self, options: &RunOptions) -> Result { + let output = self.cmd()?.arg("create").args(options.args()).output_ok().await?; + Ok(ContainerId(output.single_line_stdout()?)) + } + + pub async fn remove_container(&self, name: &ContainerId, force: bool) -> Result { + let force_arg = if force { ["-f"].as_slice() } else { [].as_slice() }; + self.cmd()?.arg("rm").args(force_arg).arg(name.as_ref()).run_ok().await + } + + pub async fn run_detached(&self, options: &RunOptions) -> Result { + let output = + dbg!(self.cmd()?.arg("run").arg("-d").args(options.args())).output_ok().await?; + // dbg!(&output); + Ok(ContainerId(output.single_line_stdout()?)) + // 
output.status.exit_ok()?; + } + + pub async fn kill(&self, target: impl AsRef) -> Result { + Docker.cmd()?.args(["kill", target.as_ref()]).run_ok().await + } + + pub async fn upload( + &self, + from: impl AsRef, + container: &ContainerId, + to: impl AsRef, + ) -> Result { + self.cmd()? + .arg("cp") + .arg("--archive") + .arg(from.as_ref()) + .arg(format!("{}:{}", container.as_str(), to.as_ref().display())) + .run_ok() + .await + } + + pub async fn start(&self, container: &ContainerId) -> Result { + self.cmd()?.arg("start").arg(container.as_str()).run_ok().await + } + + /// Returns network ID. + pub async fn create_network( + &self, + driver: &NetworkDriver, + name: impl AsRef, + ) -> Result { + Docker + .cmd()? + .args(["network", "create", "--driver", driver.as_ref(), name.as_ref()]) + .output_ok() + .await? + .single_line_stdout() + } + + /// Returns network ID. + pub async fn remove_network(&self, name_or_id: impl AsRef) -> Result { + Docker + .cmd()? + .args(["network", "rm", name_or_id.as_ref()]) + .output_ok() + .await? + .single_line_stdout() + } + + pub async fn list_networks(&self) -> Result> { + let mut cmd = Docker.cmd()?; + cmd.args(["network", "ls", "--no-trunc"]); + cmd.stdout(Stdio::piped()); + let stdout = cmd.output_ok().await?.stdout; + let stdout = String::from_utf8(stdout)?; + + let mut ret = Vec::new(); + for line in stdout.lines().skip(1) { + // Network name can contain spaces, e.g. "Default Switch". + // It seems that columns are separated by at least 3 spaces. 
+ match line.split(" ").filter(|word| !word.is_empty()).collect_vec().as_slice() { + [id, name, driver, scope] => ret.push(NetworkInfo { + id: id.to_string(), + driver: match *driver { + "bridge" => NetworkDriver::Bridge, + "host" => NetworkDriver::Host, + "overlay" => NetworkDriver::Overlay, + "ipvlan" => NetworkDriver::Ipvlan, + "macvlan" => NetworkDriver::Macvlan, + "none" => NetworkDriver::None, + "ics" => NetworkDriver::Ics, + "nat" => NetworkDriver::Nat, + "transparent" => NetworkDriver::Transparent, + "l2bridge" => NetworkDriver::L2bridge, + "null" => NetworkDriver::Null, + name => NetworkDriver::Other(name.to_string()), + }, + name: name.to_string(), + scope: scope.to_string(), + }), + _ => bail!("Failed to parse line: {}", line), + } + } + Ok(ret) + } + + pub async fn while_logged_in>, T>( + &self, + credentials: Credentials, + f: impl FnOnce() -> F, + ) -> F::Output { + self.login(&credentials).await?; + let ret = f().await; + // Logout before returning result. + self.logout(&credentials.server).await?; + ret + } + + pub async fn login(&self, credentials: &Credentials) -> Result { + let Credentials { username, password, server } = credentials; + let mut cmd = self.cmd()?; + cmd.args(["login", "--username", username, "--password-stdin", server]); + cmd.stdin(Stdio::piped()); + let mut process = cmd.spawn()?; + let stdin = process.stdin.as_mut().context("Failed to open stdin")?; + stdin.write_all(password.as_bytes()).await?; + process.wait_ok().await + } + + pub async fn logout(&self, registry: &str) -> Result { + let mut cmd = self.cmd()?; + cmd.args(["logout", registry]); + cmd.run_ok().await + } + + pub async fn push(&self, image: &str) -> Result { + let mut cmd = self.cmd()?; + cmd.args(["push", image]); + cmd.run_ok().await + } +} + +#[derive(Clone, Debug)] +pub struct BuildOptions { + pub context: PathBuf, + pub target: Option, + pub tags: Vec, + pub build_args: HashMap>, + pub file: Option, +} + +impl BuildOptions { + pub fn new(context_path: impl 
Into) -> Self { + Self { + context: context_path.into(), + target: default(), + tags: default(), + build_args: default(), + file: default(), + } + } + + pub fn add_build_arg_from_env_or( + &mut self, + name: impl AsRef, + f: impl FnOnce() -> Result, + ) -> Result + where + R: ToString, + { + let value = match std::env::var(name.as_ref()) { + Ok(env_value) => env_value, + Err(_) => f()?.to_string(), + }; + self.build_args.insert(name.as_ref().into(), Some(value)); + Ok(()) + } + + pub fn args(&self) -> Vec { + let mut ret = Vec::new(); + ret.push(self.context.clone().into()); + if let Some(target) = self.target.as_ref() { + ret.push("--target".into()); + ret.push(target.clone()); + } + for tag in &self.tags { + ret.push("--tag".into()); + ret.push(tag.into()); + } + for (name, value) in &self.build_args { + ret.push("--build-arg".into()); + if let Some(value) = value { + ret.push(format!("{name}={value}").into()); + } else { + ret.push(name.into()); + } + } + if let Some(file) = self.file.as_ref() { + ret.push("--file".into()); + // Docker can't handle verbatim Dockerfile path. It would fail like: + // `unable to prepare context: unable to get relative Dockerfile path: Rel: can't make + // \\?\C:\Users\mwu\ci\image\windows\Dockerfile relative to + // C:\Users\mwu\AppData\Local\Temp\2\.tmpOykTop` + ret.push(file.without_verbatim_prefix().into()); + } + ret + } +} + +/// Using the --restart flag on Docker run you can specify a restart policy for how a container +/// should or should not be restarted on exit. +#[derive(Clone, Copy, Debug)] +pub enum RestartPolicy { + /// Do not automatically restart the container when it exits. This is the default. + No, + /// Restart only if the container exits with a non-zero exit status. + OnFailure { + /// Optionally, limit the number of restart retries the Docker daemon attempts. + max_retries: Option, + }, + /// Always restart the container regardless of the exit status. 
When you specify always, the + /// Docker daemon will try to restart the container indefinitely. The container will also + /// always start on daemon startup, regardless of the current state of the container. + Always, + /// Always restart the container regardless of the exit status, including on daemon startup, + /// except if the container was put into a stopped state before the Docker daemon was stopped. + UnlessStopped, +} + +impl RestartPolicy { + pub fn print_args(self) -> [OsString; 2] { + let value = match self { + RestartPolicy::No => "no".into(), + RestartPolicy::OnFailure { max_retries: Some(max_retries) } => + format!("on-failure:{}", max_retries).into(), + RestartPolicy::OnFailure { max_retries: None } => "on-failure:{}".into(), + RestartPolicy::Always => "always".into(), + RestartPolicy::UnlessStopped => "unless-stopped".into(), + }; + ["--restart".into(), value] + } +} + +#[derive(Clone, Debug)] +pub enum Network { + Bridge, + Host, + User(String), + Container(ContainerId), +} + +impl Default for Network { + fn default() -> Self { + Network::Bridge + } +} + +impl Display for Network { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Network::Bridge => write!(f, "bridge"), + Network::Host => write!(f, "host"), + Network::User(name) => write!(f, "{}", name), + Network::Container(name_or_id) => write!(f, "container:{}", name_or_id), + } + } +} + +#[derive(Clone, Debug)] +pub struct RunOptions { + pub image: ImageId, + pub working_directory: Option, + pub volume: Vec<(PathBuf, PathBuf)>, + pub command: Vec, + pub name: Option, + pub restart: Option, + pub env: HashMap, + /// Mapping host port => guest port. + pub ports: HashMap, + pub network: Option, + pub storage_size_gb: Option, + /// Proxy all received signals to the process (non-TTY mode only). 
+ pub sig_proxy: Option, +} + +impl RunOptions { + pub fn new(image: ImageId) -> Self { + Self { + image, + working_directory: default(), + volume: default(), + command: default(), + name: default(), + restart: default(), + env: default(), + ports: default(), + network: default(), + storage_size_gb: default(), + sig_proxy: default(), + } + } + + pub fn env_raw(&mut self, name: impl Into, value: impl Into) -> &mut Self { + self.env.insert(name.into(), value.into()); + self + } + + pub fn volume(&mut self, host: impl Into, guest: impl Into) -> &mut Self { + self.volume.push((host.into(), guest.into())); + self + } + + pub fn env( + &mut self, + variable: &T, + value: impl Borrow, + ) -> Result<&mut Self> { + Ok(self.env_raw(variable.name(), variable.generate(value.borrow())?)) + } + + pub fn bind_docker_daemon(&mut self) { + let path = match TARGET_OS { + OS::Windows => r"\\.\pipe\docker_engine", + OS::Linux => r"/var/run/docker.sock", + _ => unimplemented!("OS {} is not supported!", TARGET_OS), + }; + self.volume.push((PathBuf::from(path), PathBuf::from(path))); + } + + pub fn storage_size_gb(&mut self, storage_size_in_gb: usize) -> &mut Self { + self.storage_size_gb = Some(storage_size_in_gb); + self + } + + pub fn publish_port(&mut self, host_port: u16, container_port: u16) -> &mut Self { + self.ports.insert(host_port, container_port); + self + } + + pub fn args(&self) -> Vec { + let mut ret = Vec::new(); + if let Some(working_directory) = self.working_directory.as_ref() { + ret.push("--workdir".into()); + ret.push(working_directory.clone().into()); + } + for (volume_src, volume_dst) in &self.volume { + ret.push("--volume".into()); + + let mut mapping = volume_src.clone().into_os_string(); + mapping.push(":"); + mapping.push(volume_dst); + ret.push(mapping); + } + if let Some(name) = self.name.as_ref() { + ret.push("--name".into()); + ret.push(name.into()); + } + if let Some(restart) = self.restart.as_ref() { + ret.extend(restart.print_args()); + } + + for (name, 
value) in &self.env { + ret.push("--env".into()); + let mut mapping = name.clone(); + mapping.push("="); + mapping.push(value); + ret.push(mapping); + } + + for (host, guest) in &self.ports { + ret.push("-p".into()); + ret.push(format!("{host}:{guest}").into()); + } + + if let Some(network) = self.network.as_ref() { + let arg = format!(r#"--network={network}"#); + ret.push(arg.into()); + } + + if let Some(storage_size_gb) = self.storage_size_gb { + // e.g. --storage-opt size=120G + ret.push("--storage-opt".into()); + ret.push(format!("size={}G", storage_size_gb).into()); + } + + if let Some(sig_proxy) = self.sig_proxy { + let arg = format!(r#"--sig-proxy={sig_proxy}"#); + ret.push(arg.into()); + } + + ret.push(OsString::from(&self.image.0)); + + ret.extend(self.command.clone()); + ret + } +} + +#[derive(Clone, Display, Debug)] +pub struct ImageId(pub String); + +#[derive(Clone, Debug, Display, Shrinkwrap)] +pub struct ContainerId(pub String); + +impl FromStr for ContainerId { + type Err = anyhow::Error; + + fn from_str(s: &str) -> std::result::Result { + Ok(ContainerId(s.into())) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + #[ignore] + async fn network() -> Result { + dbg!(Docker.list_networks().await?); + dbg!(Docker.remove_network("fd").await?); + dbg!(Docker.create_network(&default(), "fd").await?); + Ok(()) + } + + #[tokio::test] + #[ignore] + async fn build() -> Result { + let opts = BuildOptions::new(r"C:\Users\mwu\ci\image\windows\"); + dbg!(Docker.build(opts).await?); + Ok(()) + } +} diff --git a/build/ci_utils/src/programs/flatc.rs b/build/ci_utils/src/programs/flatc.rs new file mode 100644 index 0000000000..989e029c13 --- /dev/null +++ b/build/ci_utils/src/programs/flatc.rs @@ -0,0 +1,12 @@ +use crate::prelude::*; + + + +#[derive(Clone, Copy, Debug, Default)] +pub struct Flatc; + +impl Program for Flatc { + fn executable_name(&self) -> &'static str { + "flatc" + } +} diff --git a/build/ci_utils/src/programs/git.rs 
b/build/ci_utils/src/programs/git.rs new file mode 100644 index 0000000000..217f9b45e8 --- /dev/null +++ b/build/ci_utils/src/programs/git.rs @@ -0,0 +1,129 @@ +use crate::prelude::*; + +use crate::new_command_type; + + +// ============== +// === Export === +// ============== + +pub mod clean; + +pub use clean::Clean; + + + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct Git { + /// The path to the repository root above the `working_dir`. + /// + /// Many paths that git returns are relative to the repository root. + repo_path: PathBuf, + /// Directory in which commands will be invoked. + /// It might not be the repository root and it makes difference for many commands. + working_dir: PathBuf, +} + +impl Program for Git { + type Command = GitCommand; + fn executable_name(&self) -> &'static str { + "git" + } + fn current_directory(&self) -> Option { + Some(self.working_dir.clone()) + } +} + +impl Git { + pub async fn new(repo_path: impl Into) -> Result { + let repo_path = repo_path.into(); + let temp_git = Git { working_dir: repo_path.clone(), repo_path }; + let repo_path = temp_git.repository_root().await?; + Ok(Git { repo_path, working_dir: temp_git.working_dir }) + } + + pub async fn new_current() -> Result { + Git::new(crate::env::current_dir()?).await + } + + pub async fn head_hash(&self) -> Result { + self.cmd()?.args(["rev-parse", "--verify", "HEAD"]).output_ok().await?.single_line_stdout() + } + + /// List of files that are different than the compared commit. + #[context("Failed to list files that are different than {}.", compare_against.as_ref())] + pub async fn diff_against(&self, compare_against: impl AsRef) -> Result> { + let root = self.repo_path.as_path(); + Ok(self + .cmd()? + .args(["diff", "--name-only", compare_against.as_ref()]) + .output_ok() + .await? + .into_stdout_string()? 
+ .lines() + .map(|line| root.join(line.trim()).normalize()) + .collect_vec()) + } + + pub async fn repository_root(&self) -> Result { + let output = self + .cmd()? + .args(["rev-parse", "--show-toplevel"]) + .output_ok() + .await? + .single_line_stdout()?; + let path = PathBuf::from(output).normalize(); + Ok(path) + } +} + + +new_command_type!(Git, GitCommand); + +impl GitCommand { + pub fn clean(&mut self) -> &mut Self { + self.arg(Command::Clean) + .apply(&Clean::Ignored) + .apply(&Clean::Force) + .apply(&Clean::UntrackedDirectories) + } + pub fn nice_clean(&mut self) -> &mut Self { + self.clean().apply(&Clean::Exclude(".idea".into())) + } +} + +#[derive(Clone, Copy, Debug)] +pub enum Command { + Clean, +} + +impl AsRef for Command { + fn as_ref(&self) -> &OsStr { + match self { + Command::Clean => OsStr::new("clean"), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + #[ignore] + async fn repo_root() -> Result { + let git = Git::new(".").await?; + let diff = git.repository_root().await?; + println!("{:?}", diff); + Ok(()) + } + + #[tokio::test] + #[ignore] + async fn call_diff() -> Result { + let git = Git::new(".").await?; + let diff = git.diff_against("origin/develop").await?; + println!("{:?}", diff); + Ok(()) + } +} diff --git a/build/ci_utils/src/programs/git/clean.rs b/build/ci_utils/src/programs/git/clean.rs new file mode 100644 index 0000000000..ed71856599 --- /dev/null +++ b/build/ci_utils/src/programs/git/clean.rs @@ -0,0 +1,114 @@ +use crate::prelude::*; + +use crate::path::trie::Trie; +use crate::program::command::Manipulator; +use crate::programs::Git; + +use std::path::Component; + + + +#[derive(Clone, Debug)] +pub struct DirectoryToClear<'a> { + pub prefix: Vec>, + pub trie: &'a Trie<'a>, +} + +/// Run ``git clean -xfd`` but preserve the given paths. +/// +/// This may involve multiple git clean calls on different subtrees. +/// Given paths can be either absolute or relative. 
If relative, they are relative to the +/// repository root. +pub async fn clean_except_for( + repo_root: impl AsRef, + paths: impl IntoIterator>, +) -> Result { + let root = repo_root.as_ref().canonicalize()?; + + let relative_exclusions: Vec = paths + .into_iter() + // We use filter_map, so invalid (e.g. not existing) paths are ignored. + .filter_map(|p| { + let path: &Path = p.as_ref(); + // If we get a relative path, we treat it as relative to the repository root. + let canonical_path = + if path.is_relative() { root.join(path) } else { path.to_path_buf() } + .canonicalize() + .ok()?; + + canonical_path.strip_prefix(&root).ok().map(ToOwned::to_owned) + }) + .collect_vec(); + + let trie = Trie::from_iter(relative_exclusions.iter()); + + let mut directories_to_clear = vec![DirectoryToClear { prefix: vec![], trie: &trie }]; + while let Some(DirectoryToClear { prefix, trie }) = directories_to_clear.pop() { + let current_dir = root.join_iter(&prefix); + let exclusions_in_current_dir = + trie.children.keys().map(|c| Clean::Exclude(c.as_os_str().to_string_lossy().into())); + let git = Git::new(¤t_dir).await?; + git.cmd()?.clean().apply_iter(exclusions_in_current_dir).run_ok().await?; + + for (child_name, child_trie) in trie.children.iter() { + if !child_trie.is_leaf() { + let mut prefix = prefix.clone(); + prefix.push(*child_name); + directories_to_clear.push(DirectoryToClear { prefix, trie: child_trie }); + } + } + } + + Ok(()) +} + +#[derive(Clone, Debug)] +pub enum Clean { + /// Normally, when no path is specified, `git clean` will not recurse into untracked + /// directories to avoid removing too much. Specify this option to have it recurse into such + /// directories as well. If any paths are specified, this option is irrelevant; all untracked + /// files matching the specified paths (with exceptions for nested git directories mentioned + /// under `Force`) will be removed. 
+ UntrackedDirectories, + + /// If the Git configuration variable clean.requireForce is not set to false, git clean will + /// refuse to delete files or directories unless given `Force` or `Interactive`. Git will + /// refuse to modify untracked nested git repositories (directories with a .git subdirectory) + /// unless a second `Force` is given. + Force, + + /// Show what would be done and clean files interactively. + Interactive, + + /// Don’t actually remove anything, just show what would be done. + DryRun, + + /// Use the given exclude pattern in addition to the standard ignore rules. + Exclude(String), + + /// Don’t use the standard ignore rules, but still use the ignore rules given with `Exclude` + /// options from the command line. This allows removing all untracked files, including build + /// products. This can be used (possibly in conjunction with git restore or git reset) to + /// create a pristine working directory to test a clean build. + Ignored, + + /// Remove only files ignored by Git. This may be useful to rebuild everything from scratch, + /// but keep manually created files. 
+ OnlyIgnored, +} + +impl Manipulator for Clean { + fn apply(&self, command: &mut C) { + // fn apply<'a, C: IsCommandWrapper + ?Sized>(&self, c: &'a mut C) -> &'a mut C { + let args: Vec<&str> = match self { + Clean::UntrackedDirectories => vec!["-d"], + Clean::Force => vec!["-f"], + Clean::Interactive => vec!["-i"], + Clean::DryRun => vec!["-n"], + Clean::Exclude(pattern) => vec!["-e", pattern.as_ref()], + Clean::Ignored => vec!["-x"], + Clean::OnlyIgnored => vec!["-X"], + }; + command.args(args); + } +} diff --git a/build/ci_utils/src/programs/go.rs b/build/ci_utils/src/programs/go.rs new file mode 100644 index 0000000000..ac00e6df1d --- /dev/null +++ b/build/ci_utils/src/programs/go.rs @@ -0,0 +1,28 @@ +use crate::prelude::*; + +use crate::programs::Program; + + + +#[derive(Clone, Copy, Debug, Default)] +pub struct Go; + +impl Program for Go { + fn executable_name(&self) -> &'static str { + "go" + } + fn default_locations(&self) -> Vec { + std::env::vars() + .filter(|(name, _)| name.starts_with("GOROOT_")) + .max_by(|(name1, _), (name2, _)| name1.cmp(name2)) + .map(|(_, value)| PathBuf::from(value).join("bin")) + .into_iter() + .collect() + } + + fn version_command(&self) -> Result { + let mut cmd = self.cmd()?; + cmd.arg("version"); + Ok(cmd) + } +} diff --git a/build/ci_utils/src/programs/graal.rs b/build/ci_utils/src/programs/graal.rs new file mode 100644 index 0000000000..a6e9f5de92 --- /dev/null +++ b/build/ci_utils/src/programs/graal.rs @@ -0,0 +1,99 @@ +use crate::prelude::*; + + + +#[derive(Clone, Copy, Debug, Default)] +pub struct Gu; + +impl Program for Gu { + fn executable_name(&self) -> &'static str { + "gu" + } +} + + +pub fn take_until_whitespace(text: &str) -> &str { + text.split_whitespace().next().unwrap_or(text) +} + +/// Support for sulong has not been implemented for Windows yet. 
+/// +/// See: https://github.com/oracle/graal/issues/1160 +pub fn sulong_supported() -> bool { + TARGET_OS != OS::Windows +} + +pub async fn list_components() -> Result> { + let output = Gu.cmd()?.arg("list").output_ok().await?; + let lines = std::str::from_utf8(&output.stdout)?.lines(); + let lines = lines.skip(2); // We drop header and table dash separator lines. + Ok(lines + .filter_map(|line| { + let name = take_until_whitespace(line); + match Component::from_str(name) { + Ok(component) => Some(component), + Err(e) => { + warn!("Unrecognized component name '{name}'. Error: {e}"); + None + } + } + }) + .collect()) +} + +pub async fn install_missing_components(components: impl IntoIterator) -> Result { + let already_installed = list_components().await?; + let missing_components = + components.into_iter().filter(|c| !already_installed.contains(c)).collect_vec(); + // We want to avoid running `gu install` when all required components are already installed, + // as this command might require root privileges in some environments. 
+ if !missing_components.is_empty() { + let mut cmd = Gu.cmd()?; + cmd.arg("install"); + for missing_component in missing_components { + cmd.arg(missing_component.as_ref()); + } + cmd.run_ok().await?; + } else { + debug!("All required components are installed."); + } + Ok(()) +} + +#[derive( + Clone, + Copy, + Hash, + PartialEq, + Eq, + Debug, + strum::Display, + strum::AsRefStr, + strum::EnumString +)] +#[strum(serialize_all = "kebab-case")] +pub enum Component { + #[strum(serialize = "graalvm")] + GraalVM, + JS, + NativeImage, + Python, + #[strum(serialize = "R")] + R, +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::log::setup_logging; + + #[tokio::test] + #[ignore] + async fn gu_list() -> Result { + setup_logging()?; + // let output = Gu.cmd()?.arg("list").output_ok().await?; + // println!("{:?}", std::str::from_utf8(&output.stdout)?); + dbg!(list_components().await)?; + Ok(()) + } +} diff --git a/build/ci_utils/src/programs/java.rs b/build/ci_utils/src/programs/java.rs new file mode 100644 index 0000000000..6fe7e05cb4 --- /dev/null +++ b/build/ci_utils/src/programs/java.rs @@ -0,0 +1,92 @@ +use crate::prelude::*; + +use crate::program::command::Manipulator; + + + +crate::define_env_var! { + /// Java installation directory. + JAVA_HOME, PathBuf; +} + +#[derive(Clone, Debug, derive_more::Deref, derive_more::DerefMut)] +pub struct Classpath(pub Vec); + +impl Classpath { + pub fn new(paths: impl IntoIterator>) -> Self { + Classpath(paths.into_iter().map(|p| p.as_ref().to_path_buf()).collect()) + } +} + +impl Manipulator for Classpath { + fn apply(&self, command: &mut C) { + // Java uses same separator for classpaths entries as native PATH separator. 
+ let Ok(paths) = std::env::join_paths(&self.0) else { + panic!("Invalid character in paths: {:?}", &self.0) + }; + command.arg("--class-path").arg(paths); + } +} + +#[derive(Clone, Copy, Debug)] +pub enum Option { + EnableAssertions, +} + +impl AsRef for Option { + fn as_ref(&self) -> &str { + match self { + Option::EnableAssertions => "-enableassertions", + } + } +} + +impl AsRef for Option { + fn as_ref(&self) -> &OsStr { + OsStr::new::(self.as_ref()) + } +} + +#[derive(Clone, Copy, Debug)] +pub struct Java; + +impl Program for Java { + fn executable_name(&self) -> &'static str { + "java" + } +} + +impl Java { + pub async fn check_language_version(&self) -> Result { + let version_string = self.version_string().await?; + Ok(LanguageVersion(self.parse_version(&version_string)?.major as u8)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parse_version() { + let contents = "openjdk 11.0.11 2021-04-20\nOpenJDK Runtime Environment GraalVM CE 21.1.0 (build 11.0.11+8-jvmci-21.1-b05)\nOpenJDK 64-Bit Server VM GraalVM CE 21.1.0 (build 11.0.11+8-jvmci-21.1-b05, mixed mode, sharing)"; + assert_eq!(Java.parse_version(contents).unwrap(), Version::new(11, 0, 11)); + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Shrinkwrap)] +pub struct LanguageVersion(pub u8); + +impl std::str::FromStr for LanguageVersion { + type Err = anyhow::Error; + + fn from_str(s: &str) -> std::result::Result { + s.parse2::().map(LanguageVersion) + } +} + +impl Display for LanguageVersion { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "java{}", self.0) + } +} diff --git a/build/ci_utils/src/programs/javac.rs b/build/ci_utils/src/programs/javac.rs new file mode 100644 index 0000000000..117be11298 --- /dev/null +++ b/build/ci_utils/src/programs/javac.rs @@ -0,0 +1,35 @@ +use crate::prelude::*; + +use crate::program::command::Manipulator; + + +// ============== +// === Export === +// ============== + +pub use 
crate::programs::java::Classpath; + + + +#[derive(Clone, Debug)] +pub enum Options { + /// Specify where to place generated class files + Directory(PathBuf), +} + +impl Manipulator for Options { + fn apply(&self, command: &mut C) { + match self { + Options::Directory(path) => command.arg("-d").arg(path), + }; + } +} + +#[derive(Clone, Copy, Debug)] +pub struct Javac; + +impl Program for Javac { + fn executable_name(&self) -> &str { + "javac" + } +} diff --git a/build/ci_utils/src/programs/node.rs b/build/ci_utils/src/programs/node.rs new file mode 100644 index 0000000000..86834fe869 --- /dev/null +++ b/build/ci_utils/src/programs/node.rs @@ -0,0 +1,54 @@ +use crate::prelude::*; + +use crate::new_command_type; + + + +#[derive(Clone, Copy, Debug, Default)] +pub struct Node; + +impl Program for Node { + fn executable_name(&self) -> &'static str { + "node" + } +} + +new_command_type! {Npm, NpmCommand} + +impl NpmCommand { + pub fn install(&mut self) -> &mut Self { + // // We must strip any UNC prefix, because CMD does not support having it as a current + // // directory, and npm is effectively a CMD script wrapping the actual program. See: + // // https://github.com/npm/cli/issues/3349 + // // + // // If this becomes an issue, consider toggling `DisableUNCCheck` on win runner machines + // and // revert this workaround. 
See also: + // // https://www.ibm.com/support/pages/disableunccheck-registry-key-created-during-rational-synergy-installation + // let path = dbg!(path.as_ref().strip_prefix(r"\\?\")).unwrap_or(path.as_ref()); + self.arg("install"); + self + } + pub fn workspace(&mut self, workspace: impl AsRef) -> &mut Self { + self.arg("--workspace").arg(workspace); + self + } + pub fn run( + &mut self, + script_name: impl AsRef, + args: impl IntoIterator>, + ) -> &mut Self { + self.arg("run").arg(script_name).args(args); + self + } +} + +#[derive(Clone, Copy, Debug, Default)] +pub struct Npm; + +impl Program for Npm { + type Command = NpmCommand; + + fn executable_name(&self) -> &'static str { + "npm" + } +} diff --git a/build/ci_utils/src/programs/npx.rs b/build/ci_utils/src/programs/npx.rs new file mode 100644 index 0000000000..c5e1c1c708 --- /dev/null +++ b/build/ci_utils/src/programs/npx.rs @@ -0,0 +1,12 @@ +use crate::prelude::*; + + + +#[derive(Clone, Copy, Debug, Default)] +pub struct Npx; + +impl Program for Npx { + fn executable_name(&self) -> &'static str { + "npx" + } +} diff --git a/build/ci_utils/src/programs/pwsh.rs b/build/ci_utils/src/programs/pwsh.rs new file mode 100644 index 0000000000..c8e8f8090b --- /dev/null +++ b/build/ci_utils/src/programs/pwsh.rs @@ -0,0 +1,39 @@ +use crate::prelude::*; + + + +#[derive(Clone, Copy, Debug)] +pub struct PwSh; + +pub mod arg { + pub const RUN_COMMAND: &str = "-Command"; + pub const RUN_FILE: &str = "-File"; +} + +impl Program for PwSh { + fn executable_name(&self) -> &'static str { + "pwsh" + } + fn executable_name_fallback() -> Vec<&'static str> { + vec!["powershell"] + } +} + +impl Shell for PwSh { + fn run_command(&self) -> Result { + let mut command = self.cmd()?; + command.arg(arg::RUN_COMMAND); + Ok(command) + } + + fn run_script(&self, script_path: impl AsRef) -> Result { + let mut command = self.run_command()?; + command.arg(arg::RUN_FILE); + command.arg(script_path.as_ref()); + Ok(command) + } + + fn 
run_shell(&self) -> Result { + self.cmd() + } +} diff --git a/build/ci_utils/src/programs/robocopy.rs b/build/ci_utils/src/programs/robocopy.rs new file mode 100644 index 0000000000..63982efaea --- /dev/null +++ b/build/ci_utils/src/programs/robocopy.rs @@ -0,0 +1,36 @@ +/// Windows-specific system tool for copying things. +/// +/// See https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/robocopy +use crate::prelude::*; + + + +#[derive(Clone, Copy, Debug)] +pub struct Robocopy; + +impl Program for Robocopy { + fn executable_name(&self) -> &'static str { + "robocopy" + } + + fn handle_exit_status(status: std::process::ExitStatus) -> Result { + match status.code() { + None => status.exit_ok().anyhow_err(), + Some(code) if code >= 8 => bail!("Exit with code {}.", code), + Some(_) => Ok(()), + } + } +} + +impl Robocopy {} + +pub async fn mirror_directory(source: impl AsRef, destination: impl AsRef) -> Result { + Robocopy + .cmd()? + .arg(source.as_ref()) + .arg(destination.as_ref()) + .arg("/mir") + .arg("/sl") + .run_ok() + .await +} diff --git a/build/ci_utils/src/programs/rsync.rs b/build/ci_utils/src/programs/rsync.rs new file mode 100644 index 0000000000..34befee504 --- /dev/null +++ b/build/ci_utils/src/programs/rsync.rs @@ -0,0 +1,43 @@ +use crate::prelude::*; + + + +#[derive(Clone, Copy, Debug)] +pub struct Rsync; + +impl Program for Rsync { + fn executable_name(&self) -> &'static str { + "rsync" + } +} + +#[derive(Clone, Copy, Debug, strum::AsRefStr)] +pub enum Option { + /// archive mode; equals -rlptgoD (no -H,-A,-X) + Archive, + /// delete extraneous files from dest dirs + Delete, +} + +impl AsRef for Option { + fn as_ref(&self) -> &OsStr { + OsStr::new(match self { + Self::Archive => "--archive", + Self::Delete => "--delete", + }) + } +} + +pub async fn mirror_directory(source: impl AsRef, destination: impl AsRef) -> Result { + // rsync treats "path/to/dir" and "path/to/dir/" differently. 
+ // We want the latter (otherwise `source` would be placed inside `destination`), so we append an + // empty path segment. + let source = source.as_ref().join(""); + Rsync + .cmd()? + .args([Option::Archive, Option::Delete]) + .arg(&source) + .arg(destination.as_ref()) + .run_ok() + .await +} diff --git a/build/ci_utils/src/programs/rustc.rs b/build/ci_utils/src/programs/rustc.rs new file mode 100644 index 0000000000..3478dba4b9 --- /dev/null +++ b/build/ci_utils/src/programs/rustc.rs @@ -0,0 +1,32 @@ +use crate::prelude::*; + +use crate::program::command::Manipulator; + + + +/// Lint check or a group of such. +// Full list can be obtained by `rustc -W help`. +#[derive(Clone, Copy, Debug, strum::Display, strum::AsRefStr)] +#[strum(serialize_all = "kebab-case")] +pub enum Lint { + // == Groups == + /// All lints that are set to issue warnings. + Warnings, +} + +/// An option that can b e passed as a command line argument to rustc. +#[derive(Clone, Copy, Debug)] +pub enum Option { + /// Set lint denied + Deny(Lint), +} + +impl Manipulator for Option { + fn apply(&self, command: &mut C) { + match self { + Option::Deny(lint) => { + command.arg("--deny").arg(lint.as_ref()); + } + } + } +} diff --git a/build/ci_utils/src/programs/rustup.rs b/build/ci_utils/src/programs/rustup.rs new file mode 100644 index 0000000000..713a0a7f11 --- /dev/null +++ b/build/ci_utils/src/programs/rustup.rs @@ -0,0 +1,27 @@ +use crate::prelude::*; + + + +pub mod env { + /// The Rust toolchain version which was selected by Rustup. + /// + /// If set, any cargo invocation will follow this version. Otherwise, Rustup will deduce + /// toolchain to be used and set up this variable for the spawned process. + /// + /// Example value: `"nightly-2022-01-20-x86_64-pc-windows-msvc"`. 
+ #[derive(Clone, Copy, Debug)] + pub struct Toolchain; + + impl crate::env::Variable for Toolchain { + const NAME: &'static str = "RUSTUP_TOOLCHAIN"; + } +} + +#[derive(Clone, Copy, Debug)] +pub struct Rustup; + +impl Program for Rustup { + fn executable_name(&self) -> &'static str { + "rustup" + } +} diff --git a/build/ci_utils/src/programs/sbt.rs b/build/ci_utils/src/programs/sbt.rs new file mode 100644 index 0000000000..c2abb236e0 --- /dev/null +++ b/build/ci_utils/src/programs/sbt.rs @@ -0,0 +1,75 @@ +use crate::prelude::*; + + + +macro_rules! strong_string { + ($name:ident($inner_ty:ty)) => { + paste::paste! { + #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash, PartialOrd, Ord)] + pub struct $name(pub <$inner_ty as ToOwned>::Owned); + + impl $name { + pub fn new(inner: impl Into<<$inner_ty as ToOwned>::Owned>) -> Self { + Self(inner.into()) + } + } + + #[derive(Debug, Serialize, PartialEq, Eq, Hash, PartialOrd, Ord)] + pub struct [<$name Ref>]<'a>(pub &'a $inner_ty); + } + }; +} + +strong_string!(Task(str)); + +#[derive(Clone, Copy, Debug, Default)] +pub struct Sbt; + +impl Program for Sbt { + fn executable_name(&self) -> &'static str { + "sbt" + } +} + +impl Sbt { + /// Format a string with a command that will execute all the given tasks concurrently. 
+ pub fn concurrent_tasks(tasks: impl IntoIterator>) -> String { + let mut ret = String::from("all"); + for task in tasks { + ret.push(' '); + ret.push_str(task.as_ref()) + } + ret + } +} + +#[derive(Clone, Debug)] +pub struct SystemProperty { + pub name: String, + pub value: String, +} + +impl SystemProperty { + pub fn new(name: impl Into, value: impl Into) -> Self { + Self { name: name.into(), value: value.into() } + } +} + +impl<'a> IntoIterator for &'a SystemProperty { + type Item = String; + type IntoIter = std::iter::Once; + fn into_iter(self) -> Self::IntoIter { + once(format!("-D{}={}", self.name, self.value)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn format_concurrent_tasks() { + let tasks = ["test", "syntaxJS/fullOptJS"]; + assert_eq!(Sbt::concurrent_tasks(tasks), "all test syntaxJS/fullOptJS"); + } +} diff --git a/build/ci_utils/src/programs/seven_zip.rs b/build/ci_utils/src/programs/seven_zip.rs new file mode 100644 index 0000000000..f8bc4cf3dd --- /dev/null +++ b/build/ci_utils/src/programs/seven_zip.rs @@ -0,0 +1,249 @@ +use crate::prelude::*; + +use snafu::Snafu; + + + +#[derive(Clone, Copy, Debug)] +pub struct SevenZip; + +impl Program for SevenZip { + fn executable_name(&self) -> &'static str { + "7z" + } + fn executable_name_fallback() -> Vec<&'static str> { + // 7zz is reportedly used sometimes on macOS + vec!["7za", "7zz"] + } + + fn default_locations(&self) -> Vec { + if let Ok(program_files) = std::env::var("ProgramFiles") { + let path = PathBuf::from(program_files).join("7-Zip"); + if path.exists() { + return vec![path]; + } + } + vec![] + } + + fn handle_exit_status(status: std::process::ExitStatus) -> anyhow::Result<()> { + if status.success() { + Ok(()) + } else if let Some(code) = status.code() { + Err(ExecutionError::from_exit_code(code).into()) + } else { + Err(ExecutionError::Unknown.into()) + } + } +} + +// Cf https://7zip.bugaco.com/7zip/MANUAL/cmdline/exit_codes.htm +#[derive(Snafu, Copy, Clone, Debug, 
Ord, PartialOrd, Eq, PartialEq)] +pub enum ExecutionError { + #[snafu(display( + "Warning (Non fatal error(s)). For example, one or more files were locked by some \ + other application, so they were not compressed." + ))] + Warning, + #[snafu(display("Fatal error"))] + Fatal, + #[snafu(display("Command line error"))] + CommandLine, + #[snafu(display("Not enough memory for operation"))] + NotEnoughMemory, + #[snafu(display("User stopped the process"))] + UserStopped, + #[snafu(display("Unrecognized error code"))] + Unknown, +} + +impl ExecutionError { + fn from_exit_code(code: i32) -> Self { + match code { + 1 => Self::Warning, + 2 => Self::Fatal, + 7 => Self::CommandLine, + 8 => Self::NotEnoughMemory, + 255 => Self::UserStopped, + _ => Self::Unknown, + } + } +} + +impl SevenZip { + pub fn add_cmd>( + &self, + output_archive: impl AsRef, + paths_to_pack: impl IntoIterator, + ) -> Result { + let output_archive = output_archive.as_ref(); + let mut cmd = self.cmd()?; + cmd.arg(ArchiveCommand::Add).args(Switch::AssumeYes).arg(output_archive); + for path in paths_to_pack { + cmd.arg(path.as_ref()); + } + Ok(cmd) + } + + /// Removes the old archive under output path if it was present. 
+ pub async fn pack>( + &self, + output_archive: impl AsRef, + paths_to_pack: impl IntoIterator, + ) -> Result { + crate::fs::remove_if_exists(output_archive.as_ref())?; + self.add(output_archive, paths_to_pack).await + } + + pub async fn pack_directory_contents( + self, + output_archive: impl AsRef, + root_directory: impl AsRef, + ) -> Result { + // See: https://superuser.com/a/418708 + self.pack(output_archive, [root_directory.as_ref().join("*")]).await + } + + pub async fn add>( + &self, + output_archive: impl AsRef, + paths_to_pack: impl IntoIterator, + ) -> Result { + self.add_cmd(output_archive, paths_to_pack)?.run_ok().await + } + + pub fn unpack_cmd( + &self, + archive: impl AsRef, + output_directory: impl AsRef, + ) -> Result { + let out_switch = Switch::OutputDirectory(output_directory.as_ref().into()); + let mut cmd = self.cmd()?; + cmd.arg(ArchiveCommand::ExtractWithFullPaths) + .args(Switch::AssumeYes) + .args(out_switch) + .arg(archive.as_ref()); + Ok(cmd) + } + + pub fn unpack_from_stdin_cmd(&self, output_directory: impl AsRef) -> Result { + let out_switch = Switch::OutputDirectory(output_directory.as_ref().into()); + let mut cmd = self.cmd()?; + cmd.arg(ArchiveCommand::ExtractWithFullPaths) + .args(Switch::AssumeYes) + .args(out_switch) + .args(Switch::ReadFromStdin); + Ok(cmd) + } +} + +#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub enum ArchiveCommand { + Add, + ExtractWithFullPaths, +} + +impl AsRef for ArchiveCommand { + fn as_ref(&self) -> &OsStr { + match self { + Self::Add => "a", + Self::ExtractWithFullPaths => "x", + } + .as_ref() + } +} + +// https://sevenzip.osdn.jp/chm/cmdline/switches/index.htm +#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub enum Switch { + OutputDirectory(PathBuf), + AssumeYes, + OverwriteMode(OverwriteMode), + RedirectStream(StreamType, StreamDestination), + SetCharset(Charset), + /// Read data from standard input, rather than from a file. 
+ ReadFromStdin, +} + +#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub enum OverwriteMode { + OverwriteAll, + SkipExisting, + AutoRenameExtracted, + AutoRenameExisting, +} + +#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub enum StreamType { + StandardOutput, + ErrorOutput, + ProgressInformation, +} + +impl From for OsString { + fn from(value: StreamType) -> Self { + match value { + StreamType::StandardOutput => "o", + StreamType::ErrorOutput => "e", + StreamType::ProgressInformation => "p", + } + .into() + } +} + +#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub enum StreamDestination { + DisableStream, + RedirectToStdout, + RedirectToStderr, +} + +impl From for OsString { + fn from(value: StreamDestination) -> Self { + match value { + StreamDestination::DisableStream => "0", + StreamDestination::RedirectToStdout => "1", + StreamDestination::RedirectToStderr => "2", + } + .into() + } +} + +#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub enum Charset { + Utf8, + Win, + Dos, +} + +impl From for OsString { + fn from(value: Charset) -> Self { + match value { + Charset::Utf8 => "UTF-8", + Charset::Win => "WIN", + Charset::Dos => "DOS", + } + .into() + } +} + +impl IntoIterator for Switch { + type Item = OsString; + type IntoIter = std::vec::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + use OverwriteMode::*; + match self { + Self::OutputDirectory(dir) => vec![format!("-o{}", dir.display()).into()], + Self::AssumeYes => vec!["-y".into()], + Self::OverwriteMode(OverwriteAll) => vec!["-aoa".into()], + Self::OverwriteMode(SkipExisting) => vec!["-aos".into()], + Self::OverwriteMode(AutoRenameExtracted) => vec!["-aou".into()], + Self::OverwriteMode(AutoRenameExisting) => vec!["-aot".into()], + Self::RedirectStream(str, dest) => vec!["-bs".into(), str.into(), dest.into()], + Self::SetCharset(charset) => vec!["-scc".into(), charset.into()], + Self::ReadFromStdin => vec!["-si".into()], + } + 
.into_iter() + } +} diff --git a/build/ci_utils/src/programs/sh.rs b/build/ci_utils/src/programs/sh.rs new file mode 100644 index 0000000000..7c08e926ee --- /dev/null +++ b/build/ci_utils/src/programs/sh.rs @@ -0,0 +1,39 @@ +use crate::prelude::*; + + + +#[derive(Clone, Copy, Debug)] +pub struct Sh; + +impl Program for Sh { + fn executable_name(&self) -> &'static str { + "sh" + } +} + +#[derive(Clone, Copy, Debug)] +pub struct Bash; + +impl Program for Bash { + fn executable_name(&self) -> &'static str { + "bash" + } +} + +impl Shell for Bash { + fn run_command(&self) -> Result { + let mut cmd = Bash.cmd()?; + cmd.arg("-c"); + Ok(cmd) + } + + fn run_script(&self, script_path: impl AsRef) -> Result { + let mut cmd = Bash.cmd()?; + cmd.arg(script_path.as_ref()); + Ok(cmd) + } + + fn run_shell(&self) -> Result { + self.cmd() + } +} diff --git a/build/ci_utils/src/programs/tar.rs b/build/ci_utils/src/programs/tar.rs new file mode 100644 index 0000000000..598fd6927a --- /dev/null +++ b/build/ci_utils/src/programs/tar.rs @@ -0,0 +1,322 @@ +use crate::prelude::*; + +use crate::archive::Format; + +use std::vec::IntoIter; + + + +pub mod bsd { + use super::*; + + /// Options specific for `bsdtar`. + #[derive(Clone, Copy, Debug, Ord, PartialOrd, Eq, PartialEq)] + pub enum Switch { + /// Symbolic links named on the command line will be followed; the target of the link will + /// be archived, not the link itself. 
+ FollowSymlinksInCommand, + } + + impl AsRef for Switch { + fn as_ref(&self) -> &OsStr { + match self { + Switch::FollowSymlinksInCommand => "-H", + } + .as_ref() + } + } +} + +#[derive(Clone, Copy, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub enum Compression { + Bzip2, + Gzip, + Lzma, + Xz, +} + +impl Compression { + pub fn deduce_from_extension(extension: impl AsRef) -> Result { + let extension = extension.as_ref().to_str().unwrap(); + if extension == "bz2" { + Ok(Compression::Bzip2) + } else if extension == "gz" { + Ok(Compression::Gzip) + } else if extension == "lzma" { + Ok(Compression::Lzma) + } else if extension == "xz" { + Ok(Compression::Xz) + } else { + bail!("The extension `{}` does not denote a supported compression algorithm for TAR archives.", extension) + } + } +} + +impl Display for Compression { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + use Compression::*; + write!(f, "{}", match self { + Bzip2 => "bzip2", + Gzip => "gzip", + Lzma => "lzma", + Xz => "xz", + }) + } +} + +impl AsRef for Compression { + fn as_ref(&self) -> &str { + match self { + Compression::Bzip2 => "-j", + Compression::Gzip => "-z", + Compression::Lzma => "--lzma", + Compression::Xz => "-J", + } + } +} + +impl AsRef for Compression { + fn as_ref(&self) -> &OsStr { + let str: &str = self.as_ref(); + str.as_ref() + } +} + +#[derive(Clone, Copy, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub enum Switch<'a> { + TargetFile(&'a Path), + Verbose, + UseFormat(Compression), + WorkingDir(&'a Path), +} + +impl<'a> IntoIterator for &'a Switch<'a> { + type Item = &'a OsStr; + type IntoIter = IntoIter<&'a OsStr>; + + fn into_iter(self) -> Self::IntoIter { + match self { + Switch::TargetFile(tgt) => vec!["-f".as_ref(), tgt.as_ref()], + Switch::Verbose => vec!["--verbose".as_ref()], + Switch::UseFormat(compression) => vec![compression.as_ref()], + Switch::WorkingDir(dir) => vec!["--directory".as_ref(), dir.as_ref()], + } + .into_iter() + } +} + +#[derive(Clone, Copy, Debug, 
Ord, PartialOrd, Eq, PartialEq)] +pub enum Flavor { + Gnu, + Bsd, +} + +impl Flavor { + pub fn from_version_text(text: &str) -> Result { + if text.contains("bsdtar") { + Ok(Flavor::Bsd) + } else if text.contains("GNU tar") { + Ok(Flavor::Gnu) + } else { + bail!("The output of `tar --version` does not contain a recognizable flavor. The version text was: {text}") + } + } +} + +#[derive(Clone, Copy, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub enum Command { + Append, + Create, + Extract, + List, +} + +impl AsRef for Command { + fn as_ref(&self) -> &str { + match self { + Command::Append => "-r", + Command::Create => "-c", + Command::Extract => "-x", + Command::List => "-t", + } + } +} + +impl AsRef for Command { + fn as_ref(&self) -> &OsStr { + let str: &str = self.as_ref(); + str.as_ref() + } +} + +#[derive(Clone, Copy, Debug)] +pub struct Tar; + +impl Program for Tar { + fn executable_name(&self) -> &'static str { + "tar" + } +} + +impl Tar { + pub async fn flavor(&self) -> Result { + let text = self.version_string().await?; + Flavor::from_version_text(&text) + } + + #[context("Failed to crate an archive {}.", output_archive.as_ref().display())] + pub fn pack_cmd>( + &self, + output_archive: impl AsRef, + paths_to_pack: impl IntoIterator, + ) -> Result { + let mut cmd = self.cmd()?; + cmd.arg(Command::Create); + + if let Ok(Format::Tar(Some(compression))) = Format::from_filename(&output_archive) { + cmd.args(&Switch::UseFormat(compression)); + } + + cmd.args(&Switch::TargetFile(output_archive.as_ref())); + + let paths: Vec = + paths_to_pack.into_iter().map(|path| path.as_ref().to_owned()).collect(); + + match paths.as_slice() { + [item] => + if let Some(parent) = crate::fs::canonicalize(item)?.parent() { + cmd.args(&Switch::WorkingDir(parent)); + cmd.arg(item.file_name().unwrap()); // None can happen only when path ends with + // ".." 
- that's why we canonicalize + }, + // [dir] if dir.is_dir() => { + // cmd.args(&Switch::WorkingDir(dir.to_owned())); + // cmd.arg("."); + // } + _ => { + todo!("") + } /* paths => { + * if let Some(parent) = output_archive.as_ref().parent() { + * cmd.arg(Switch::WorkingDir(parent.to_owned()).format_arguments()); + * for path_to_pack in paths { + * if path_to_pack.is_absolute() { + * pathdiff::diff_paths(parent, path_to_pack).ok_or_else(|| + * anyhow!("failed to relativize paths {} {}", parent, path_to_pack)) + * } + * cmd.arg(&path_to_pack); + * }, + * } + * } */ + } + + + Ok(cmd) + // cmd_from_args![Command::Create, val [switches], output_archive.as_ref(), ref + // [paths_to_pack]] + } + + pub async fn pack>( + self, + output_archive: impl AsRef, + paths_to_pack: impl IntoIterator, + ) -> Result { + self.pack_cmd(output_archive, paths_to_pack)?.run_ok().await + } + + pub async fn pack_directory_contents( + self, + compression: Option, + output_archive: impl AsRef, + root_directory: impl AsRef, + ) -> Result { + // See: https://stackoverflow.com/a/3035446 + let mut cmd = self.cmd()?; + cmd.arg(Command::Create) + .args(compression) + .args(&Switch::TargetFile(output_archive.as_ref())) + .args(&Switch::WorkingDir(root_directory.as_ref())); + if TARGET_OS == OS::Windows && Tar.flavor().await.contains(&Flavor::Bsd) { + // Used only when `tar` is `bsdtar`. This is the default + // but e.g. Git can come with its own non-bsd tar. GNU tar does not support this option. + // + // This flag is to tell `tar` to resolve symlinks that appear on the command line. + // On Windows when "." is a symlink, only the symlink is archived otherwise. + cmd.arg(bsd::Switch::FollowSymlinksInCommand); + } + + cmd.arg(".").run_ok().await + } + + pub async fn unpack( + &self, + archive: impl AsRef, + output_directory: impl AsRef, + ) -> Result { + crate::fs::tokio::create_dir_if_missing(&output_directory).await?; + self.cmd()? 
+ .arg(Command::Extract) + .args(&Switch::TargetFile(archive.as_ref())) + .args(&Switch::WorkingDir(output_directory.as_ref())) + .run_ok() + .await + } +} + + +#[cfg(test)] +pub mod tests { + use super::*; + use crate::archive::extract_to; + use crate::archive::pack_directory_contents; + use crate::log::setup_logging; + + #[test] + fn deduce_format_from_extension() { + let expect_ok = |str: &str, expected: Compression| { + assert_eq!(Compression::deduce_from_extension(OsStr::new(str)).unwrap(), expected); + }; + + expect_ok("bz2", Compression::Bzip2); + expect_ok("gz", Compression::Gzip); + expect_ok("lzma", Compression::Lzma); + expect_ok("xz", Compression::Xz); + } + + #[tokio::test] + async fn test_directory_packing() -> Result { + setup_logging()?; + let archive_temp = tempfile::tempdir()?; + let archive_path = archive_temp.path().join("archive.tar.gz"); + + + let temp = tempfile::tempdir()?; + let filename = "bar.txt"; + crate::fs::tokio::write(temp.path().join(filename), "bar contents").await?; + + let linked_temp = archive_temp.path().join("linked"); + symlink::symlink_dir(temp.path(), &linked_temp)?; + + pack_directory_contents(&archive_path, &linked_temp).await?; + assert!(archive_path.exists()); + assert!(archive_path.metadata()?.len() > 0); + + let temp2 = tempfile::tempdir()?; + extract_to(&archive_path, temp2.path()).await?; + assert!(temp2.path().join(filename).exists()); + assert_eq!( + crate::fs::tokio::read(temp2.path().join(filename)).await?, + "bar contents".as_bytes() + ); + + + Ok(()) + } + + #[test] + #[ignore] + fn pack_command_test() { + let cmd = Tar.pack_cmd("output.tar.gz", ["target.bmp"]).unwrap(); + debug!("{:?}", cmd); + dbg!(cmd); + } +} diff --git a/build/ci_utils/src/programs/vs.rs b/build/ci_utils/src/programs/vs.rs new file mode 100644 index 0000000000..f86189288c --- /dev/null +++ b/build/ci_utils/src/programs/vs.rs @@ -0,0 +1,54 @@ +use crate::prelude::*; + +use crate::programs::cmd; +use crate::programs::vswhere::VsWhere; + + 
+ +/// Microsoft C/C++ Optimizing compiler. +/// +/// A possible component of Microsoft Visual Studio IDE, or part of the self-contained Microsoft +/// Visual C++ Build Tools. +#[derive(Clone, Copy, Debug)] +pub struct Cl; + +impl Program for Cl { + fn executable_name(&self) -> &'static str { + "cl" + } +} + +pub async fn apply_dev_environment() -> Result { + let msvc = VsWhere::msvc().await?; + let path = msvc.installation_path.join_iter(["VC", "Auxiliary", "Build", "vcvarsall.bat"]); + let changes = cmd::compare_env(|command| { + // The telemetry introduces undesired dependency on Power Shell. We should not need it to + // just set a few environment variables. + command.arg(path).arg("x64").env("VSCMD_SKIP_SENDTELEMETRY", "true") + }) + .await?; + for change in changes { + change.apply()?; + } + Ok(()) +} + +/// Serialization follows the VS Where `productLineVersion` format. +#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Debug, Serialize, Deserialize)] +pub enum Version { + #[serde(rename = "2017")] + VS2017, + #[serde(rename = "2019")] + VS2019, + #[serde(rename = "2022")] + VS2022, +} + +#[tokio::test] +#[ignore] +async fn foo() -> Result { + // let old_vars = dbg!(std::env::vars_os().map(|(name, _)| name).collect_vec()); + apply_dev_environment().await?; + // let new_vars = dbg!(std::env::vars_os().collect_vec()); + Ok(()) +} diff --git a/build/ci_utils/src/programs/vswhere.rs b/build/ci_utils/src/programs/vswhere.rs new file mode 100644 index 0000000000..865b1e527d --- /dev/null +++ b/build/ci_utils/src/programs/vswhere.rs @@ -0,0 +1,272 @@ +use crate::prelude::*; + + + +#[derive(Clone, Copy, Debug)] +pub struct VsWhere; + +impl Program for VsWhere { + fn default_locations(&self) -> Vec { + let dir_opt = crate::platform::win::program_files_x86() + .map(|program_files| program_files.join("Microsoft Visual Studio").join("Installer")); + Vec::from_iter(dir_opt) + } + + fn executable_name(&self) -> &'static str { + "vswhere" + } +} + +impl VsWhere { + pub 
async fn find_all_with(component: Component) -> Result> { + let mut command = VsWhere.cmd()?; + command + .args(Option::Format(Format::Json).format_arguments()) + .args(Option::Required(vec![component]).format_arguments()) + .args(Option::ForceUTF8.format_arguments()); + + let stdout = command.run_stdout().await?; + serde_json::from_str(&stdout).anyhow_err() + } + + pub async fn find_with(component: Component) -> Result { + let mut command = VsWhere.cmd()?; + command + .args(Option::Format(Format::Json).format_arguments()) + .args(Option::Required(vec![component]).format_arguments()) + .args(Option::ForceUTF8.format_arguments()) + .args(["-products", "*"]); // FIXME add types + + let stdout = command.run_stdout().await?; + let instances = serde_json::from_str::>(&stdout)?; + Ok(instances.into_iter().next().ok_or(NoMsvcInstallation)?) + } + + /// Looks up installation of Visual Studio that has installed + /// `MSVC v142 - VS 2019 C++ x64/x86 build tools (v14.28)` component. + /// E.g. "C:\Program Files (x86)\Microsoft Visual Studio\2019\Community" + pub async fn msvc() -> Result { + Self::find_with(Component::CppBuildTools).await + } + + pub async fn with_msbuild() -> Result { + Self::find_with(Component::MsBuild).await + } +} + +#[derive(Clone, Copy, Debug, Snafu)] +#[snafu(display("failed to find a MSVC installation"))] +pub struct NoMsvcInstallation; + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct InstanceInfo { + pub install_date: chrono::DateTime, + /// Example: C:\\Program Files\\Microsoft Visual Studio\\2022\\Community + pub installation_path: PathBuf, + pub installation_version: String, + pub is_prerelease: bool, + pub display_name: String, + pub catalog: Catalog, +} + +#[derive(Serialize, Deserialize, Debug, Clone, Copy)] +#[serde(rename_all = "camelCase")] +pub struct Catalog { + pub product_line_version: crate::programs::vs::Version, + /* "buildBranch": "d16.8", + * "buildVersion": "16.8.30711.63", + * "id": 
"VisualStudio/16.8.1+30711.63", + * "localBuild": "build-lab", + * "manifestName": "VisualStudio", + * "manifestType": "installer", + * "productDisplayVersion": "16.8.1", + * "productLine": "Dev16", + * "productMilestone": "RTW", + * "productMilestoneIsPreRelease": "False", + * "productName": "Visual Studio", + * "productPatchVersion": "1", + * "productPreReleaseMilestoneSuffix": "1.0", + * "productSemanticVersion": "16.8.1+30711.63", + * "requiredEngineVersion": "2.8.3267.30329" */ +} + +#[derive(Clone, Debug)] +pub enum Option { + /// Output format. + Format(Format), + /// One or more workload or component IDs required when finding instances. + /// All specified IDs must be installed unless -requiresAny is specified. + Required(Vec), + /// Forces output to be written as UTF-8, regardless of the code page. + ForceUTF8, +} + +impl Option { + fn format_arguments(&self) -> Vec { + match self { + Self::Format(fmt) => vec!["-format".into(), fmt.into()], + Self::Required(components) => { + let mut args = vec!["-requires".into()]; + for component in components { + args.push(component.into()) + } + args + } + Self::ForceUTF8 => vec!["-utf8".into()], + } + } +} + +#[derive(Clone, Copy, Debug)] +pub enum Format { + Json, + Text, + Value, + Xml, +} + +impl From<&Format> for OsString { + fn from(fmt: &Format) -> Self { + match fmt { + Format::Json => "json", + Format::Text => "text", + Format::Value => "value", + Format::Xml => "xml", + } + .into() + } +} + +// cf. https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-community?view=vs-2019&preserve-view=true +#[derive(Clone, Copy, Debug)] +pub enum Component { + /// MSVC v142 - VS 2019 C++ x64/x86 build tools + CppBuildTools, + /// MSBuild + MsBuild, +} + +impl From<&Component> for OsString { + fn from(value: &Component) -> Self { + match value { + // cf. 
https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-community?view=vs-2019&preserve-view=true + Component::CppBuildTools => "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + Component::MsBuild => "Microsoft.Component.MSBuild", + } + .into() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + #[ignore] + async fn vswhere() { + let _ = dbg!(VsWhere::msvc().await); + } + + #[test] + fn parse() { + let sample_out = r#" +[ + { + "instanceId": "a7578c88", + "installDate": "2019-04-02T19:34:05Z", + "installationName": "VisualStudio/16.8.1+30711.63", + "installationPath": "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community", + "installationVersion": "16.8.30711.63", + "productId": "Microsoft.VisualStudio.Product.Community", + "productPath": "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\Common7\\IDE\\devenv.exe", + "state": 4294967295, + "isComplete": true, + "isLaunchable": true, + "isPrerelease": false, + "isRebootRequired": false, + "displayName": "Visual Studio Community 2019", + "description": "Zaawansowane środowisko IDE — bezpłatne dla uczniów i studentów, współautorów oprogramowania open source oraz indywidualnych osób", + "channelId": "VisualStudio.16.Release", + "channelUri": "https://aka.ms/vs/16/release/channel", + "enginePath": "C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\resources\\app\\ServiceHub\\Services\\Microsoft.VisualStudio.Setup.Service", + "releaseNotes": "https://go.microsoft.com/fwlink/?LinkId=660893#16.8.1", + "thirdPartyNotices": "https://go.microsoft.com/fwlink/?LinkId=660909", + "updateDate": "2020-11-12T21:48:39.0758481Z", + "catalog": { + "buildBranch": "d16.8", + "buildVersion": "16.8.30711.63", + "id": "VisualStudio/16.8.1+30711.63", + "localBuild": "build-lab", + "manifestName": "VisualStudio", + "manifestType": "installer", + "productDisplayVersion": "16.8.1", + "productLine": "Dev16", + "productLineVersion": "2019", + 
"productMilestone": "RTW", + "productMilestoneIsPreRelease": "False", + "productName": "Visual Studio", + "productPatchVersion": "1", + "productPreReleaseMilestoneSuffix": "1.0", + "productSemanticVersion": "16.8.1+30711.63", + "requiredEngineVersion": "2.8.3267.30329" + }, + "properties": { + "campaignId": "535420412.1544277453", + "channelManifestId": "VisualStudio.16.Release/16.8.1+30711.63", + "nickname": "", + "setupEngineFilePath": "C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\vs_installershell.exe" + } + }, + { + "instanceId": "aa771714", + "installDate": "2018-12-08T14:06:40Z", + "installationName": "VisualStudio/15.9.15+28307.812", + "installationPath": "C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community", + "installationVersion": "15.9.28307.812", + "productId": "Microsoft.VisualStudio.Product.Community", + "productPath": "C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community\\Common7\\IDE\\devenv.exe", + "state": 4294967295, + "isComplete": true, + "isLaunchable": true, + "isPrerelease": false, + "isRebootRequired": false, + "displayName": "Visual Studio Community 2017", + "description": "Bezpłatne, w pełni funkcjonalne środowisko IDE dla studentów oraz programistów indywidualnych i tworzących rozwiązania open source", + "channelId": "VisualStudio.15.Release", + "channelUri": "https://aka.ms/vs/15/release/channel", + "enginePath": "C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\resources\\app\\ServiceHub\\Services\\Microsoft.VisualStudio.Setup.Service", + "releaseNotes": "https://go.microsoft.com/fwlink/?LinkId=660692#15.9.15", + "thirdPartyNotices": "https://go.microsoft.com/fwlink/?LinkId=660708", + "updateDate": "2019-08-15T16:21:01.6235246Z", + "catalog": { + "buildBranch": "d15.9", + "buildVersion": "15.9.28307.812", + "id": "VisualStudio/15.9.15+28307.812", + "localBuild": "build-lab", + "manifestName": "VisualStudio", + "manifestType": "installer", + "productDisplayVersion": "15.9.15", + 
"productLine": "Dev15", + "productLineVersion": "2017", + "productMilestone": "RTW", + "productMilestoneIsPreRelease": "False", + "productName": "Visual Studio", + "productPatchVersion": "15", + "productPreReleaseMilestoneSuffix": "1.0", + "productRelease": "RTW", + "productSemanticVersion": "15.9.15+28307.812", + "requiredEngineVersion": "1.18.1049.33485" + }, + "properties": { + "campaignId": "535420412.1544277453", + "channelManifestId": "VisualStudio.15.Release/15.9.15+28307.812", + "nickname": "", + "setupEngineFilePath": "C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\vs_installershell.exe" + } + } +]"#; + let ret = serde_json::from_str::>(sample_out); + assert!(ret.is_ok()); + } +} diff --git a/build/ci_utils/src/programs/wasm_opt.rs b/build/ci_utils/src/programs/wasm_opt.rs new file mode 100644 index 0000000000..a59b288c7e --- /dev/null +++ b/build/ci_utils/src/programs/wasm_opt.rs @@ -0,0 +1,85 @@ +use crate::prelude::*; + +use crate::program::command::Manipulator; +use crate::program::version::IsVersion; + + + +#[derive(Clone, Copy, Debug, strum::Display, strum::EnumString)] +pub enum OptimizationLevel { + /// execute default optimization passes (equivalent to -Os) + O, + /// execute no optimization passes + O0, + /// execute -O1 optimization passes (quick&useful opts, useful for iteration builds) + O1, + /// execute -O2 optimization passes (most opts, generally gets most perf) + O2, + /// execute -O3 optimization passes (spends potentially a lot of time optimizing) + O3, + /// execute -O4 optimization passes (also flatten the IR, which can take a lot more time and + /// memory, but is useful on more nested / complex / less-optimized input) + O4, + /// execute default optimization passes, focusing on code size + Os, + /// execute default optimization passes, super-focusing on code size + Oz, +} + +impl Manipulator for OptimizationLevel { + fn apply(&self, command: &mut C) { + let flag = format!("-{self}"); + command.arg(flag); + } +} + 
+#[derive(Clone, Copy, Debug)] +pub struct Output<'a>(pub &'a Path); + +impl Manipulator for Output<'_> { + fn apply(&self, command: &mut C) { + command.arg("-o").arg(self.0); + } +} + +#[derive(Clone, Copy, Debug)] +pub struct WasmOpt; + +impl Program for WasmOpt { + type Version = Version; + fn executable_name(&self) -> &str { + "wasm-opt" + } +} + +// wasm-opt (like the whole binaryen) uses a single number as a version. +#[derive(Clone, Copy, Debug, Display, PartialEq, PartialOrd, Shrinkwrap, Eq)] +pub struct Version(pub u32); + +impl std::str::FromStr for Version { + type Err = ::Err; + fn from_str(s: &str) -> std::result::Result { + std::str::FromStr::from_str(s).map(Self) + } +} + +impl IsVersion for Version { + fn find_in_text_internal(text: &str) -> Result { + let number_regex = regex::Regex::new(r#"\d+"#)?; + let number_match = number_regex.find(text).context("No number in the given text.")?; + let number_text = number_match.as_str(); + number_text.parse2() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn version_parsing() -> Result { + let sample_version_string = "wasm-opt version 108 (version_108)"; + assert_eq!(WasmOpt.parse_version(sample_version_string)?, Version(108)); + Ok(()) + } +} diff --git a/build/ci_utils/src/programs/wasm_pack.rs b/build/ci_utils/src/programs/wasm_pack.rs new file mode 100644 index 0000000000..98eee722c5 --- /dev/null +++ b/build/ci_utils/src/programs/wasm_pack.rs @@ -0,0 +1,121 @@ +use crate::prelude::*; + +use crate::new_command_type; +use crate::program::command::Manipulator; +use crate::programs::Cargo; + +use tempfile::TempDir; + + + +/// What kind of Cargo build profile should be used. +/// +/// Typically affects optimization, debug symbol generation and so. 
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Display, strum::AsRefStr)] +#[strum(serialize_all = "kebab-case")] +pub enum Profile { + Dev, + Release, + Profile, +} + +impl AsRef for Profile { + fn as_ref(&self) -> &OsStr { + OsStr::new(match self { + Profile::Dev => "--dev", + Profile::Release => "--release", + Profile::Profile => "--profile", + }) + } +} + +#[derive(Clone, Copy, Debug, Display)] +pub enum Target { + Bundler, + NodeJs, + Web, + NoModules, +} + +impl AsRef for Target { + fn as_ref(&self) -> &OsStr { + OsStr::new(match self { + Target::Bundler => "bundler", + Target::NodeJs => "nodejs", + Target::Web => "web", + Target::NoModules => "no-modules", + }) + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug, strum::AsRefStr)] +#[strum(serialize_all = "kebab-case")] +pub enum TestFlags { + Chrome, + Firefox, + Headless, + Node, + Release, + Safari, +} + +impl Manipulator for TestFlags { + fn apply(&self, command: &mut C) { + command.arg(format!("--{}", self.as_ref())); + } +} + +#[derive(Clone, Copy, Debug)] +pub struct WasmPack; + +impl Program for WasmPack { + type Command = WasmPackCommand; + fn executable_name(&self) -> &'static str { + "wasm-pack" + } +} + + +new_command_type! {WasmPack, WasmPackCommand} + +impl WasmPackCommand { + pub fn build(&mut self) -> &mut Self { + self.arg("build") + } + + pub fn test(&mut self) -> &mut Self { + self.arg("test") + } + + pub fn target(&mut self, target: Target) -> &mut Self { + self.arg("--target").arg(target) + } + + /// Sets the output directory with a relative path. + pub fn output_directory(&mut self, output_path: impl AsRef) -> &mut Self { + self.arg("--out-dir").arg(output_path.as_ref()) + } + + /// Sets the output file names. Defaults to package name. + pub fn output_name(&mut self, output_name: impl AsRef) -> &mut Self { + self.arg("--out-name").arg(output_name.as_ref()) + } +} + +// new_command_type! 
{WasmPack, WasmPackBuildCommand} + +pub async fn install_if_missing() -> Result { + let temp = TempDir::new()?; + // We want to run this command in a temporary directory, as to install wasm-pack using a + // system-wide default toolchain, rather than overrides for the current folder (which is likely + // under our repository root). + // + // Note that this will install the tool to the default system-wide location, not temp. + if WasmPack.lookup().is_err() { + Cargo.cmd()?.args(["install", "wasm-pack"]).current_dir(temp.path()).run_ok().await?; + // TODO + // this kind of function likely could use some generalization, that should also cover how + // PATH updates are handled + } + Ok(()) +} diff --git a/build/ci_utils/src/reqwest.rs b/build/ci_utils/src/reqwest.rs new file mode 100644 index 0000000000..04c3b3169b --- /dev/null +++ b/build/ci_utils/src/reqwest.rs @@ -0,0 +1,58 @@ +use crate::prelude::*; + +use reqwest::header::HeaderValue; +use reqwest::header::InvalidHeaderValue; +use std::fmt::Formatter; +use std::ops::RangeInclusive; + + + +#[derive(Clone, Debug)] +pub struct ContentRange { + pub range: RangeInclusive, + pub total: Option, +} + +impl ContentRange { + pub fn whole(len: usize) -> Self { + Self { range: 0..=len.saturating_sub(1), total: Some(len) } + } + + /// Range length in bytes. + pub fn len(&self) -> usize { + 1 + self.range.end() - self.range.start() + } + + /// Check if the range is empty. 
+ pub fn is_empty(&self) -> bool { + self.len() == 0 + } +} + +impl TryFrom for HeaderValue { + type Error = InvalidHeaderValue; + + fn try_from(value: ContentRange) -> std::result::Result { + value.to_string().try_into() + } +} + +impl TryFrom<&ContentRange> for HeaderValue { + type Error = InvalidHeaderValue; + + fn try_from(value: &ContentRange) -> std::result::Result { + value.to_string().try_into() + } +} + +impl Display for ContentRange { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!( + f, + "bytes {}-{}/{}", + self.range.start(), + self.range.end(), + self.total.map_or(String::from("*"), |total| total.to_string()) + ) + } +} diff --git a/build/ci_utils/src/serde.rs b/build/ci_utils/src/serde.rs new file mode 100644 index 0000000000..b04105f281 --- /dev/null +++ b/build/ci_utils/src/serde.rs @@ -0,0 +1,101 @@ +//! Utilities for serialization and deserialization using `serde`. + +use crate::prelude::*; + +use serde::de::Error; +use serde::Deserializer; +use serde::Serializer; + + + +#[derive(Clone, Debug, Deserialize, PartialEq, Eq)] +#[serde(untagged)] +pub enum Either { + Left(T), + Right(U), +} + +impl Either +where T: Into +{ + pub fn into_right(self) -> U { + match self { + Either::Left(t) => t.into(), + Either::Right(r) => r, + } + } +} + +pub trait WithShorthand<'a, Shorthand: Sized + Deserialize<'a>>: Deserialize<'a> { + fn resolve(short: Shorthand) -> Self; + + fn de(de: D) -> std::result::Result + where D: Deserializer<'a> { + Either::::deserialize(de).map(|e| match e { + Either::Left(shorthand) => Self::resolve(shorthand), + Either::Right(value) => value, + }) + } +} + +impl<'a, T: Deserialize<'a>> WithShorthand<'a, T> for Vec { + fn resolve(short: T) -> Self { + vec![short] + } +} + + +#[derive(Clone, Debug, Deserialize, PartialEq, Eq)] +#[serde(untagged)] +pub enum SingleOrSequence { + Single(T), + Sequence(Vec), +} + +impl From> for Vec { + fn from(value: SingleOrSequence) -> Self { + match value { + 
SingleOrSequence::Single(value) => vec![value], + SingleOrSequence::Sequence(values) => values, + } + } +} + +/// Function to be used as `#[serde(deserialize_with="single_or_sequence")]`. +/// +/// It allows deserializing a single T value into Vec, rather than requiring being provided with +/// a single element list for such case. +pub fn single_or_sequence<'de, D, T>(de: D) -> std::result::Result, D::Error> +where + D: Deserializer<'de>, + T: Deserialize<'de>, { + WithShorthand::de(de) +} + +/// Module to be used as `#[serde(with="regex_vec")]` +/// +/// It supports serialization of `Vec` through either a single `String` or `String` sequence. +pub mod regex_vec { + use super::*; + + use regex::Regex; + + /// See [`regex_vec`]. + pub fn serialize(value: &[Regex], ser: S) -> std::result::Result + where S: Serializer { + ser.collect_seq(value.iter().map(Regex::as_str)) + } + + /// See [`regex_vec`]. + pub fn deserialize<'de, D: Deserializer<'de>>( + de: D, + ) -> std::result::Result, D::Error> { + let regex_texts: Vec = single_or_sequence(de)?; + regex_texts + .iter() + .map(String::as_str) + .map(Regex::new) + .collect::, _>>() + .map_err(D::Error::custom) + } +} diff --git a/build/cli/Cargo.toml b/build/cli/Cargo.toml new file mode 100644 index 0000000000..b3782c40c6 --- /dev/null +++ b/build/cli/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "enso-build-cli" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[dependencies] +anyhow = "1.0.57" +byte-unit = { version = "4.0.14", features = ["serde"] } +clap = { version = "3.1.5", features = ["derive", "env", "wrap_help"] } +chrono = "0.4.19" +derivative = "2.2.0" +enso-build = { path = "../build" } +enso-formatter = { path = "../enso-formatter" } +futures = "0.3.17" +futures-util = "0.3.17" +glob = "0.3.0" +humantime = "2.1.0" +ide-ci = { path = "../ci_utils" } +octocrab = { git = "https://github.com/enso-org/octocrab", 
default-features = false, features = [ + "rustls" +] } +serde = { version = "1.0.130", features = ["derive"] } +serde_json = "1.0.68" +serde_yaml = "0.9.10" +strum = { version = "0.24.0", features = ["derive"] } +tempfile = "3.2.0" +tokio = { workspace = true } +toml = "0.5.9" +tracing = { version = "0.1.32" } +tracing-subscriber = "0.3.11" diff --git a/build/cli/src/arg.rs b/build/cli/src/arg.rs new file mode 100644 index 0000000000..8a5f3cc1dd --- /dev/null +++ b/build/cli/src/arg.rs @@ -0,0 +1,278 @@ +use enso_build::prelude::*; + +use clap::Arg; +use clap::ArgEnum; +use clap::Args; +use clap::Parser; +use clap::Subcommand; +use derivative::Derivative; +use ide_ci::cache; +use ide_ci::extensions::path::display_fmt; +use ide_ci::models::config::RepoContext; +use octocrab::models::RunId; + + +// ============== +// === Export === +// ============== + +pub mod backend; +pub mod engine; +pub mod git_clean; +pub mod gui; +pub mod ide; +pub mod java_gen; +pub mod project_manager; +pub mod release; +pub mod runtime; +pub mod wasm; + + + +/// The prefix that will be used when reading the build script arguments from environment. +pub const ENVIRONMENT_VARIABLE_NAME_PREFIX: &str = "ENSO_BUILD"; + +pub const DEFAULT_REMOTE_REPOSITORY_FALLBACK: &str = "enso-org/enso"; + +pub fn default_repo_path() -> Option { + enso_build::repo::deduce_repository_path().ok() +} + +pub fn default_repo_remote() -> RepoContext { + ide_ci::actions::env::GITHUB_REPOSITORY + .get() + .unwrap_or_else(|_| RepoContext::from_str(DEFAULT_REMOTE_REPOSITORY_FALLBACK).unwrap()) +} + +pub fn default_cache_path() -> Option { + cache::default_path().ok() +} + +/// Extensions to the `clap::Arg`, intended to be used as argument attributes. +pub trait ArgExt<'h>: Sized + 'h { + /// Allow setting argument through an environment variable prefixed with Enso Build name. 
+ fn enso_env(self) -> Self; +} + +impl<'h> ArgExt<'h> for Arg<'h> { + fn enso_env(self) -> Self { + self.prefixed_env(ENVIRONMENT_VARIABLE_NAME_PREFIX) + } +} + +/// We pass CLI paths through this to make sure that they are resolved against the initial +/// working directory, not whatever it will be set to later. +pub fn normalize_path(path: &str) -> Result { + let ret = PathBuf::from(path); + let ret = ret.absolutize()?; + Ok(ret.to_path_buf()) +} + +/// Collection of strings used by CLI that are specific to a given target. +/// +/// Having a common interface to them allows reusing code for `clap`-based structures. +pub trait IsTargetSource { + const SOURCE_NAME: &'static str; + const PATH_NAME: &'static str; + const OUTPUT_PATH_NAME: &'static str; + const RUN_ID_NAME: &'static str; + const RELEASE_DESIGNATOR_NAME: &'static str; + const ARTIFACT_NAME_NAME: &'static str; + const DEFAULT_OUTPUT_PATH: &'static str; + + type BuildInput: Clone + Debug + PartialEq + Args + Send + Sync; +} + +pub trait IsWatchableSource: IsTargetSource { + type WatchInput: Clone + Debug + PartialEq + Args + Send + Sync; +} + +#[macro_export] +macro_rules! source_args_hlp { + ($target:ty, $prefix:literal, $inputs:ty) => { + impl $crate::arg::IsTargetSource for $target { + const SOURCE_NAME: &'static str = concat!($prefix, "-", "source"); + const PATH_NAME: &'static str = concat!($prefix, "-", "path"); + const OUTPUT_PATH_NAME: &'static str = concat!($prefix, "-", "output-path"); + const RUN_ID_NAME: &'static str = concat!($prefix, "-", "run-id"); + const RELEASE_DESIGNATOR_NAME: &'static str = concat!($prefix, "-", "release"); + const ARTIFACT_NAME_NAME: &'static str = concat!($prefix, "-", "artifact-name"); + const DEFAULT_OUTPUT_PATH: &'static str = concat!("dist/", $prefix); + + type BuildInput = $inputs; + } + }; +} + +#[allow(clippy::large_enum_variant)] +#[derive(Subcommand, Clone, Debug)] +pub enum Target { + /// Build/Test the Rust part of the GUI. 
+ Wasm(wasm::Target), + /// Build/Run GUI that consists of WASM and JS parts. This is what we deploy to cloud. + Gui(gui::Target), + /// Enso Engine Runtime. + Runtime(runtime::Target), + // /// Project Manager package (just the binary, no Engine) + // ProjectManager(project_manager::Target), + // /// Enso Engine distribution. + // Engine(engine::Target), + /// Build/Get Project Manager bundle (includes Enso Engine with GraalVM Runtime). + Backend(backend::Target), + /// Build/Run/Test IDE bundle (includes GUI and Project Manager). + Ide(ide::Target), + /// Clean the repository. Keeps the IntelliJ's .idea directory intact. WARNING: This removes + /// files that are not under version control in the repository subtree. + GitClean(git_clean::Options), + /// Lint the codebase. + Lint, + /// Apply automatic formatters on the repository. + #[clap(alias = "format")] + Fmt, + /// Release-related subcommand. + Release(release::Target), + /// Regenerate GitHub Actions workflows. + CiGen, + /// Regenerate `syntax2` library (new parser). + JavaGen(java_gen::Target), + /// Check if the changelog has been updated. Requires CI environment. + ChangelogCheck, +} + +/// Build, test and package Enso Engine. +#[derive(Clone, Debug, Parser)] +#[clap(author, version, about, long_about = None)] +pub struct Cli { + /// Path to the directory with sources to be built, typically the root of the 'enso' + /// repository's working copy. + #[clap(long, global = true, maybe_default_os = default_repo_path(), enso_env())] + pub repo_path: PathBuf, + + /// Where build script will cache some of the third-party artifacts (like network downloads). + #[clap(long, global = true, maybe_default_os = default_cache_path(), enso_env())] + pub cache_path: PathBuf, + + /// The GitHub repository with the project. This is mainly used to manage releases (checking + /// released versions to generate a new one, or uploading release assets). + /// The argument should follow the format `owner/repo_name`. 
+ #[clap(long, global = true, default_value_t = default_repo_remote(), enso_env())] + pub repo_remote: RepoContext, + + /// The build kind. Affects the default version generation. + #[clap(long, global = true, arg_enum, default_value_t = enso_build::version::BuildKind::Dev, env = crate::BuildKind::NAME)] + pub build_kind: enso_build::version::BuildKind, + + /// Platform to target. Currently cross-compilation is enabled only for GUI/IDE (without + /// Project Manager) on platforms where Electron Builder supports this. + #[clap(long, global = true, default_value_t = TARGET_OS, enso_env(), possible_values=[OS::Windows.as_str(), OS::Linux.as_str(), OS::MacOS.as_str()])] + pub target_os: OS, + + /// Does not check the program version requirements defined in the build-config.yaml. + #[clap(long, global = true, enso_env())] + pub skip_version_check: bool, + + /// Whether built artifacts should be uploaded as part of CI run. Ignored in non-CI + /// environment. + #[clap(long, global = true, hide = !ide_ci::actions::workflow::is_in_env(), parse(try_from_str), default_value_t = true, enso_env())] + pub upload_artifacts: bool, + + #[clap(subcommand)] + pub target: Target, +} + +/// Describe where to get a target artifacts from. +/// +/// This is the CLI representation of a [crate::source::Source] for a given target. +#[derive(Args, Clone, Debug, PartialEq)] +pub struct Source { + /// How the given target should be acquired. + #[clap(name = Target::SOURCE_NAME, arg_enum, long, default_value_t= SourceKind::Build, + enso_env(), + default_value_if(Target::RUN_ID_NAME, None, Some("ci-run")), + default_value_if(Target::PATH_NAME, None, Some("local")), + default_value_if(Target::RELEASE_DESIGNATOR_NAME, None, Some("release")))] + pub source: SourceKind, + + /// If source is `local`, this argument is used to give the path with the component. + /// If missing, the default would-be output directory for this component shall be used. 
+ #[clap(name = Target::PATH_NAME, long, default_value=Target::DEFAULT_OUTPUT_PATH, enso_env())] + pub path: PathBuf, + + /// If source is `run`, this argument is required to provide CI run ID. + /// + /// `GITHUB_TOKEN` environment variable with "repo" access is required to download CI run + /// artifacts. + #[clap(name = Target::RUN_ID_NAME, long, required_if_eq(Target::SOURCE_NAME, "ci-run"), enso_env())] + pub run_id: Option, + + /// Artifact name to be used when downloading a run artifact. If not set, the default name for + /// given target will be used. + #[clap(name = Target::ARTIFACT_NAME_NAME, long, enso_env())] + pub artifact_name: Option, + + /// If source is `release`, this argument is required to identify a release with asset to + /// download. This can be either the release tag or a predefined placeholder (currently + /// supported one is only 'latest'). + #[clap(name = Target::RELEASE_DESIGNATOR_NAME, long, required_if_eq(Target::SOURCE_NAME, "release"), enso_env())] + pub release: Option, + + /// Used when `SourceKind::Build` is used. + #[clap(flatten)] + pub build_args: Target::BuildInput, + + #[clap(flatten)] + pub output_path: OutputPath, +} + +/// Discriminator denoting how some target artifact should be obtained. +#[derive(ArgEnum, Clone, Copy, Debug, PartialEq, Eq)] +pub enum SourceKind { + /// Target will be built from the target repository's sources. + Build, + /// Already built target will be copied from the local path. + Local, + /// Target will be downloaded from a completed CI run artifact. + CiRun, + /// Target will be downloaded from the CI run that is currently executing this script. + CurrentCiRun, + /// Target will be downloaded from a release asset. + Release, +} + +/// Strongly typed argument for an output directory of a given build target. +#[derive(Args, Clone, Derivative)] +#[derivative(Debug, PartialEq)] +pub struct OutputPath { + /// Directory where artifacts should be placed. 
+ #[derivative(Debug(format_with = "display_fmt"))] + #[clap(name = Target::OUTPUT_PATH_NAME, long, parse(try_from_str=normalize_path), default_value = Target::DEFAULT_OUTPUT_PATH, enso_env())] + pub output_path: PathBuf, + #[derivative(Debug = "ignore", PartialEq(bound = ""))] + #[allow(missing_docs)] + #[clap(skip)] + pub phantom: PhantomData, +} + +impl AsRef for OutputPath { + fn as_ref(&self) -> &Path { + self.output_path.as_path() + } +} + +#[derive(Args, Clone, PartialEq, Derivative)] +#[derivative(Debug)] +pub struct BuildJob { + #[clap(flatten)] + pub input: Target::BuildInput, + #[clap(flatten)] + pub output_path: OutputPath, +} + +#[derive(Args, Clone, PartialEq, Derivative)] +#[derivative(Debug)] +pub struct WatchJob { + #[clap(flatten)] + pub build: BuildJob, + #[clap(flatten)] + pub watch_input: Target::WatchInput, +} diff --git a/build/cli/src/arg/backend.rs b/build/cli/src/arg/backend.rs new file mode 100644 index 0000000000..6535bb8878 --- /dev/null +++ b/build/cli/src/arg/backend.rs @@ -0,0 +1,64 @@ +use enso_build::prelude::*; + +use crate::arg::ArgExt; +use crate::arg::Source; +use crate::source_args_hlp; + +use clap::Args; +use clap::Subcommand; +use enso_build::project; +use enso_build::project::backend::Backend; + + + +#[derive(Args, Clone, Debug, PartialEq)] +pub struct BuildInput { + #[clap(flatten)] + pub runtime: Source, +} + +source_args_hlp!(Backend, "backend", BuildInput); + +#[derive(Subcommand, Clone, Debug, PartialEq)] +pub enum Command { + /// Build the backend from local sources. + #[clap(alias = "get")] + Build { + #[clap(flatten)] + source: Source, + }, + /// Build backend and upload it as a release asset. This command is intended to be run as part + /// of the CI process. + Upload { + #[clap(flatten)] + input: BuildInput, + }, + /// Execute benchmarks. + Benchmark { + /// Execute benchmark code only once. This is not useful for benchmarking, but ensures that + /// the benchmarks can execute without issues. 
+ #[clap(long, enso_env())] + minimal_run: bool, + #[clap(arg_enum)] + which: Vec, + }, + /// Run the tests. + Test { + #[clap(arg_enum, required = true)] + which: Vec, + }, + /// Run an SBT command. + Sbt { + #[clap(last = true)] + command: Vec, + }, + /// Perform the CI check routine for the backend. + CiCheck {}, +} + +#[derive(Args, Clone, Debug, PartialEq)] +pub struct Target { + /// Command for backend package. + #[clap(subcommand)] + pub command: Command, +} diff --git a/build/cli/src/arg/engine.rs b/build/cli/src/arg/engine.rs new file mode 100644 index 0000000000..02cd816798 --- /dev/null +++ b/build/cli/src/arg/engine.rs @@ -0,0 +1,17 @@ +// use enso_build::prelude::*; +// +// use crate::arg::Source; +// use crate::source_args_hlp; +// use clap::Args; +// // use enso_build::project::engine::Engine; +// +// source_args_hlp!(Engine, "engine", BuildInput); +// +// #[derive(Args, Clone, Debug, PartialEq)] +// pub struct BuildInput {} +// +// #[derive(Args, Clone, Debug)] +// pub struct Target { +// #[clap(flatten)] +// pub source: Source, +// } diff --git a/build/cli/src/arg/git_clean.rs b/build/cli/src/arg/git_clean.rs new file mode 100644 index 0000000000..5f3536a146 --- /dev/null +++ b/build/cli/src/arg/git_clean.rs @@ -0,0 +1,13 @@ +use crate::prelude::*; + + + +#[derive(Clone, Copy, Debug, Default, clap::Args)] +pub struct Options { + /// Clean also the build script's cache (located in the user's local application data subtree). + #[clap(long)] + pub cache: bool, + /// Clean also the build script's build artifacts. 
+ #[clap(long)] + pub build_script: bool, +} diff --git a/build/cli/src/arg/gui.rs b/build/cli/src/arg/gui.rs new file mode 100644 index 0000000000..f77e256cb5 --- /dev/null +++ b/build/cli/src/arg/gui.rs @@ -0,0 +1,53 @@ +use enso_build::prelude::*; + +use crate::arg::BuildJob; +use crate::arg::Source; +use crate::arg::WatchJob; +use crate::source_args_hlp; +use crate::IsWatchableSource; + +use clap::Args; +use clap::Subcommand; +use enso_build::project::gui::Gui; +use enso_build::project::wasm::Wasm; + + + +source_args_hlp!(Gui, "gui", BuildInput); + +impl IsWatchableSource for Gui { + type WatchInput = WatchInput; +} + +#[derive(Args, Clone, Debug, PartialEq)] +pub struct BuildInput { + #[clap(flatten)] + pub wasm: Source, +} + +#[derive(Args, Clone, Debug, PartialEq)] +pub struct WatchInput { + #[clap(flatten)] + pub wasm: ::WatchInput, + /// Does not spawn the web-side watcher and dev-server. Instead, a nested shell session will be + /// created, allowing user to run arbitrary commands in gui build environment. + #[clap(long)] + pub gui_shell: bool, +} + +#[derive(Subcommand, Clone, Debug, PartialEq)] +pub enum Command { + /// Builds the GUI from the local sources. + Build(BuildJob), + /// Gets the GUI, either by compiling it from scratch or downloading from an external source. + Get(Source), + /// Continuously rebuilds GUI when its sources are changed and serves it using dev-server. + Watch(WatchJob), +} + +#[derive(Args, Clone, Debug)] +pub struct Target { + /// Command for GUI package. 
+ #[clap(subcommand)] + pub command: Command, +} diff --git a/build/cli/src/arg/ide.rs b/build/cli/src/arg/ide.rs new file mode 100644 index 0000000000..390e508003 --- /dev/null +++ b/build/cli/src/arg/ide.rs @@ -0,0 +1,85 @@ +use crate::prelude::*; + +use crate::arg::OutputPath; +use crate::arg::Source; +use crate::arg::WatchJob; +use crate::source_args_hlp; + +use clap::Args; +use clap::Subcommand; +use enso_build::project::backend::Backend; +use enso_build::project::gui::Gui; +use enso_build::project::wasm::DEFAULT_INTEGRATION_TESTS_WASM_TIMEOUT; +use octocrab::models::ReleaseId; + + + +source_args_hlp!(Target, "ide", BuildInput); + +#[derive(Args, Clone, Debug, PartialEq)] +pub struct BuildInput { + #[clap(flatten)] + pub gui: Source, + #[clap(flatten)] + pub project_manager: Source, + #[clap(flatten)] + pub output_path: OutputPath, +} + +#[derive(Subcommand, Clone, Debug)] +pub enum Command { + /// Builds both Project Manager and GUI, puts them together into a single, client Electron + /// application. + Build { + #[clap(flatten)] + params: BuildInput, + }, + Upload { + #[clap(flatten)] + params: BuildInput, + #[clap(long, env = enso_build::env::ReleaseId::NAME)] + release_id: ReleaseId, + }, + /// Like `Build` but automatically starts the IDE. + Start { + #[clap(flatten)] + params: BuildInput, + /// Additional option to be passed to Enso IDE. Can be used multiple times to pass many + /// arguments. + #[clap(long, allow_hyphen_values = true, enso_env())] + ide_option: Vec, + }, + /// Builds Project Manager and runs it in the background. Builds GUI and runs it using + /// webpack's dev server. + Watch { + #[clap(flatten)] + gui: WatchJob, + #[clap(flatten)] + project_manager: Source, + }, + /// Runs integration tests. This involves building and spawning Project Manager, unless + /// requested otherwise. + IntegrationTest { + /// If set, the project manager won't be spawned. 
+ #[clap(long)] + external_backend: bool, + #[clap(flatten)] + project_manager: Source, + /// Run WASM tests in the headless mode + #[clap(long, parse(try_from_str), default_value_t = true)] + headless: bool, + /// Custom timeout for wasm-bindgen test runner. Supports formats like "300secs" or "5min". + #[clap(long, default_value_t = DEFAULT_INTEGRATION_TESTS_WASM_TIMEOUT.into())] + wasm_timeout: humantime::Duration, + /// Additional options to be appended to the wasm-pack invocation. Note that wasm-pack will + /// further redirect any unrecognized option to the underlying cargo call. + #[clap(last = true)] + wasm_pack_options: Vec, + }, +} + +#[derive(Args, Clone, Debug)] +pub struct Target { + #[clap(subcommand)] + pub command: Command, +} diff --git a/build/cli/src/arg/java_gen.rs b/build/cli/src/arg/java_gen.rs new file mode 100644 index 0000000000..1d32a3ac2b --- /dev/null +++ b/build/cli/src/arg/java_gen.rs @@ -0,0 +1,20 @@ +use crate::prelude::*; + +use clap::Args; +use clap::Subcommand; + + + +#[derive(Subcommand, Clone, Copy, Debug, PartialEq, Eq)] +pub enum Command { + /// Generate Java. + Build, + /// Generate Java and run self-tests. 
+ Test, +} + +#[derive(Args, Clone, Copy, Debug)] +pub struct Target { + #[clap(subcommand)] + pub action: Command, +} diff --git a/build/cli/src/arg/project_manager.rs b/build/cli/src/arg/project_manager.rs new file mode 100644 index 0000000000..cfbd9b9698 --- /dev/null +++ b/build/cli/src/arg/project_manager.rs @@ -0,0 +1,18 @@ +// use enso_build::prelude::*; +// +// use crate::arg::Source; +// use crate::source_args_hlp; +// use enso_build::project::project_manager::ProjectManager; +// +// use clap::Args; +// +// source_args_hlp!(ProjectManager, "project-manager", BuildInput); +// +// #[derive(Args, Clone, Debug, PartialEq)] +// pub struct BuildInput {} +// +// #[derive(Args, Clone, Debug)] +// pub struct Target { +// #[clap(flatten)] +// pub source: Source, +// } diff --git a/build/cli/src/arg/release.rs b/build/cli/src/arg/release.rs new file mode 100644 index 0000000000..61ff3539bc --- /dev/null +++ b/build/cli/src/arg/release.rs @@ -0,0 +1,26 @@ +use crate::prelude::*; + +use clap::Args; +use clap::Subcommand; + + + +#[derive(Args, Clone, Debug)] +pub struct DeployToEcr { + #[clap(long, default_value = enso_build::aws::ecr::runtime::NAME, enso_env())] + pub ecr_repository: String, +} + +#[derive(Subcommand, Clone, Debug)] +pub enum Action { + CreateDraft, + /// Build the runtime image and push it to ECR. 
+ DeployToEcr(DeployToEcr), + Publish, +} + +#[derive(Args, Clone, Debug)] +pub struct Target { + #[clap(subcommand)] + pub action: Action, +} diff --git a/build/cli/src/arg/runtime.rs b/build/cli/src/arg/runtime.rs new file mode 100644 index 0000000000..6bb9cd6106 --- /dev/null +++ b/build/cli/src/arg/runtime.rs @@ -0,0 +1,28 @@ +use crate::prelude::*; + +use crate::source_args_hlp; +use crate::BuildJob; + +use clap::Args; +use clap::Subcommand; +use enso_build::project::runtime::Runtime; + + + +source_args_hlp!(Runtime, "runtime", BuildInput); + +#[derive(Args, Clone, Copy, Debug, PartialEq, Eq)] +pub struct BuildInput {} + +#[derive(Subcommand, Clone, Debug, PartialEq)] +pub enum Command { + /// Build the WASM package. + Build(BuildJob), +} + +#[derive(Args, Clone, Debug)] +pub struct Target { + /// Command for Engine Runtime component. + #[clap(subcommand, name = "command")] + pub command: Command, +} diff --git a/build/cli/src/arg/wasm.rs b/build/cli/src/arg/wasm.rs new file mode 100644 index 0000000000..ed4049056f --- /dev/null +++ b/build/cli/src/arg/wasm.rs @@ -0,0 +1,125 @@ +use enso_build::prelude::*; + +use crate::arg::ArgExt; +use crate::arg::Source; +use crate::arg::WatchJob; +use crate::source_args_hlp; +use crate::BuildJob; +use crate::IsWatchableSource; + +use clap::ArgEnum; +use clap::Args; +use clap::Subcommand; +use enso_build::project::wasm::Wasm; +use std::sync::OnceLock; + + +// ============== +// === Export === +// ============== + +pub use enso_build::project::wasm::Profile; + + + +source_args_hlp!(Wasm, "wasm", BuildInput); + +impl IsWatchableSource for Wasm { + type WatchInput = WatchInput; +} + +static DEFAULT_WASM_SIZE_LIMIT: OnceLock = OnceLock::new(); + +pub fn initialize_default_wasm_size_limit(limit: byte_unit::Byte) -> Result { + DEFAULT_WASM_SIZE_LIMIT + .set(limit.get_appropriate_unit(true).to_string()) + .map_err(|e| anyhow!("WASM size limit was already set to {e}.")) +} + +// Follows hierarchy defined in 
lib/rust/profiler/src/lib.rs +#[derive(ArgEnum, Clone, Copy, Debug, PartialEq, Eq)] +pub enum ProfilingLevel { + Objective, + Task, + Detail, + Debug, +} + +impl From for enso_build::project::wasm::ProfilingLevel { + fn from(profile: ProfilingLevel) -> Self { + match profile { + ProfilingLevel::Objective => Self::Objective, + ProfilingLevel::Task => Self::Task, + ProfilingLevel::Detail => Self::Detail, + ProfilingLevel::Debug => Self::Debug, + } + } +} + +#[derive(Args, Clone, Debug, PartialEq, Eq)] +pub struct BuildInput { + /// Which crate should be treated as a WASM entry point. Relative path from source root. + #[clap(default_value = enso_build::project::wasm::DEFAULT_TARGET_CRATE, long, enso_env())] + pub crate_path: PathBuf, + + /// Profile that is passed to wasm-pack. + #[clap(long, arg_enum, default_value_t = Profile::Release, enso_env())] + pub wasm_profile: Profile, + + /// Additional options to be passed to wasm-opt. Might overwrite the optimization flag + /// resulting from 'wasm_profile' setting. + #[clap(long, allow_hyphen_values = true, enso_env())] + pub wasm_opt_option: Vec, + + /// Do not invoke wasm-opt, even if it is part of current profile. + #[clap(long, conflicts_with = "wasm-opt-option", enso_env())] + pub skip_wasm_opt: bool, + + /// Additional options to be passed to Cargo. + #[clap(last = true, enso_env())] + pub cargo_options: Vec, + + /// Compiles Enso with given profiling level. If not set, defaults to minimum. + #[clap(long, arg_enum, enso_env())] + pub profiling_level: Option, + + /// Fail the build if compressed WASM exceeds the specified size. Supports format like + /// "4.06MiB". Pass "0" to disable check. + #[clap(long, enso_env(), default_value_if("skip_wasm_opt", Some("true"), Some("0")), maybe_default = DEFAULT_WASM_SIZE_LIMIT.get())] + pub wasm_size_limit: Option, +} + +#[derive(Args, Clone, Debug, PartialEq, Eq)] +pub struct WatchInput { + /// Additional option to be passed to Cargo. 
Can be used multiple times to pass many arguments. + #[clap(long, allow_hyphen_values = true, enso_env())] + pub cargo_watch_option: Vec, +} + +#[derive(Subcommand, Clone, Debug, PartialEq)] +pub enum Command { + /// Build the WASM package. + Build(BuildJob), + /// Lint the codebase. + Check, + /// Get the WASM artifacts from arbitrary source (e.g. release). + Get(Source), + /// Start an ongoing watch process that rebuilds WASM when its sources are touched. + Watch(WatchJob), + /// Run the unit tests. + Test { + /// Skip the native (non-WASM) Rust tests. + #[clap(long)] + no_native: bool, + /// Skip the WASM Rust tests. + #[clap(long)] + no_wasm: bool, + }, +} + +#[derive(Args, Clone, Debug)] +pub struct Target { + /// Command for WASM part of GUI (aka the Rust part). + #[clap(subcommand, name = "command")] + pub command: Command, +} diff --git a/build/cli/src/bin/enso-build4/main.rs b/build/cli/src/bin/enso-build4/main.rs new file mode 100644 index 0000000000..a09d602460 --- /dev/null +++ b/build/cli/src/bin/enso-build4/main.rs @@ -0,0 +1,33 @@ +// === Features === +#![feature(default_free_fn)] +// === Standard Linter Configuration === +#![deny(non_ascii_idents)] +#![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] +#![allow(clippy::let_and_return)] + +use enso_build::prelude::*; + +use ide_ci::actions::workflow::MessageLevel; +use ide_ci::log::setup_logging; + + + +#[tokio::main] +async fn main() -> Result { + setup_logging()?; + + ide_ci::actions::workflow::debug("Debug"); + ide_ci::actions::workflow::message(MessageLevel::Debug, "Debug2"); + ide_ci::actions::workflow::message(MessageLevel::Notice, "Notice"); + ide_ci::actions::workflow::message(MessageLevel::Warning, "Warning"); + ide_ci::actions::workflow::message(MessageLevel::Error, "Error"); + + println!("Hello"); + trace!("Hello"); + debug!("Hello"); + info!("Hello"); + warn!("Hello"); + error!("Hello"); + Ok(()) +} diff --git a/build/cli/src/bin/enso-disable-wasm-opt.rs 
b/build/cli/src/bin/enso-disable-wasm-opt.rs new file mode 100644 index 0000000000..7eb6f0d3d8 --- /dev/null +++ b/build/cli/src/bin/enso-disable-wasm-opt.rs @@ -0,0 +1,94 @@ +//! This script is used to disable the `wasm-opt` optimization in the crates that can be used as +//! WASM entry points. Unfortunately, wasm-pack does not allow for disabling wasm-opt through a +//! command line flag, so we have to disable it by setting an appropriate flag in each Cargo.toml. + +// === Features === +#![feature(option_result_contains)] +#![feature(associated_type_bounds)] +// === Standard Linter Configuration === +#![deny(non_ascii_idents)] +#![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] +#![allow(clippy::let_and_return)] + +use enso_build_cli::prelude::*; + +use enso_build::paths::parent_cargo_toml; +use enso_build::repo::deduce_repository_path; +use ide_ci::log::setup_logging; + + + +/// Path in the Cargo.toml file where the `wasm-opt` flag is stored. +/// +/// This flag controls whether wasm-pack shall invoke wasm-opt on the generated wasm file. +const WASM_OPT_PATH: [&str; 6] = + ["package", "metadata", "wasm-pack", "profile", "release", "wasm-opt"]; + +/// Piece of code that will disable wasm-opt when added to Cargo.toml. +pub fn suffix_that_disables_wasm_opt() -> String { + let without_last = WASM_OPT_PATH[..WASM_OPT_PATH.len() - 1].join("."); + let last = WASM_OPT_PATH.last().unwrap(); + format!( + r#" +# Stop wasm-pack from running wasm-opt, because we run it from our build scripts in order to customize options. +[{without_last}] +{last} = false"# + ) +} + +/// Check if the Rust source file under given path contains a WASM entry point. +pub fn contains_entry_point(path: impl AsRef) -> Result { + Ok(ide_ci::fs::read_to_string(path)?.contains("#[entry_point")) +} + +/// Retrieve item by repeatedly indexing. 
+pub fn traverse( + item: &toml::Value, + keys: impl IntoIterator>, +) -> Option<&toml::Value> { + keys.into_iter().try_fold(item, |item, key| item.get(key.as_ref())) +} + +/// Check if the given (parsed) Cargo.toml has already disabled wasm-opt. +fn has_wasm_opt_disabled(document: &toml::Value) -> bool { + let wasm_opt_entry = traverse(document, WASM_OPT_PATH); + wasm_opt_entry.and_then(toml::Value::as_bool).contains(&false) +} + +/// Disable wasm-opt in the Cargo.toml file. +/// +/// Does nothing if wasm-opt is already disabled. +fn disable_wasm_opt_in_cargo_toml(path: impl AsRef) -> Result { + assert!(path.as_ref().is_file()); + assert_eq!(path.as_ref().file_name().unwrap(), "Cargo.toml"); + let doc = toml::Value::from_str(&ide_ci::fs::read_to_string(&path)?)?; + if !has_wasm_opt_disabled(&doc) { + info!("Disabling wasm-opt in {}", path.as_ref().display()); + ide_ci::fs::append(path, suffix_that_disables_wasm_opt())?; + } else { + info!("wasm-opt is already disabled in {}", path.as_ref().display()); + } + Ok(()) +} + +#[tokio::main] +async fn main() -> Result { + setup_logging()?; + let root = deduce_repository_path()?; + let rs_source_glob = PathBuf::from_iter([root.as_str(), "**", "*.rs"]).display().to_string(); + info!("Searching for Rust source files in {}", rs_source_glob); + let rs_files = glob::glob(&rs_source_glob)?.try_collect_vec()?; + info!("Completed source discovery. 
Found {} files.", rs_files.len()); + + let entry_points: Vec<_> = rs_files.into_iter().try_filter(|p| contains_entry_point(p))?; + info!("{} of them are entry points.", entry_points.len()); + + let cargo_tomls: BTreeSet<_> = entry_points.into_iter().try_map(parent_cargo_toml)?; + info!("They belong to {} crates.", cargo_tomls.len()); + + for cargo_toml in &cargo_tomls { + disable_wasm_opt_in_cargo_toml(cargo_toml)?; + } + Ok(()) +} diff --git a/build/cli/src/bin/enso-remove-draft-releases.rs b/build/cli/src/bin/enso-remove-draft-releases.rs new file mode 100644 index 0000000000..5a9b9d564d --- /dev/null +++ b/build/cli/src/bin/enso-remove-draft-releases.rs @@ -0,0 +1,35 @@ +// === Standard Linter Configuration === +#![deny(non_ascii_idents)] +#![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] +#![allow(clippy::let_and_return)] + +use enso_build_cli::prelude::*; + +use enso_build::setup_octocrab; +use ide_ci::io::web::handle_error_response; +use ide_ci::log::setup_logging; +use ide_ci::models::config::RepoContext; + + + +#[tokio::main] +async fn main() -> Result { + setup_logging()?; + let repo = RepoContext::from_str("enso-org/enso")?; + let octo = setup_octocrab().await?; + + let releases = repo.all_releases(&octo).await?; + let draft_releases = releases.into_iter().filter(|r| r.draft); + for release in draft_releases { + let id = release.id; + + let route = format!("{}repos/{repo}/releases/{id}", octo.base_url); + info!("Will delete {}: {route}.", release.name.unwrap_or_default()); + let response = octo._delete(route, Option::<&()>::None).await?; + handle_error_response(response).await?; + } + + + Ok(()) +} diff --git a/build/cli/src/ci_gen.rs b/build/cli/src/ci_gen.rs new file mode 100644 index 0000000000..0a05434238 --- /dev/null +++ b/build/cli/src/ci_gen.rs @@ -0,0 +1,382 @@ +use crate::prelude::*; + +use crate::ci_gen::job::expose_os_specific_signing_secret; +use crate::ci_gen::job::plain_job; +use crate::ci_gen::job::plain_job_customized; +use 
crate::ci_gen::job::RunsOn; + +use ide_ci::actions::workflow::definition::checkout_repo_step; +use ide_ci::actions::workflow::definition::is_non_windows_runner; +use ide_ci::actions::workflow::definition::is_windows_runner; +use ide_ci::actions::workflow::definition::run; +use ide_ci::actions::workflow::definition::setup_artifact_api; +use ide_ci::actions::workflow::definition::setup_conda; +use ide_ci::actions::workflow::definition::setup_wasm_pack_step; +use ide_ci::actions::workflow::definition::wrap_expression; +use ide_ci::actions::workflow::definition::Branches; +use ide_ci::actions::workflow::definition::Concurrency; +use ide_ci::actions::workflow::definition::Event; +use ide_ci::actions::workflow::definition::Job; +use ide_ci::actions::workflow::definition::JobArchetype; +use ide_ci::actions::workflow::definition::PullRequest; +use ide_ci::actions::workflow::definition::PullRequestActivityType; +use ide_ci::actions::workflow::definition::Push; +use ide_ci::actions::workflow::definition::RunnerLabel; +use ide_ci::actions::workflow::definition::Schedule; +use ide_ci::actions::workflow::definition::Step; +use ide_ci::actions::workflow::definition::Workflow; +use ide_ci::actions::workflow::definition::WorkflowDispatch; +use ide_ci::actions::workflow::definition::WorkflowDispatchInput; +use ide_ci::actions::workflow::definition::WorkflowDispatchInputType; + + +// ============== +// === Export === +// ============== + +pub mod job; +pub mod step; + + + +#[derive(Clone, Copy, Debug)] +pub struct DeluxeRunner; + +#[derive(Clone, Copy, Debug)] +pub struct BenchmarkRunner; + +pub const PRIMARY_OS: OS = OS::Linux; + +pub const TARGETED_SYSTEMS: [OS; 3] = [OS::Windows, OS::Linux, OS::MacOS]; + +pub const DEFAULT_BRANCH_NAME: &str = "develop"; + +/// Secrets set up in our organization. 
+/// +/// To manage, see: https://github.com/organizations/enso-org/settings/secrets/actions +pub mod secret { + // === AWS S3 deploy (release list) === + pub const ARTEFACT_S3_ACCESS_KEY_ID: &str = "ARTEFACT_S3_ACCESS_KEY_ID"; + pub const ARTEFACT_S3_SECRET_ACCESS_KEY: &str = "ARTEFACT_S3_SECRET_ACCESS_KEY"; + + + // === AWS ECR deployment (runtime release to cloud) === + pub const ECR_PUSH_RUNTIME_SECRET_ACCESS_KEY: &str = "ECR_PUSH_RUNTIME_SECRET_ACCESS_KEY"; + pub const ECR_PUSH_RUNTIME_ACCESS_KEY_ID: &str = "ECR_PUSH_RUNTIME_ACCESS_KEY_ID"; + + + // === Apple Code Signing & Notarization === + pub const APPLE_CODE_SIGNING_CERT: &str = "APPLE_CODE_SIGNING_CERT"; + pub const APPLE_CODE_SIGNING_CERT_PASSWORD: &str = "APPLE_CODE_SIGNING_CERT_PASSWORD"; + pub const APPLE_NOTARIZATION_USERNAME: &str = "APPLE_NOTARIZATION_USERNAME"; + pub const APPLE_NOTARIZATION_PASSWORD: &str = "APPLE_NOTARIZATION_PASSWORD"; + + + // === Windows Code Signing === + /// Name of the GitHub Actions secret that stores path to the Windows code signing certificate + /// within the runner. + pub const WINDOWS_CERT_PATH: &str = "MICROSOFT_CODE_SIGNING_CERT"; + + /// Name of the GitHub Actions secret that stores password to the Windows code signing + /// certificate. 
+ pub const WINDOWS_CERT_PASSWORD: &str = "MICROSOFT_CODE_SIGNING_CERT_PASSWORD"; +} + + +impl RunsOn for DeluxeRunner { + fn runs_on(&self) -> Vec { + vec![RunnerLabel::MwuDeluxe] + } + fn os_name(&self) -> Option { + None + } +} + +impl RunsOn for BenchmarkRunner { + fn runs_on(&self) -> Vec { + vec![RunnerLabel::Benchmark] + } + fn os_name(&self) -> Option { + None + } +} + +pub fn on_default_branch_push() -> Push { + Push { inner_branches: Branches::new([DEFAULT_BRANCH_NAME]), ..default() } +} + +pub fn runs_on(os: OS) -> Vec { + match os { + OS::Windows => vec![RunnerLabel::SelfHosted, RunnerLabel::Windows, RunnerLabel::Engine], + OS::Linux => vec![RunnerLabel::SelfHosted, RunnerLabel::Linux, RunnerLabel::Engine], + OS::MacOS => vec![RunnerLabel::MacOSLatest], + _ => todo!("Not supported"), + } +} + +pub fn setup_script_steps() -> Vec { + let mut ret = vec![setup_conda(), setup_wasm_pack_step(), setup_artifact_api()]; + ret.extend(checkout_repo_step()); + ret.push(run("--help").with_name("Build Script Setup")); + ret +} + +pub fn list_everything_on_failure() -> impl IntoIterator { + let win = Step { + name: Some("List files if failed (Windows)".into()), + r#if: Some(format!("failure() && {}", is_windows_runner())), + run: Some("Get-ChildItem -Force -Recurse".into()), + ..default() + }; + + let non_win = Step { + name: Some("List files if failed (non-Windows)".into()), + r#if: Some(format!("failure() && {}", is_non_windows_runner())), + run: Some("ls -lAR".into()), + ..default() + }; + + [win, non_win] +} + +/// The `f` is applied to the step that does an actual script invocation. +pub fn setup_customized_script_steps( + command_line: impl AsRef, + customize: impl FnOnce(Step) -> Vec, +) -> Vec { + use enso_build::ci::labels::CLEAN_BUILD_REQUIRED; + // Check if the pull request has a "Clean required" label. 
+ let pre_clean_condition = + format!("contains(github.event.pull_request.labels.*.name, '{CLEAN_BUILD_REQUIRED}')",); + let post_clean_condition = format!("always() && {pre_clean_condition}"); + + let mut steps = setup_script_steps(); + let clean_step = run("git-clean").with_if(&pre_clean_condition).with_name("Clean before"); + steps.push(clean_step.clone()); + steps.extend(customize(run(command_line))); + steps.extend(list_everything_on_failure()); + steps.push( + clean_step + .with_if(format!("always() && {}", post_clean_condition)) + .with_name("Clean after"), + ); + steps +} + +pub fn setup_script_and_steps(command_line: impl AsRef) -> Vec { + setup_customized_script_steps(command_line, |s| vec![s]) +} + +#[derive(Clone, Copy, Debug)] +pub struct DraftRelease; +impl JobArchetype for DraftRelease { + fn job(os: OS) -> Job { + let name = "Create release draft".into(); + + let prepare_step = run("release create-draft").with_id(Self::PREPARE_STEP_ID); + + let mut steps = setup_script_steps(); + steps.push(prepare_step); + + let mut ret = Job { name, runs_on: runs_on(os), steps, ..default() }; + Self::expose_outputs(&mut ret); + ret + } + + fn outputs() -> BTreeMap> { + let mut ret = BTreeMap::new(); + ret.insert(Self::PREPARE_STEP_ID.into(), vec![ + "ENSO_VERSION".into(), + "ENSO_RELEASE_ID".into(), + ]); + ret + } +} + +impl DraftRelease { + pub const PREPARE_STEP_ID: &'static str = "prepare"; +} + +#[derive(Clone, Copy, Debug)] +pub struct PublishRelease; +impl JobArchetype for PublishRelease { + fn job(os: OS) -> Job { + let mut ret = plain_job(&os, "Publish release", "release publish"); + ret.expose_secret_as(secret::ARTEFACT_S3_ACCESS_KEY_ID, "AWS_ACCESS_KEY_ID"); + ret.expose_secret_as(secret::ARTEFACT_S3_SECRET_ACCESS_KEY, "AWS_SECRET_ACCESS_KEY"); + ret.env("AWS_REGION", "us-west-1"); + ret + } +} + +#[derive(Clone, Copy, Debug)] +pub struct UploadIde; +impl JobArchetype for UploadIde { + fn job(os: OS) -> Job { + plain_job_customized(&os, "Build IDE", 
"ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}", |step| + vec![expose_os_specific_signing_secret(os, step)] + ) + } +} + +/// Generate a workflow that checks if the changelog has been updated (if needed). +pub fn changelog() -> Result { + use PullRequestActivityType::*; + let mut ret = Workflow::new("Changelog"); + ret.on.pull_request(PullRequest::default().with_types([ + Labeled, + Unlabeled, + Synchronize, + Opened, + Reopened, + ])); + ret.add_job(Job { + name: "Changelog".into(), + runs_on: vec![RunnerLabel::X64], + steps: setup_script_and_steps("changelog-check"), + ..default() + }); + Ok(ret) +} + +pub fn nightly() -> Result { + let on = Event { + workflow_dispatch: Some(default()), + // 5am (UTC) from Tuesday to Saturday (i.e. after every workday) + schedule: vec![Schedule::new("0 5 * * 2-6")?], + ..default() + }; + + let linux_only = OS::Linux; + + let concurrency_group = "release"; + let mut workflow = Workflow { + on, + name: "Nightly Release".into(), + concurrency: Some(Concurrency::new(concurrency_group)), + ..default() + }; + + let prepare_job_id = workflow.add::(linux_only); + let build_wasm_job_id = workflow.add::(linux_only); + let mut packaging_job_ids = vec![]; + + // Assumed, because Linux is necessary to deploy ECR runtime image. 
+ assert!(TARGETED_SYSTEMS.contains(&OS::Linux)); + + for os in TARGETED_SYSTEMS { + let backend_job_id = workflow.add_dependent::(os, [&prepare_job_id]); + let build_ide_job_id = workflow.add_dependent::(os, [ + &prepare_job_id, + &backend_job_id, + &build_wasm_job_id, + ]); + packaging_job_ids.push(build_ide_job_id); + + if os == OS::Linux { + let upload_runtime_job_id = workflow + .add_dependent::(os, [&prepare_job_id, &backend_job_id]); + packaging_job_ids.push(upload_runtime_job_id); + } + } + + let publish_deps = { + packaging_job_ids.push(prepare_job_id); + packaging_job_ids + }; + + let _publish_job_id = workflow.add_dependent::(linux_only, publish_deps); + let global_env = [("ENSO_BUILD_KIND", "nightly"), ("RUST_BACKTRACE", "full")]; + for (var_name, value) in global_env { + workflow.env(var_name, value); + } + Ok(workflow) +} + +pub fn typical_check_triggers() -> Event { + Event { + pull_request: Some(default()), + workflow_dispatch: Some(default()), + push: Some(on_default_branch_push()), + ..default() + } +} + +pub fn gui() -> Result { + let on = typical_check_triggers(); + let mut workflow = Workflow { name: "GUI CI".into(), on, ..default() }; + workflow.add::(PRIMARY_OS); + workflow.add::(PRIMARY_OS); + workflow.add::(PRIMARY_OS); + workflow.add::(PRIMARY_OS); + + // FIXME: Integration tests are currently always failing. + // They should be reinstated when fixed. + // workflow.add_customized::(PRIMARY_OS, |job| { + // job.needs.insert(job::BuildBackend::key(PRIMARY_OS)); + // }); + + // Because WASM upload happens only for the Linux build, all other platforms need to depend on + // it. + let wasm_job_linux = workflow.add::(OS::Linux); + for os in TARGETED_SYSTEMS { + if os != OS::Linux { + // Linux was already added above. 
+ let _wasm_job = workflow.add::(os); + } + let project_manager_job = workflow.add::(os); + workflow.add_customized::(os, |job| { + job.needs.insert(wasm_job_linux.clone()); + job.needs.insert(project_manager_job); + }); + } + Ok(workflow) +} + +pub fn backend() -> Result { + let on = typical_check_triggers(); + let mut workflow = Workflow { name: "Engine CI".into(), on, ..default() }; + workflow.add::(PRIMARY_OS); + for os in TARGETED_SYSTEMS { + workflow.add::(os); + } + Ok(workflow) +} + +pub fn benchmark() -> Result { + let just_check_input_name = "just-check"; + let just_check_input = WorkflowDispatchInput { + r#type: WorkflowDispatchInputType::Boolean{default: Some(false)}, + ..WorkflowDispatchInput::new("If set, benchmarks will be only checked to run correctly, not to measure actual performance.", true) + }; + let on = Event { + push: Some(on_default_branch_push()), + workflow_dispatch: Some( + WorkflowDispatch::default().with_input(just_check_input_name, just_check_input), + ), + schedule: vec![Schedule::new("0 5 * * 2-6")?], + ..default() + }; + let mut workflow = Workflow { name: "Benchmark Engine".into(), on, ..default() }; + // Note that we need to use `true == input` instead of `input` because that interprets input as + // `false` rather than empty string. Empty string is not falsy enough. 
+ workflow.env( + "ENSO_BUILD_MINIMAL_RUN", + wrap_expression(format!("true == inputs.{just_check_input_name}")), + ); + + let benchmark_job = + plain_job(&BenchmarkRunner, "Benchmark Engine", "backend benchmark runtime"); + workflow.add_job(benchmark_job); + Ok(workflow) +} + + +pub fn generate(repo_root: &enso_build::paths::generated::RepoRootGithubWorkflows) -> Result { + repo_root.changelog_yml.write_as_yaml(&changelog()?)?; + repo_root.nightly_yml.write_as_yaml(&nightly()?)?; + repo_root.scala_new_yml.write_as_yaml(&backend()?)?; + repo_root.gui_yml.write_as_yaml(&gui()?)?; + repo_root.benchmark_yml.write_as_yaml(&benchmark()?)?; + Ok(()) +} diff --git a/build/cli/src/ci_gen/job.rs b/build/cli/src/ci_gen/job.rs new file mode 100644 index 0000000000..e959c25c23 --- /dev/null +++ b/build/cli/src/ci_gen/job.rs @@ -0,0 +1,211 @@ +use crate::prelude::*; + +use crate::ci_gen::runs_on; +use crate::ci_gen::secret; +use crate::ci_gen::step; + +use ide_ci::actions::workflow::definition::cancel_workflow_action; +use ide_ci::actions::workflow::definition::Job; +use ide_ci::actions::workflow::definition::JobArchetype; +use ide_ci::actions::workflow::definition::RunnerLabel; +use ide_ci::actions::workflow::definition::Step; +use ide_ci::actions::workflow::definition::Strategy; + + + +pub trait RunsOn { + fn strategy(&self) -> Option { + None + } + fn runs_on(&self) -> Vec; + fn os_name(&self) -> Option { + None + } +} + +impl RunsOn for OS { + fn runs_on(&self) -> Vec { + runs_on(*self) + } + fn os_name(&self) -> Option { + Some(self.to_string()) + } +} + +impl RunsOn for Strategy { + fn strategy(&self) -> Option { + Some(self.clone()) + } + + fn runs_on(&self) -> Vec { + vec![RunnerLabel::MatrixOs] + } +} + +pub fn plain_job( + runs_on_info: &impl RunsOn, + name: impl AsRef, + command_line: impl AsRef, +) -> Job { + plain_job_customized(runs_on_info, name, command_line, |s| vec![s]) +} + +pub fn plain_job_customized( + runs_on_info: &impl RunsOn, + name: impl AsRef, + 
command_line: impl AsRef, + f: impl FnOnce(Step) -> Vec, +) -> Job { + let name = if let Some(os_name) = runs_on_info.os_name() { + format!("{} ({})", name.as_ref(), os_name) + } else { + name.as_ref().to_string() + }; + let steps = crate::ci_gen::setup_customized_script_steps(command_line, f); + let runs_on = runs_on_info.runs_on(); + let strategy = runs_on_info.strategy(); + Job { name, runs_on, steps, strategy, ..default() } +} + +#[derive(Clone, Copy, Debug)] +pub struct CancelWorkflow; +impl JobArchetype for CancelWorkflow { + fn job(_os: OS) -> Job { + Job { + name: "Cancel Previous Runs".into(), + // It is important that this particular job runs pretty much everywhere (we use x64, + // as all currently available GH runners have this label). If we limited it only to + // our self-hosted machines (as we usually do), it'd be enqueued after other jobs + // and wouldn't be able to cancel them. + runs_on: vec![RunnerLabel::X64], + steps: vec![cancel_workflow_action()], + ..default() + } + } +} + +#[derive(Clone, Copy, Debug)] +pub struct Lint; +impl JobArchetype for Lint { + fn job(os: OS) -> Job { + plain_job(&os, "Lint", "lint") + } +} + +#[derive(Clone, Copy, Debug)] +pub struct NativeTest; +impl JobArchetype for NativeTest { + fn job(os: OS) -> Job { + plain_job(&os, "Native GUI tests", "wasm test --no-wasm") + } +} + +#[derive(Clone, Copy, Debug)] +pub struct WasmTest; +impl JobArchetype for WasmTest { + fn job(os: OS) -> Job { + plain_job(&os, "WASM GUI tests", "wasm test --no-native") + } +} + +#[derive(Clone, Copy, Debug)] +pub struct IntegrationTest; +impl JobArchetype for IntegrationTest { + fn job(os: OS) -> Job { + plain_job( + &os, + "IDE integration tests", + "ide integration-test --backend-source current-ci-run", + ) + } +} + +#[derive(Clone, Copy, Debug)] +pub struct BuildWasm; +impl JobArchetype for BuildWasm { + fn job(os: OS) -> Job { + plain_job( + &os, + "Build GUI (WASM)", + " --upload-artifacts ${{ runner.os == 'Linux' }} wasm build", + ) + 
} +} + +#[derive(Clone, Copy, Debug)] +pub struct BuildBackend; +impl JobArchetype for BuildBackend { + fn job(os: OS) -> Job { + plain_job(&os, "Build Backend", "backend get") + } +} + +#[derive(Clone, Copy, Debug)] +pub struct UploadBackend; +impl JobArchetype for UploadBackend { + fn job(os: OS) -> Job { + plain_job(&os, "Upload Backend", "backend upload") + } +} + +#[derive(Clone, Copy, Debug)] +pub struct UploadRuntimeToEcr; +impl JobArchetype for UploadRuntimeToEcr { + fn job(os: OS) -> Job { + plain_job_customized(&os, "Upload Runtime to ECR", "release deploy-to-ecr", |step| { + let step = step + .with_env("ENSO_BUILD_ECR_REPOSITORY", enso_build::aws::ecr::runtime::NAME) + .with_secret_exposed_as(secret::ECR_PUSH_RUNTIME_ACCESS_KEY_ID, "AWS_ACCESS_KEY_ID") + .with_secret_exposed_as( + secret::ECR_PUSH_RUNTIME_SECRET_ACCESS_KEY, + "AWS_SECRET_ACCESS_KEY", + ) + .with_env("AWS_DEFAULT_REGION", enso_build::aws::ecr::runtime::REGION); + vec![step] + }) + } +} + +pub fn expose_os_specific_signing_secret(os: OS, step: Step) -> Step { + match os { + OS::Windows => step + .with_secret_exposed_as( + secret::WINDOWS_CERT_PATH, + &enso_build::ide::web::env::WIN_CSC_LINK, + ) + .with_secret_exposed_as( + secret::WINDOWS_CERT_PASSWORD, + &enso_build::ide::web::env::WIN_CSC_KEY_PASSWORD, + ), + OS::MacOS => step + .with_secret_exposed_as(secret::APPLE_CODE_SIGNING_CERT, "CSC_LINK") + .with_secret_exposed_as(secret::APPLE_CODE_SIGNING_CERT_PASSWORD, "CSC_KEY_PASSWORD") + .with_secret_exposed_as(secret::APPLE_NOTARIZATION_USERNAME, "APPLEID") + .with_secret_exposed_as(secret::APPLE_NOTARIZATION_PASSWORD, "APPLEIDPASS") + .with_env("CSC_IDENTITY_AUTO_DISCOVERY", "true"), + _ => step, + } +} + +#[derive(Clone, Copy, Debug)] +pub struct PackageIde; +impl JobArchetype for PackageIde { + fn job(os: OS) -> Job { + plain_job_customized( + &os, + "Package IDE", + "ide build --wasm-source current-ci-run --backend-source current-ci-run", + |step| 
vec![expose_os_specific_signing_secret(os, step)], + ) + } +} + +#[derive(Clone, Copy, Debug)] +pub struct CiCheckBackend; +impl JobArchetype for CiCheckBackend { + fn job(os: OS) -> Job { + plain_job_customized(&os, "Engine", "backend ci-check", |main_step| { + vec![main_step, step::engine_test_reporter(os), step::stdlib_test_reporter(os)] + }) + } +} diff --git a/build/cli/src/ci_gen/step.rs b/build/cli/src/ci_gen/step.rs new file mode 100644 index 0000000000..4738205944 --- /dev/null +++ b/build/cli/src/ci_gen/step.rs @@ -0,0 +1,39 @@ +use crate::prelude::*; + +use enso_build::paths; +use ide_ci::actions::workflow::definition::env_expression; +use ide_ci::actions::workflow::definition::Step; + + + +pub fn test_reporter( + step_name: impl Into, + report_name: impl Into, + path: impl Into, +) -> Step { + Step { + name: Some(step_name.into()), + uses: Some("dorny/test-reporter@v1".into()), + r#if: Some("success() || failure()".into()), + ..default() + } + .with_custom_argument("reporter", "java-junit") + .with_custom_argument("path", path) + .with_custom_argument("path-replace-backslashes", true) + .with_custom_argument("max-annotations", 50) // 50 is the max + .with_custom_argument("name", report_name) +} + +pub fn stdlib_test_reporter(os: OS) -> Step { + let step_name = "Standard Library Test Reporter"; + let report_name = format!("Standard Library Tests ({os})"); + let path = format!("{}/*/*.xml", env_expression(&paths::ENSO_TEST_JUNIT_DIR)); + test_reporter(step_name, report_name, path) +} + +pub fn engine_test_reporter(os: OS) -> Step { + let step_name = "Engine Test Reporter"; + let report_name = format!("Engine Tests ({os})"); + let path = format!("{}/*.xml", env_expression(&paths::ENSO_TEST_JUNIT_DIR)); + test_reporter(step_name, report_name, path) +} diff --git a/build/cli/src/lib.rs b/build/cli/src/lib.rs new file mode 100644 index 0000000000..252e1e4abc --- /dev/null +++ b/build/cli/src/lib.rs @@ -0,0 +1,931 @@ +// === Features === 
+#![feature(option_result_contains)] +#![feature(once_cell)] +#![feature(default_free_fn)] +#![feature(future_join)] +// === Standard Linter Configuration === +#![deny(non_ascii_idents)] +#![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] +#![allow(clippy::let_and_return)] +// === Non-Standard Linter Configuration === +#![warn(missing_copy_implementations)] +#![warn(missing_debug_implementations)] +#![warn(trivial_numeric_casts)] +#![warn(unused_import_braces)] +#![warn(unused_qualifications)] + + +// ============== +// === Export === +// ============== + +pub mod arg; +pub mod ci_gen; + + + +pub mod prelude { + pub use crate::arg::ArgExt as _; + pub use enso_build::prelude::*; +} + +use crate::prelude::*; +use std::future::join; + +use ide_ci::env::Variable; + +use crate::arg::java_gen; +use crate::arg::release::Action; +use crate::arg::BuildJob; +use crate::arg::Cli; +use crate::arg::IsTargetSource; +use crate::arg::IsWatchableSource; +use crate::arg::Target; +use crate::arg::WatchJob; +use anyhow::Context; +use clap::Parser; +use derivative::Derivative; +use enso_build::context::BuildContext; +use enso_build::engine::context::EnginePackageProvider; +use enso_build::engine::Benchmarks; +use enso_build::engine::Tests; +use enso_build::paths::TargetTriple; +use enso_build::prettier; +use enso_build::project; +use enso_build::project::backend; +use enso_build::project::backend::Backend; +use enso_build::project::gui; +use enso_build::project::gui::Gui; +use enso_build::project::ide; +use enso_build::project::ide::Ide; +use enso_build::project::runtime; +use enso_build::project::runtime::Runtime; +use enso_build::project::wasm; +use enso_build::project::wasm::Wasm; +use enso_build::project::IsTarget; +use enso_build::project::IsWatchable; +use enso_build::project::IsWatcher; +use enso_build::project::ProcessWrapper; +use enso_build::setup_octocrab; +use enso_build::source::BuildTargetJob; +use enso_build::source::CiRunSource; +use 
enso_build::source::ExternalSource; +use enso_build::source::GetTargetJob; +use enso_build::source::OngoingCiRunSource; +use enso_build::source::ReleaseSource; +use enso_build::source::Source; +use enso_build::source::WatchTargetJob; +use enso_build::source::WithDestination; +use futures_util::future::try_join; +use ide_ci::actions::workflow::is_in_env; +use ide_ci::cache::Cache; +use ide_ci::fs::remove_if_exists; +use ide_ci::github::release::upload_asset; +use ide_ci::global; +use ide_ci::log::setup_logging; +use ide_ci::ok_ready_boxed; +use ide_ci::programs::cargo; +use ide_ci::programs::git::clean; +use ide_ci::programs::rustc; +use ide_ci::programs::Cargo; +use std::time::Duration; +use tempfile::tempdir; +use tokio::process::Child; + +pub fn void(_t: T) {} + +fn resolve_artifact_name(input: Option, project: &impl IsTarget) -> String { + input.unwrap_or_else(|| project.artifact_name()) +} + +#[derive(Clone, Copy, Debug)] +pub struct BuildKind; +impl Variable for BuildKind { + const NAME: &'static str = "ENSO_BUILD_KIND"; + type Value = enso_build::version::BuildKind; +} + +/// The basic, common information available in this application. +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct Processor { + pub context: BuildContext, +} + +impl Deref for Processor { + type Target = BuildContext; + + fn deref(&self) -> &Self::Target { + &self.context + } +} + +impl Processor { + /// Setup common build environment information based on command line input and local + /// environment. 
+ pub async fn new(cli: &Cli) -> Result { + // let build_kind = match &cli.target { + // Target::Release(release) => release.kind, + // _ => enso_build::version::BuildKind::Dev, + // }; + let absolute_repo_path = cli.repo_path.absolutize()?; + let octocrab = setup_octocrab().await?; + let versions = enso_build::version::deduce_versions( + &octocrab, + cli.build_kind, + Ok(&cli.repo_remote), + &absolute_repo_path, + ) + .await?; + let mut triple = TargetTriple::new(versions); + triple.os = cli.target_os; + triple.versions.publish()?; + let context = BuildContext { + inner: project::Context { + cache: Cache::new(&cli.cache_path).await?, + octocrab, + upload_artifacts: cli.upload_artifacts, + repo_root: enso_build::paths::new_repo_root(absolute_repo_path, &triple), + }, + triple, + remote_repo: cli.repo_remote.clone(), + }; + Ok(Self { context }) + } + + pub fn context(&self) -> project::Context { + self.inner.clone() + } + + pub fn resolve( + &self, + target: T, + source: arg::Source, + ) -> BoxFuture<'static, Result>> + where + T: Resolvable, + { + let span = info_span!("Resolving.", ?target, ?source).entered(); + let destination = source.output_path.output_path; + let source = match source.source { + arg::SourceKind::Build => + T::resolve(self, source.build_args).map_ok(Source::BuildLocally).boxed(), + arg::SourceKind::Local => + ok_ready_boxed(Source::External(ExternalSource::LocalFile(source.path))), + arg::SourceKind::CiRun => { + let run_id = source.run_id.context(format!( + "Missing run ID, please provide {} argument.", + T::RUN_ID_NAME + )); + let source = run_id.map(|run_id| { + Source::External(ExternalSource::CiRun(CiRunSource { + run_id, + repository: self.remote_repo.clone(), + artifact_name: resolve_artifact_name(source.artifact_name.clone(), &target), + })) + }); + ready(source).boxed() + } + arg::SourceKind::CurrentCiRun => + ok_ready_boxed(Source::External(ExternalSource::OngoingCiRun(OngoingCiRunSource { + artifact_name: 
resolve_artifact_name(source.artifact_name, &target), + }))), + arg::SourceKind::Release => { + let designator = source + .release + .context(format!("Missing {} argument.", T::RELEASE_DESIGNATOR_NAME)); + let resolved = designator + .and_then_async(|designator| self.resolve_release_source(target, designator)); + resolved.map_ok(|source| Source::External(ExternalSource::Release(source))).boxed() + } + }; + async move { Ok(GetTargetJob { inner: source.await?, destination }) } + .instrument(span.clone()) + .boxed() + } + + #[tracing::instrument] + pub fn resolve_release_source( + &self, + target: T, + designator: String, + ) -> BoxFuture<'static, Result> { + let repository = self.remote_repo.clone(); + let release = self.resolve_release_designator(designator); + release + .and_then_sync(move |release| { + Ok(ReleaseSource { + repository, + asset_id: target + .find_asset(release.assets) + .context(format!( + "Failed to find a relevant asset in the release '{}'.", + release.tag_name + ))? + .id, + }) + }) + .boxed() + } + + pub fn js_build_info(&self) -> BoxFuture<'static, Result> { + let triple = self.triple.clone(); + let commit = self.commit(); + async move { + Ok(gui::BuildInfo { + commit: commit.await?, + name: "Enso IDE".into(), + version: triple.versions.version.clone(), + engine_version: triple.versions.version.clone(), + }) + } + .boxed() + } + + // pub fn pm_info(&self) -> enso_build::project::backend::BuildInput { + // enso_build::project::backend::BuildInput { versions: self.triple.versions.clone() } + // } + + pub fn resolve_inputs( + &self, + inputs: ::BuildInput, + ) -> BoxFuture<'static, Result<::BuildInput>> { + T::resolve(self, inputs) + } + + pub fn resolve_watch_inputs( + &self, + inputs: ::WatchInput, + ) -> Result<::WatchInput> { + T::resolve_watch(self, inputs) + } + + pub fn resolve_build_job( + &self, + job: BuildJob, + ) -> BoxFuture<'static, Result>> { + let BuildJob { input, output_path } = job; + let input = self.resolve_inputs::(input); + 
async move { + Ok(WithDestination { destination: output_path.output_path, inner: input.await? }) + } + .boxed() + } + + pub fn resolve_watch_job( + &self, + job: WatchJob, + ) -> BoxFuture<'static, Result>> { + let WatchJob { build, watch_input } = job; + let build = self.resolve_build_job(build); + let watch_input = self.resolve_watch_inputs::(watch_input); + async move { Ok(WatchTargetJob { watch_input: watch_input?, build: build.await? }) }.boxed() + } + + pub fn watch( + &self, + job: WatchJob, + ) -> BoxFuture<'static, Result> { + let context = self.context(); + let job = self.resolve_watch_job(job); + let target = self.target::(); + async move { target?.watch(context, job.await?).await }.boxed() + } + + pub fn watch_and_wait( + &self, + job: WatchJob, + ) -> BoxFuture<'static, Result> { + let watcher = self.watch(job); + async move { watcher.await?.wait_for_finish().await }.boxed() + } + + pub fn get( + &self, + target_source: arg::Source, + ) -> BoxFuture<'static, Result> + where + Target: IsTarget + IsTargetSource + Send + Sync + 'static, + Target: Resolvable, + { + let target = self.target::(); + let get_task = self.target().map(|target| self.resolve(target, target_source)); + let context = self.context(); + async move { target?.get(context, get_task?.await?).await }.boxed() + } + + pub fn build(&self, job: BuildJob) -> BoxFuture<'static, Result> { + let context = self.context(); + let target = self.target::(); + let job = self.resolve_build_job(job); + async move { + let job = job.await?; + target?.build(context, job).await + } + .void_ok() + .boxed() + } + + pub fn handle_wasm(&self, wasm: arg::wasm::Target) -> BoxFuture<'static, Result> { + match wasm.command { + arg::wasm::Command::Watch(job) => self.watch_and_wait(job), + arg::wasm::Command::Build(job) => self.build(job).void_ok().boxed(), + arg::wasm::Command::Check => Wasm.check().boxed(), + arg::wasm::Command::Test { no_wasm, no_native } => + Wasm.test(self.repo_root.to_path_buf(), !no_wasm, 
!no_native).boxed(), + arg::wasm::Command::Get(source) => self.get(source).void_ok().boxed(), + } + } + + // pub fn handle_engine(&self, engine: arg::engine::Target) -> BoxFuture<'static, Result> { + // self.get(engine.source).void_ok().boxed() + // } + // + // pub fn handle_project_manager( + // &self, + // project_manager: arg::project_manager::Target, + // ) -> BoxFuture<'static, Result> { + // self.get(project_manager.source).void_ok().boxed() + // } + + pub fn handle_gui(&self, gui: arg::gui::Target) -> BoxFuture<'static, Result> { + match gui.command { + arg::gui::Command::Build(job) => self.build(job), + arg::gui::Command::Get(source) => self.get(source).void_ok().boxed(), + arg::gui::Command::Watch(job) => self.watch_and_wait(job), + } + } + + pub fn handle_runtime(&self, gui: arg::runtime::Target) -> BoxFuture<'static, Result> { + // todo!() + match gui.command { + arg::runtime::Command::Build(job) => self.build(job), + // arg::gui::Command::Get(source) => self.get(source).void_ok().boxed(), + // arg::gui::Command::Watch(job) => self.watch_and_wait(job), + } + } + + pub fn handle_backend(&self, backend: arg::backend::Target) -> BoxFuture<'static, Result> { + match backend.command { + arg::backend::Command::Build { source } => self.get(source).void_ok().boxed(), + arg::backend::Command::Upload { input } => { + let input = enso_build::project::Backend::resolve(self, input); + let repo = self.remote_repo.clone(); + let context = self.context(); + async move { + let input = input.await?; + let operation = enso_build::engine::Operation::Release( + enso_build::engine::ReleaseOperation { + repo, + command: enso_build::engine::ReleaseCommand::Upload, + }, + ); + let config = enso_build::engine::BuildConfigurationFlags { + build_engine_package: true, + build_launcher_bundle: true, + build_project_manager_bundle: true, + verify_packages: true, + ..default() + }; + let context = input.prepare_context(context, config)?; + context.execute(operation).await?; + Ok(()) + 
} + .boxed() + } + arg::backend::Command::Benchmark { which, minimal_run } => { + let config = enso_build::engine::BuildConfigurationFlags { + execute_benchmarks: which.into_iter().collect(), + execute_benchmarks_once: minimal_run, + ..default() + }; + let context = self.prepare_backend_context(config); + async move { + let context = context.await?; + context.build().await?; + Ok(()) + } + .boxed() + } + arg::backend::Command::Test { which } => { + let mut config = enso_build::engine::BuildConfigurationFlags::default(); + for arg in which { + match arg { + Tests::Scala => config.test_scala = true, + Tests::StandardLibrary => config.test_standard_library = true, + } + } + config.test_java_generated_from_rust = true; + let context = self.prepare_backend_context(config); + async move { context.await?.build().void_ok().await }.boxed() + } + arg::backend::Command::Sbt { command } => { + let context = self.prepare_backend_context(default()); + async move { + let mut command_pieces = vec![OsString::from("sbt")]; + command_pieces.extend(command.into_iter().map(into)); + + let operation = + enso_build::engine::Operation::Run(enso_build::engine::RunOperation { + command_pieces, + }); + + let context = context.await?; + context.execute(operation).await + } + .boxed() + } + arg::backend::Command::CiCheck {} => { + let config = enso_build::engine::BuildConfigurationFlags { + test_scala: true, + test_standard_library: true, + test_java_generated_from_rust: true, + build_benchmarks: true, + execute_benchmarks: once(Benchmarks::Runtime).collect(), + execute_benchmarks_once: true, + build_js_parser: matches!(TARGET_OS, OS::Linux), + verify_packages: true, + generate_documentation: true, + ..default() + }; + let context = self.prepare_backend_context(config); + async move { + let mut context = context.await?; + context.upload_artifacts = true; + context.build().await + } + .void_ok() + .boxed() + } + } + } + + #[instrument] + pub fn prepare_backend_context( + &self, + config: 
enso_build::engine::BuildConfigurationFlags, + ) -> BoxFuture<'static, Result> { + let paths = enso_build::paths::Paths::new_triple(&self.repo_root, self.triple.clone()); + let config = config.into(); + let octocrab = self.octocrab.clone(); + async move { + let paths = paths?; + let inner = crate::project::Context { + repo_root: paths.repo_root.clone(), + upload_artifacts: true, + octocrab, + cache: Cache::new_default().await?, + }; + Ok(enso_build::engine::RunContext { inner, config, paths, external_runtime: None }) + } + .boxed() + } + + pub fn handle_ide(&self, ide: arg::ide::Target) -> BoxFuture<'static, Result> { + match ide.command { + arg::ide::Command::Build { params } => self.build_ide(params).void_ok().boxed(), + arg::ide::Command::Upload { params, release_id } => { + let build_job = self.build_ide(params); + let remote_repo = self.remote_repo.clone(); + let client = self.octocrab.client.clone(); + async move { + let artifacts = build_job.await?; + upload_asset(&remote_repo, &client, release_id, &artifacts.image).await?; + upload_asset(&remote_repo, &client, release_id, &artifacts.image_checksum) + .await?; + Ok(()) + } + .boxed() + } + arg::ide::Command::Start { params, ide_option } => { + let build_job = self.build_ide(params); + async move { + let ide = build_job.await?; + ide.start_unpacked(ide_option).run_ok().await?; + Ok(()) + } + .boxed() + } + arg::ide::Command::Watch { project_manager, gui } => { + let gui_watcher = self.watch(gui); + let project_manager = self.spawn_project_manager(project_manager, None); + + async move { + let mut project_manager = project_manager.await?; + let mut gui_watcher = gui_watcher.await?; + gui_watcher.wait_for_finish().await?; + debug!("GUI watcher has finished, ending Project Manager process."); + project_manager.stdin.take(); // dropping stdin handle should make PM finish + project_manager.wait_ok().await?; + Ok(()) + } + .boxed() + } + arg::ide::Command::IntegrationTest { + external_backend, + project_manager, + 
wasm_pack_options, + headless, + wasm_timeout, + } => { + let custom_root = tempdir(); + let (custom_root, project_manager) = match custom_root { + Ok(tempdir) => { + let custom_root = Some(tempdir.path().into()); + ( + Some(tempdir), + Ok(self.spawn_project_manager(project_manager, custom_root)), + ) + } + Err(e) => (None, Err(e)), + }; + let source_root = self.repo_root.to_path_buf(); + async move { + let project_manager = + if !external_backend { Some(project_manager?.await?) } else { None }; + Wasm.integration_test( + source_root, + project_manager, + headless, + wasm_pack_options, + Some(wasm_timeout.into()), + ) + .await?; + // Custom root must live while the tests are being run. + drop(custom_root); + Ok(()) + } + .boxed() + } + } + } + + /// Spawns a Project Manager. + pub fn spawn_project_manager( + &self, + source: arg::Source, + custom_root: Option, + ) -> BoxFuture<'static, Result> { + let get_task = self.get(source); + async move { + let project_manager = get_task.await?; + let mut command = + enso_build::programs::project_manager::spawn_from(&project_manager.path); + if let Some(custom_root) = custom_root { + command + .set_env(enso_build::programs::project_manager::PROJECTS_ROOT, &custom_root)?; + } + command.spawn_intercepting() + } + .boxed() + } + + pub fn build_ide( + &self, + params: arg::ide::BuildInput, + ) -> BoxFuture<'static, Result> { + let arg::ide::BuildInput { gui, project_manager, output_path } = params; + let input = ide::BuildInput { + gui: self.get(gui), + project_manager: self.get(project_manager), + version: self.triple.versions.version.clone(), + }; + let target = Ide { target_os: self.triple.os, target_arch: self.triple.arch }; + let build_job = target.build(&self.context, input, output_path); + async move { + let artifacts = build_job.await?; + if is_in_env() { + artifacts.upload_as_ci_artifact().await?; + } + Ok(artifacts) + } + .boxed() + } + + pub fn target(&self) -> Result { + Target::prepare_target(self) + } +} + +pub 
trait Resolvable: IsTarget + IsTargetSource + Clone { + fn prepare_target(context: &Processor) -> Result; + + fn resolve( + ctx: &Processor, + from: ::BuildInput, + ) -> BoxFuture<'static, Result<::BuildInput>>; +} + +impl Resolvable for Wasm { + fn prepare_target(_context: &Processor) -> Result { + Ok(Wasm {}) + } + + fn resolve( + _ctx: &Processor, + from: ::BuildInput, + ) -> BoxFuture<'static, Result<::BuildInput>> { + let arg::wasm::BuildInput { + crate_path, + wasm_profile, + wasm_opt_option: wasm_opt_options, + cargo_options, + profiling_level, + wasm_size_limit, + skip_wasm_opt, + } = from; + ok_ready_boxed(wasm::BuildInput { + crate_path, + wasm_opt_options, + skip_wasm_opt, + extra_cargo_options: cargo_options, + profile: wasm_profile, + profiling_level: profiling_level.map(into), + wasm_size_limit: wasm_size_limit.filter(|size_limit| size_limit.get_bytes() > 0), + }) + } +} + +impl Resolvable for Gui { + fn prepare_target(_context: &Processor) -> Result { + Ok(Gui {}) + } + + fn resolve( + ctx: &Processor, + from: ::BuildInput, + ) -> BoxFuture<'static, Result<::BuildInput>> { + let wasm_source = ctx.resolve(Wasm, from.wasm); + let build_info = ctx.js_build_info(); + async move { Ok(gui::BuildInput { wasm: wasm_source.await?, build_info }) }.boxed() + } +} + +impl Resolvable for Runtime { + fn prepare_target(_context: &Processor) -> Result { + Ok(Runtime {}) + } + + fn resolve( + ctx: &Processor, + from: ::BuildInput, + ) -> BoxFuture<'static, Result<::BuildInput>> { + let arg::runtime::BuildInput {} = from; + ok_ready_boxed(runtime::BuildInput { versions: ctx.triple.versions.clone() }) + } +} + +impl Resolvable for Backend { + fn prepare_target(context: &Processor) -> Result { + Ok(Backend { target_os: context.triple.os }) + } + + fn resolve( + ctx: &Processor, + from: ::BuildInput, + ) -> BoxFuture<'static, Result<::BuildInput>> { + let arg::backend::BuildInput { runtime } = from; + let versions = ctx.triple.versions.clone(); + + let context = 
ctx.context.inner.clone(); + + ctx.resolve(Runtime, runtime) + .and_then_sync(|runtime| { + let external_runtime = runtime.to_external().map(move |external| { + Arc::new(move || { + Runtime + .get_external(context.clone(), external.clone()) + .map_ok(|artifact| artifact.into_inner()) + .boxed() + }) as Arc + }); + Ok(backend::BuildInput { external_runtime, versions }) + }) + .boxed() + // ok_ready_boxed(backend::BuildInput { versions: ctx.triple.versions.clone() }) + } +} + +// impl Resolvable for ProjectManager { +// fn prepare_target(_context: &Processor) -> Result { +// Ok(ProjectManager) +// } +// +// fn resolve( +// ctx: &Processor, +// _from: ::BuildInput, +// ) -> BoxFuture<'static, Result<::BuildInput>> { +// ok_ready_boxed(project_manager::BuildInput { +// repo_root: ctx.repo_root().path, +// versions: ctx.triple.versions.clone(), +// }) +// } +// } +// +// impl Resolvable for Engine { +// fn prepare_target(_context: &Processor) -> Result { +// Ok(Engine) +// } +// +// fn resolve( +// ctx: &Processor, +// _from: ::BuildInput, +// ) -> BoxFuture<'static, Result<::BuildInput>> { +// ok_ready_boxed(engine::BuildInput { +// repo_root: ctx.repo_root().path, +// versions: ctx.triple.versions.clone(), +// }) +// } +// } + +pub trait WatchResolvable: Resolvable + IsWatchableSource + IsWatchable { + fn resolve_watch( + ctx: &Processor, + from: ::WatchInput, + ) -> Result<::WatchInput>; +} + +impl WatchResolvable for Wasm { + fn resolve_watch( + _ctx: &Processor, + from: ::WatchInput, + ) -> Result<::WatchInput> { + Ok(wasm::WatchInput { cargo_watch_options: from.cargo_watch_option }) + } +} + +impl WatchResolvable for Gui { + fn resolve_watch( + ctx: &Processor, + from: ::WatchInput, + ) -> Result<::WatchInput> { + Ok(gui::WatchInput { wasm: Wasm::resolve_watch(ctx, from.wasm)?, shell: from.gui_shell }) + } +} + +#[tracing::instrument(err)] +pub async fn main_internal(config: enso_build::config::Config) -> Result { + setup_logging()?; + + // Setup that affects Cli 
parser construction. + if let Some(wasm_size_limit) = config.wasm_size_limit { + crate::arg::wasm::initialize_default_wasm_size_limit(wasm_size_limit)?; + } + + let cli = Cli::parse(); + + debug!("Parsed CLI arguments: {cli:#?}"); + + if !cli.skip_version_check { + config.check_programs().await?; + } + + // TRANSITION: Previous Engine CI job used to clone these both repositories side-by-side. + // This collides with GraalVM native image build location. + if is_in_env() { + remove_if_exists(cli.repo_path.join("enso"))?; + remove_if_exists(cli.repo_path.join("ci-build"))?; + } + + let ctx: Processor = Processor::new(&cli).instrument(info_span!("Building context.")).await?; + match cli.target { + Target::Wasm(wasm) => ctx.handle_wasm(wasm).await?, + Target::Gui(gui) => ctx.handle_gui(gui).await?, + Target::Runtime(runtime) => ctx.handle_runtime(runtime).await?, + // Target::ProjectManager(project_manager) => + // ctx.handle_project_manager(project_manager).await?, + // Target::Engine(engine) => ctx.handle_engine(engine).await?, + Target::Backend(backend) => ctx.handle_backend(backend).await?, + Target::Ide(ide) => ctx.handle_ide(ide).await?, + // TODO: consider if out-of-source ./dist should be removed + Target::GitClean(options) => { + let mut exclusions = vec![".idea"]; + if !options.build_script { + exclusions.push("target/enso-build"); + } + + let git_clean = clean::clean_except_for(&ctx.repo_root, exclusions); + let clean_cache = async { + if options.cache { + ide_ci::fs::tokio::remove_dir_if_exists(ctx.cache.path()).await?; + } + Result::Ok(()) + }; + try_join(git_clean, clean_cache).await?; + } + Target::Lint => { + Cargo + .cmd()? 
+ .current_dir(&ctx.repo_root) + .arg(cargo::clippy::COMMAND) + .apply(&cargo::Options::Workspace) + .apply(&cargo::Options::Package("enso-integration-test".into())) + .apply(&cargo::Options::AllTargets) + .apply(&cargo::Color::Always) + .arg("--") + .apply(&rustc::Option::Deny(rustc::Lint::Warnings)) + .run_ok() + .await?; + + Cargo + .cmd()? + .current_dir(&ctx.repo_root) + .arg("fmt") + .args(["--", "--check"]) + .run_ok() + .await?; + + prettier::check(&ctx.repo_root).await?; + } + Target::Fmt => { + let prettier = prettier::write(&ctx.repo_root); + let our_formatter = + enso_formatter::process_path(&ctx.repo_root, enso_formatter::Action::Format); + // our_formatter.await?; + // prettier.await?; + let (r1, r2) = join!(prettier, our_formatter).await; + r1?; + r2?; + } + Target::Release(release) => match release.action { + Action::CreateDraft => { + enso_build::release::create_release(&ctx).await?; + } + Action::DeployToEcr(args) => { + enso_build::release::deploy_to_ecr(&ctx, args.ecr_repository).await?; + enso_build::release::dispatch_cloud_image_build_action( + &ctx.octocrab, + &ctx.triple.versions.version, + ) + .await?; + } + Action::Publish => { + enso_build::release::publish_release(&ctx).await?; + } + }, + Target::CiGen => ci_gen::generate( + &enso_build::paths::generated::RepoRootGithubWorkflows::new(cli.repo_path), + )?, + Target::JavaGen(command) => { + let repo_root = ctx.repo_root.clone(); + async move { + let generate_job = enso_build::rust::parser::generate_java(&repo_root); + match command.action { + java_gen::Command::Build => generate_job.await, + java_gen::Command::Test => { + generate_job.await?; + let backend_context = ctx.prepare_backend_context(default()).await?; + backend_context.prepare_build_env().await?; + enso_build::rust::parser::run_self_tests(&repo_root).await + } + } + } + .await?; + } + Target::ChangelogCheck => { + let ci_context = ide_ci::actions::context::Context::from_env()?; + 
enso_build::changelog::check::check(ctx.repo_root.clone(), ci_context).await?; + } + }; + info!("Completed main job."); + global::complete_tasks().await?; + Ok(()) +} + +pub fn lib_main(config: enso_build::config::Config) -> Result { + let rt = tokio::runtime::Runtime::new()?; + rt.block_on(async { main_internal(config).await })?; + rt.shutdown_timeout(Duration::from_secs(60 * 30)); + info!("Successfully ending."); + Ok(()) +} + + +// #[cfg(test)] +// mod tests { +// use super::*; +// use enso_build::version::Versions; +// use ide_ci::models::config::RepoContext; +// +// #[tokio::test] +// async fn resolving_release() -> Result { +// setup_logging()?; +// let octocrab = Octocrab::default(); +// let context = Processor { +// context: BuildContext { +// remote_repo: RepoContext::from_str("enso-org/enso")?, +// triple: TargetTriple::new(Versions::new(Version::new(2022, 1, 1))), +// source_root: r"H:/NBO/enso5".into(), +// octocrab, +// cache: Cache::new_default().await?, +// }, +// }; +// +// dbg!( +// context.resolve_release_source(Backend { target_os: TARGET_OS }, +// "latest".into()).await )?; +// +// Ok(()) +// } +// } diff --git a/build/src/main.rs b/build/cli/src/main.rs similarity index 50% rename from build/src/main.rs rename to build/cli/src/main.rs index 21a899b422..ee7cd186f3 100644 --- a/build/src/main.rs +++ b/build/cli/src/main.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use enso_build::prelude::*; @@ -8,7 +9,5 @@ use enso_build::prelude::*; fn main() -> Result { - let build_config_yaml = include_str!("../../build-config.yaml"); - let config = enso_build::config::load_yaml(build_config_yaml)?; - enso_build_cli::lib_main(config) + enso_build_cli::lib_main(Default::default()) } diff --git a/build/build-utils/Cargo.toml b/build/deprecated/build-utils/Cargo.toml similarity index 82% rename from 
build/build-utils/Cargo.toml rename to build/deprecated/build-utils/Cargo.toml index 94bd62180e..f1d3b90f62 100644 --- a/build/build-utils/Cargo.toml +++ b/build/deprecated/build-utils/Cargo.toml @@ -5,9 +5,10 @@ authors = ["Enso Team "] edition = "2021" [dependencies] +ide-ci = { path = "../../ci_utils" } path-clean = "0.1.0" serde = { version = "1.0", features = ["derive"] } [dependencies.reqwest] -version = "0.10.6" +version = "0.11.12" features = ["blocking", "json"] diff --git a/build/deprecated/build-utils/src/lib.rs b/build/deprecated/build-utils/src/lib.rs new file mode 100644 index 0000000000..ebbe5da356 --- /dev/null +++ b/build/deprecated/build-utils/src/lib.rs @@ -0,0 +1,48 @@ +//! A crate with many utilities for build scripts, for example downloading packages form GitHub or +//! easier management of env vars and paths. + +// === Features === +#![feature(trait_alias)] +// === Standard Linter Configuration === +#![deny(non_ascii_idents)] +#![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] +#![allow(clippy::let_and_return)] +// === Non-Standard Linter Configuration === +#![allow(clippy::option_map_unit_fn)] +#![allow(clippy::precedence)] +#![allow(dead_code)] +#![deny(unconditional_recursion)] +#![warn(missing_copy_implementations)] +#![warn(missing_debug_implementations)] +#![warn(missing_docs)] +#![warn(trivial_casts)] +#![warn(trivial_numeric_casts)] +#![warn(unused_import_braces)] +#![warn(unused_qualifications)] + +use ide_ci::prelude::*; + + + +// ===================== +// === GithubRelease === +// ===================== + +/// A structure describing a concrete release package on GitHub. The [`project_url`] should be a +/// project's main page on GitHub. +#[derive(Debug)] +#[allow(missing_docs)] +pub struct GithubRelease { + pub project_url: T, + pub version: T, + pub filename: T, +} + +impl + Display> GithubRelease { + /// URL that can be used to download this asset from a GitHub release. 
+ pub fn url(&self) -> Result { + format!("{}/releases/download/{}/{}", self.project_url, self.version, self.filename) + .parse2() + } +} diff --git a/build/rust-scripts/Cargo.toml b/build/deprecated/rust-scripts/Cargo.toml similarity index 100% rename from build/rust-scripts/Cargo.toml rename to build/deprecated/rust-scripts/Cargo.toml diff --git a/build/rust-scripts/src/bin/test_all.rs b/build/deprecated/rust-scripts/src/bin/test_all.rs similarity index 99% rename from build/rust-scripts/src/bin/test_all.rs rename to build/deprecated/rust-scripts/src/bin/test_all.rs index 6d39958753..d6b9148ed3 100644 --- a/build/rust-scripts/src/bin/test_all.rs +++ b/build/deprecated/rust-scripts/src/bin/test_all.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use std::path::Path; diff --git a/build/enso-formatter/Cargo.toml b/build/enso-formatter/Cargo.toml index fb3f93eb9c..db6c0eb01b 100644 --- a/build/enso-formatter/Cargo.toml +++ b/build/enso-formatter/Cargo.toml @@ -6,4 +6,5 @@ edition = "2021" [dependencies] regex = "1" -lazy_static = "1.4.0" +ide-ci = { path = "../ci_utils" } +tokio = { workspace = true } diff --git a/build/enso-formatter/src/lib.rs b/build/enso-formatter/src/lib.rs new file mode 100644 index 0000000000..7366f5a678 --- /dev/null +++ b/build/enso-formatter/src/lib.rs @@ -0,0 +1,654 @@ +//! This crate implements code formatter rules that are not implemented in rustfmt. These rules +//! are this codebase specific, and they may not be desired in other code bases, including: +//! - Sorting imports into groups (e.g. local imports, pub imports, etc.). +//! - Sorting module attributes into groups. +//! - Adding standard lint configuration to `lib.rs` and `main.rs` files. +//! - (Currently disabled) Emitting warnings about star imports that are not ending with `traits::*` +//! nor `prelude::*`. +//! +//! 
Possible extensions, not implemented yet: +//! - Sections are automatically keeping spacing. + +// === Features === +#![feature(exit_status_error)] +#![feature(option_result_contains)] +// === Standard Linter Configuration === +#![deny(non_ascii_idents)] +#![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] +#![allow(clippy::let_and_return)] +// === Non-Standard Linter Configuration === +#![allow(missing_docs)] +#![deny(keyword_idents)] +#![deny(macro_use_extern_crate)] +#![deny(missing_abi)] +#![deny(pointer_structural_match)] +#![deny(unsafe_op_in_unsafe_fn)] +#![deny(unconditional_recursion)] +#![warn(absolute_paths_not_starting_with_crate)] +#![warn(elided_lifetimes_in_paths)] +#![warn(explicit_outlives_requirements)] +#![warn(missing_copy_implementations)] +#![warn(missing_debug_implementations)] +#![warn(noop_method_call)] +#![warn(single_use_lifetimes)] +#![warn(trivial_casts)] +#![warn(trivial_numeric_casts)] +#![warn(unused_crate_dependencies)] +#![warn(unused_extern_crates)] +#![warn(unused_import_braces)] +#![warn(unused_lifetimes)] +#![warn(unused_qualifications)] +#![warn(variant_size_differences)] +#![warn(unreachable_pub)] + +use ide_ci::prelude::*; + +use ide_ci::fs::tokio as fs; +use regex::Regex; +use std::collections::hash_map::DefaultHasher; +use std::hash::Hasher; +use tokio as _; + + + +// ================= +// === Constants === +// ================= + +// TODO: The below lints should be uncommented, one-by-one, and the existing code should be +// adjusted. + +/// Standard linter configuration. It will be used in every `main.rs` and `lib.rs` file in the +/// codebase. 
+const STD_LINTER_ATTRIBS: &[&str] = &[ + // Rustc lints that are allowed by default: + // "warn(absolute_paths_not_starting_with_crate)", + // "warn(elided_lifetimes_in_paths)", + // "warn(explicit_outlives_requirements)", + // "deny(keyword_idents)", + // "deny(macro_use_extern_crate)", + // "deny(missing_abi)", + // "warn(missing_copy_implementations)", + // "warn(missing_debug_implementations)", + // "warn(missing_docs)", + "deny(non_ascii_idents)", + // "warn(noop_method_call)", + // "deny(pointer_structural_match)", + // "warn(single_use_lifetimes)", + // "warn(trivial_casts)", + // "warn(trivial_numeric_casts)", + "warn(unsafe_code)", + // "deny(unsafe_op_in_unsafe_fn)", + // "warn(unused_crate_dependencies)", + // "warn(unused_extern_crates)", + // "warn(unused_import_braces)", + // "warn(unused_lifetimes)", + // "warn(unused_qualifications)", + // "warn(variant_size_differences)", + // Rustc lints that emit a warning by default: + // "deny(unconditional_recursion)", + // The code triggering this lint is usually more readable than the suggested alternative. + "allow(clippy::bool_to_int_with_if)", + // This is allowed because in some cases, it allows way nicer formatting. For example, the + // code: ``` + // fn test(x: usize) -> usize { + // if x > 1 { + // 0 + // } else { + // 1 + // } + // ``` + // is automatically formatted as a multi-line expression. However, it is shorter when using a + // local variable and it cannot be configured in rustfmt. 
+ // ``` + // fn test(x: usize) -> usize { + // let out = if x > 1 { 0 } else { 1 }; + // out + // } + // ``` + "allow(clippy::let_and_return)", +]; + + + +// ============= +// === Utils === +// ============= + +pub fn calculate_hash(t: &T) -> u64 { + let mut s = DefaultHasher::new(); + t.hash(&mut s); + s.finish() +} + +pub async fn read_file_with_hash(path: impl AsRef) -> Result<(u64, String)> { + ide_ci::fs::tokio::read_to_string(path).await.map(|content| { + let hash = calculate_hash(&content); + (hash, content) + }) +} + + + +// =================== +// === HeaderToken === +// =================== + +use HeaderToken::*; + +/// A token that can be found in the header of a file. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[allow(missing_docs)] +pub enum HeaderToken { + Attrib, + ModuleAttrib, + ModuleAttribWarn, + ModuleAttribAllow, + ModuleAttribDeny, + ModuleAttribFeature, + ModuleAttribAllowIncFeat, + EmptyLine, + ModuleDoc, + Comment, + CrateUse, + CrateUseStar, + CratePubUse, + CratePubUseStar, + Use, + UseStar, + PubUse, + PubUseStar, + PubMod, + /// Special header token that is never parsed, but can be injected by the code. + ModuleComment, + StandardLinterConfig, +} + +/// A header token with the matched string and possibly attached attributes. +#[derive(Clone)] +pub struct HeaderElement { + attrs: Vec, + token: HeaderToken, + reg_match: String, +} + +impl Debug for HeaderElement { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}({:?})", self.token, self.reg_match.as_str()) + } +} + +impl HeaderElement { + /// Constructor. + pub fn new(token: HeaderToken, reg_match: String) -> Self { + let attrs = Default::default(); + Self { attrs, token, reg_match } + } + + /// Check whether the element is empty. + pub fn is_empty(&self) -> bool { + self.reg_match.is_empty() + } + + /// Length of the splice. Includes the length of the matched string and all attached attributes. 
+ pub fn len(&self) -> usize { + let args_len: usize = self.attrs.iter().map(|t| t.len()).sum(); + self.reg_match.len() + args_len + } + + /// Convert the element to a string representation. + #[allow(clippy::inherent_to_string)] + pub fn to_string(&self) -> String { + format!("{}{}", self.attrs.join(""), self.reg_match) + } +} + +/// Regex constructor that starts on the beginning of a line, can be surrounded by whitespaces and +/// ends with a line break. +fn header_line_regex(input: &str) -> Regex { + let str = format!(r"^ *{} *(; *)?((\r\n?)|\n)", input); + Regex::new(&str).unwrap() +} + +macro_rules! define_rules { + ($($name:ident = $re:tt;)*) => { + #[allow(non_upper_case_globals)] + mod static_re { + use super::*; + lazy_static! { + $( + pub static ref $name: Regex = header_line_regex($re); + )* + } + } + + fn match_header(input: &str) -> Option { + $( + if let Some(str) = static_re::$name.find(input) { + return Some(HeaderElement::new($name, str.as_str().into())); + } + )* + None + } + }; +} + +define_rules! 
{ + EmptyLine = r""; + ModuleDoc = r"//![^\n\r]*"; + Comment = r"//[^\n\r]*"; + CrateUse = r"use +crate( *:: *[\w]+)*( +as +[\w]+)?"; + CrateUseStar = r"use +crate( *:: *[\w*]+)*"; + CratePubUse = r"pub +use +crate( *:: *[\w]+)*( +as +[\w]+)?"; + CratePubUseStar = r"pub +use +crate( *:: *[\w*]+)*"; + Use = r"use +[\w]+( *:: *[\w]+)*( +as +[\w]+)?"; + UseStar = r"use +[\w]+( *:: *[\w*]+)*"; + PubUse = r"pub +use +[\w]+( *:: *[\w]+)*( +as +[\w]+)?"; + PubUseStar = r"pub +use +[\w]+( *:: *[\w*]+)*"; + ModuleAttribFeature = r"#!\[feature[^\]]*\]"; + ModuleAttribAllowIncFeat = r"#!\[allow\(incomplete_features\)\]"; + ModuleAttribWarn = r"#!\[warn[^\]]*\]"; + ModuleAttribAllow = r"#!\[allow[^\]]*\]"; + ModuleAttribDeny = r"#!\[deny[^\]]*\]"; + ModuleAttrib = r"#!\[[^\]]*\]"; + Attrib = r"#\[[^\]]*\]"; + PubMod = r"pub +mod +[\w]+"; +} + + + +// ======================= +// === Pretty printing === +// ======================= + +/// Prints H1 section if any of the provided tokens was used in the file being formatted. +fn print_h1( + out: &mut String, + map: &HashMap>, + tokens: &[HeaderToken], + str: &str, +) { + use std::fmt::Write; + + if tokens.iter().any(|tok| map.contains_key(tok)) { + writeln!(out).unwrap(); + writeln!(out, "// ===={}====", "=".repeat(str.len())).unwrap(); + writeln!(out, "// === {} ===", str).unwrap(); + writeln!(out, "// ===={}====", "=".repeat(str.len())).unwrap(); + writeln!(out).unwrap(); + } +} + +/// Prints H2 section if any of the provided tokens was used in the file being formatted. +fn print_h2( + out: &mut String, + map: &HashMap>, + tokens: &[HeaderToken], + str: &str, +) { + use std::fmt::Write; + + if tokens.iter().map(|tok| map.contains_key(tok)).any(|t| t) { + writeln!(out, "// === {} ===", str).unwrap() + } +} + +/// Prints all the entries associated with the provided tokens. If at least one entry was printed, +/// an empty line will be added in the end. 
+fn print(out: &mut String, map: &mut HashMap>, t: &[HeaderToken]) -> bool { + // We collect the results because we want all tokens to be printed. + let sub_results: Vec = t.iter().map(|t| print_single(out, map, *t)).collect(); + sub_results.iter().any(|t| *t) +} + +/// Prints all the entries associated with the provided tokens. If at least one entry was printed, +/// an empty line will be added in the end. +fn print_section(out: &mut String, map: &mut HashMap>, t: &[HeaderToken]) { + if print(out, map, t) { + out.push('\n'); + } +} + +/// Print all the entries associated with the provided token. +fn print_single( + out: &mut String, + map: &mut HashMap>, + token: HeaderToken, +) -> bool { + match map.remove(&token) { + None => false, + Some(t) => { + out.push_str(&t.join("")); + true + } + } +} + + + +// ============== +// === Action === +// ============== + +/// Possible commands this formatter can evaluate. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[allow(missing_docs)] +pub enum Action { + Format, + DryRun, + FormatAndCheck, +} + + + +// ================== +// === Processing === +// ================== + +/// A path to rust source annottated with information whether it is a main or a library main source +/// file. +#[derive(Clone, Debug)] +#[allow(missing_docs)] +pub struct RustSourcePath { + path: PathBuf, + is_main: bool, +} + +/// Process all files of the given path recursively. +/// +/// Please note that the [`hash_map`] variable contains hashes of all files before processing. After +/// the processing is done by this formatter, `rustfmt` is run on the whole codebase, and the hashes +/// are compared with new files. This allows checking if running the formatting changed the files. +/// An alternative design is possible – we could run this formatter and pass its output to stdin of +/// `rustfmt`, run it in memory, and get the results without affecting files on the disk. 
+/// Unfortunately, such solution requires either running a separate `rustfmt` process per file, or +/// using its API. The former solution is very slow (16 seconds for the whole codebase), the second +/// uses non-documented API and is slow as well (8 seconds for the whole codebase). It should be +/// possible to improve the latter solution to get good performance, but it seems way harder than it +/// should be. +pub async fn process_path(path: impl AsRef, action: Action) -> Result { + let paths = discover_paths(&path)?; + let total = paths.len(); + let mut hash_map = HashMap::::new(); + for (i, sub_path) in paths.iter().enumerate() { + let dbg_msg = if sub_path.is_main { " [main]" } else { "" }; + info!("[{}/{}] Processing {}{}.", i + 1, total, sub_path.path.display(), dbg_msg); + let hash = process_file(&sub_path.path, action, sub_path.is_main).await?; + hash_map.insert((&sub_path.path).into(), hash); + } + if action == Action::Format || action == Action::FormatAndCheck { + ide_ci::programs::cargo::fmt::format(&path).await?; + } + + if action == Action::FormatAndCheck { + let mut changed = Vec::new(); + for sub_path in &paths { + let (hash, _) = read_file_with_hash(&sub_path.path).await?; + if hash_map.get(&sub_path.path) != Some(&hash) { + changed.push(sub_path.path.clone()); + } + } + ensure!(changed.is_empty(), "{} files changed:\n{:#?}", changed.len(), changed); + } + Ok(()) +} + +/// Discover all paths containing Rust sources, recursively. 
+pub fn discover_paths(path: impl AsRef) -> Result> { + let mut vec = Vec::default(); + discover_paths_internal(&mut vec, path, false)?; + Ok(vec) +} + +pub fn discover_paths_internal( + vec: &mut Vec, + path: impl AsRef, + is_main_dir: bool, +) -> Result { + use ide_ci::fs; + let path = path.as_ref(); + let md = fs::metadata(path)?; + if md.is_dir() && !path.file_name().contains(&"target") { + let dir_name = path.file_name(); + // FIXME: This should cover 'tests' folder also, but only the files that contain actual + // tests. Otherwise, not all attributes are allowed there. + let is_main_dir = dir_name.contains(&"bin"); // || dir_name == Some(OsStr::new("tests")); + let sub_paths = fs::read_dir(path)?; + for sub_path in sub_paths { + discover_paths_internal(vec, &sub_path?.path(), is_main_dir)?; + } + } else if md.is_file() && path.extension().contains(&"rs") { + let is_main_file = path + .file_name() + .map_or(false, |file_name| file_name == "main.rs" || file_name == "lib.rs"); + let is_main = is_main_file || is_main_dir; + let path = path.into(); + vec.push(RustSourcePath { path, is_main }); + } + Ok(()) +} + +#[context("Failed to process file {}", path.as_ref().display())] +pub async fn process_file( + path: impl AsRef, + action: Action, + is_main_file: bool, +) -> Result { + let path = path.as_ref(); + let (hash, input) = read_file_with_hash(path).await?; + let out = process_file_content(input, is_main_file)?; + if action == Action::DryRun { + println!("{}", out) + } else if action == Action::Format || action == Action::FormatAndCheck { + fs::write(path, out).await?; + } + Ok(hash) +} + +/// Process a single source file. 
+pub fn process_file_content(input: String, is_main_file: bool) -> Result { + let mut str_ptr: &str = &input; + let mut attrs = vec![]; + let mut header = vec![]; + loop { + match match_header(str_ptr) { + None => break, + Some(mut m) => { + str_ptr = &str_ptr[m.len()..]; + match m.token { + Attrib => attrs.push(m), + _ => { + if !attrs.is_empty() { + let old_attrs = std::mem::take(&mut attrs); + m.attrs = old_attrs.into_iter().map(|t| t.reg_match).collect(); + } + header.push(m) + } + } + } + } + } + + // Do not consume the trailing comments. + let mut ending: Vec<&HeaderElement> = header + .iter() + .rev() + .take_while(|t| (t.token == Comment) || (t.token == EmptyLine)) + .collect(); + ending.reverse(); + let incorrect_ending_len = ending.into_iter().skip_while(|t| t.token == EmptyLine).count(); + header.truncate(header.len() - incorrect_ending_len); + let total_len: usize = header.iter().map(|t| t.len()).sum(); + + // Mark comments before any definitions as module comments. + header + .iter_mut() + .take_while(|t| (t.token == Comment) || (t.token == EmptyLine) || (t.token == ModuleDoc)) + .map(|t| { + if t.token == Comment && !t.reg_match.starts_with("// ===") { + t.token = ModuleComment; + } + }) + .for_each(drop); + + // Error if the import section contains comments. + let contains_comments = + header.iter().find(|t| t.token == Comment && !t.reg_match.starts_with("// ===")); + if let Some(comment) = contains_comments { + bail!( + "File contains comments in the import section. This is not allowed:\n{}", + comment.reg_match + ); + } + + // Error if the star import is used for non prelude- or traits-like imports. + // TODO: This is commented for now because it requires several non-trivial changes in the code. 
+ // let invalid_star_import = header.iter().any(|t| { + // t.token == UseStar + // && !t.reg_match.contains("prelude::*") + // && !t.reg_match.contains("traits::*") + // && !t.reg_match.contains("super::*") + // }); + // + // if invalid_star_import { + // Err("Star imports only allowed for `prelude`, `traits`, and `super` + // modules.".to_string())?; } + + // Build a mapping between tokens and registered entries. + let mut map = HashMap::>::new(); + for elem in header { + map.entry(elem.token).or_default().push(elem.to_string()); + } + + // Remove standard linter configuration from the configuration found in the file. + if is_main_file { + let vec = map.entry(ModuleAttribAllow).or_default(); + vec.retain(|t| !STD_LINTER_ATTRIBS.iter().map(|s| t.contains(s)).any(|b| b)); + if vec.is_empty() { + map.remove(&ModuleAttribAllow); + } + + let vec = map.entry(ModuleAttribDeny).or_default(); + vec.retain(|t| !STD_LINTER_ATTRIBS.iter().map(|s| t.contains(s)).any(|b| b)); + if vec.is_empty() { + map.remove(&ModuleAttribDeny); + } + + let vec = map.entry(ModuleAttribWarn).or_default(); + vec.retain(|t| !STD_LINTER_ATTRIBS.iter().map(|s| t.contains(s)).any(|b| b)); + if vec.is_empty() { + map.remove(&ModuleAttribWarn); + } + + let std_linter_attribs = STD_LINTER_ATTRIBS.iter().map(|t| format!("#![{}]\n", t)); + map.entry(StandardLinterConfig).or_default().extend(std_linter_attribs); + } + + // Print the results. 
+ let mut out = String::new(); + print_section(&mut out, &mut map, &[ModuleDoc]); + print_section(&mut out, &mut map, &[ModuleComment]); + print_section(&mut out, &mut map, &[ModuleAttrib]); + print_h2(&mut out, &map, &[ModuleAttribAllowIncFeat, ModuleAttribFeature], "Features"); + print_section(&mut out, &mut map, &[ModuleAttribAllowIncFeat, ModuleAttribFeature]); + if !STD_LINTER_ATTRIBS.is_empty() { + print_h2(&mut out, &map, &[StandardLinterConfig], "Standard Linter Configuration"); + print_section(&mut out, &mut map, &[StandardLinterConfig]); + } + print_h2( + &mut out, + &map, + &[ModuleAttribAllow, ModuleAttribDeny, ModuleAttribWarn], + "Non-Standard Linter Configuration", + ); + print_section(&mut out, &mut map, &[ModuleAttribAllow, ModuleAttribDeny, ModuleAttribWarn]); + + print_section(&mut out, &mut map, &[CrateUseStar, UseStar]); + print_section(&mut out, &mut map, &[CrateUse]); + print_section(&mut out, &mut map, &[Use]); + + print_h1(&mut out, &map, &[PubMod, CratePubUseStar, PubUseStar, CratePubUse, PubUse], "Export"); + print_section(&mut out, &mut map, &[PubMod]); + print_section(&mut out, &mut map, &[CratePubUseStar, PubUseStar, CratePubUse, PubUse]); + out.push_str("\n\n"); + out.push_str(&input[total_len..]); + Ok(out) +} + + + +// ============= +// === Tests === +// ============= + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_formatting() -> Result { + let input = r#"//! Module-level documentation +//! written in two lines. + +#![warn(missing_copy_implementations)] +#![allow(incomplete_features)] +#![recursion_limit = "512"] +pub use lib_f::item_1; +pub mod mod1; +use crate::prelude::*; +use crate::lib_b; +use lib_c; +pub use crate::lib_e; +use crate::lib_a; +use lib_d::item_1; +use logger::traits::*; +pub mod mod2; +pub struct Struct1 {} +"#; + + let output = r#"//! Module-level documentation +//! written in two lines. 
+ +#![recursion_limit = "512"] + +// === Features === +#![allow(incomplete_features)] + +// === Standard Linter Configuration === +#![deny(non_ascii_idents)] +#![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] +#![allow(clippy::let_and_return)] + +// === Non-Standard Linter Configuration === +#![warn(missing_copy_implementations)] + +use crate::prelude::*; +use logger::traits::*; + +use crate::lib_b; +use crate::lib_a; + +use lib_c; +use lib_d::item_1; + + +// ============== +// === Export === +// ============== + +pub mod mod1; +pub mod mod2; + +pub use crate::lib_e; +pub use lib_f::item_1; + + + +pub struct Struct1 {} +"#; + assert_eq!(process_file_content(input.into(), true)?, output); + Ok(()) + } +} diff --git a/build/enso-formatter/src/main.rs b/build/enso-formatter/src/main.rs index b6448ebec8..4a3355185f 100644 --- a/build/enso-formatter/src/main.rs +++ b/build/enso-formatter/src/main.rs @@ -1,651 +1,16 @@ -//! This crate implements code formatter rules that are not implemented in rustfmt. These rules -//! are this codebase specific, and they may not be desired in other code bases, including: -//! - Sorting imports into groups (e.g. local imports, pub imports, etc.). -//! - Sorting module attributes into groups. -//! - Adding standard lint configuration to `lib.rs` and `main.rs` files. -//! - (Currently disabled) Emitting warnings about star imports that are not ending with `traits::*` -//! nor `prelude::*`. -//! -//! Possible extensions, not implemented yet: -//! - Sections are automatically keeping spacing. 
- -// === Features === -#![feature(exit_status_error)] -// === Standard Linter Configuration === -#![deny(non_ascii_idents)] -#![warn(unsafe_code)] -#![allow(clippy::let_and_return)] -// === Non-Standard Linter Configuration === -#![deny(keyword_idents)] -#![deny(macro_use_extern_crate)] -#![deny(missing_abi)] -#![deny(pointer_structural_match)] -#![deny(unsafe_op_in_unsafe_fn)] -#![deny(unconditional_recursion)] -#![warn(missing_docs)] -#![warn(absolute_paths_not_starting_with_crate)] -#![warn(elided_lifetimes_in_paths)] -#![warn(explicit_outlives_requirements)] -#![warn(missing_copy_implementations)] -#![warn(missing_debug_implementations)] -#![warn(noop_method_call)] -#![warn(single_use_lifetimes)] -#![warn(trivial_casts)] -#![warn(trivial_numeric_casts)] -#![warn(unused_crate_dependencies)] -#![warn(unused_extern_crates)] -#![warn(unused_import_braces)] -#![warn(unused_lifetimes)] -#![warn(unused_qualifications)] -#![warn(variant_size_differences)] -#![warn(unreachable_pub)] - -use lazy_static::lazy_static; -use regex::Regex; -use std::collections::hash_map::DefaultHasher; -use std::collections::HashMap; -use std::ffi::OsStr; -use std::fmt::Debug; -use std::fs; -use std::hash::Hash; -use std::hash::Hasher; -use std::path::Path; -use std::path::PathBuf; -use std::process::Command; -use std::process::Stdio; - - - -// ================= -// === Constants === -// ================= - -// TODO: The below lints should be uncommented, one-by-one, and the existing code should be -// adjusted. - -/// Standard linter configuration. It will be used in every `main.rs` and `lib.rs` file in the -/// codebase. 
-const STD_LINTER_ATTRIBS: &[&str] = &[ - // Rustc lints that are allowed by default: - // "warn(absolute_paths_not_starting_with_crate)", - // "warn(elided_lifetimes_in_paths)", - // "warn(explicit_outlives_requirements)", - // "deny(keyword_idents)", - // "deny(macro_use_extern_crate)", - // "deny(missing_abi)", - // "warn(missing_copy_implementations)", - // "warn(missing_debug_implementations)", - // "warn(missing_docs)", - "deny(non_ascii_idents)", - // "warn(noop_method_call)", - // "deny(pointer_structural_match)", - // "warn(single_use_lifetimes)", - // "warn(trivial_casts)", - // "warn(trivial_numeric_casts)", - "warn(unsafe_code)", - // "deny(unsafe_op_in_unsafe_fn)", - // "warn(unused_crate_dependencies)", - // "warn(unused_extern_crates)", - // "warn(unused_import_braces)", - // "warn(unused_lifetimes)", - // "warn(unused_qualifications)", - // "warn(variant_size_differences)", - // Rustc lints that emit a warning by default: - // "deny(unconditional_recursion)", - // This is allowed because in some cases, it allows way nicer formatting. For example, the - // code: ``` - // fn test(x: usize) -> usize { - // if x > 1 { - // 0 - // } else { - // 1 - // } - // ``` - // is automatically formatted as a multi-line expression. However, it is shorter when using a - // local variable and it cannot be configured in rustfmt. - // ``` - // fn test(x: usize) -> usize { - // let out = if x > 1 { 0 } else { 1 }; - // out - // } - // ``` - "allow(clippy::let_and_return)", -]; - - - -// ============= -// === Utils === -// ============= - -fn calculate_hash(t: &T) -> u64 { - let mut s = DefaultHasher::new(); - t.hash(&mut s); - s.finish() -} - -fn read_file_with_hash(path: impl AsRef) -> std::io::Result<(u64, String)> { - fs::read_to_string(path).map(|t| (calculate_hash(&t), t)) -} - - - -// =================== -// === HeaderToken === -// =================== - -use HeaderToken::*; - -/// A token that can be found in the header of a file. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -#[allow(missing_docs)] -pub enum HeaderToken { - Attrib, - ModuleAttrib, - ModuleAttribWarn, - ModuleAttribAllow, - ModuleAttribDeny, - ModuleAttribFeature, - ModuleAttribAllowIncFeat, - EmptyLine, - ModuleDoc, - Comment, - CrateUse, - CrateUseStar, - CratePubUse, - CratePubUseStar, - Use, - UseStar, - PubUse, - PubUseStar, - PubMod, - /// Special header token that is never parsed, but can be injected by the code. - ModuleComment, - StandardLinterConfig, -} - -/// A header token with the matched string and possibly attached attributes. -#[derive(Clone)] -pub struct HeaderElement { - attrs: Vec, - token: HeaderToken, - reg_match: String, -} - -impl Debug for HeaderElement { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{:?}({:?})", self.token, self.reg_match.as_str()) - } -} - -impl HeaderElement { - /// Constructor. - pub fn new(token: HeaderToken, reg_match: String) -> Self { - let attrs = Default::default(); - Self { attrs, token, reg_match } - } - - /// Check whether the element is empty. - pub fn is_empty(&self) -> bool { - self.reg_match.is_empty() - } - - /// Length of the splice. Includes the length of the matched string and all attached attributes. - pub fn len(&self) -> usize { - let args_len: usize = self.attrs.iter().map(|t| t.len()).sum(); - self.reg_match.len() + args_len - } - - /// Convert the element to a string representation. - #[allow(clippy::inherent_to_string)] - pub fn to_string(&self) -> String { - format!("{}{}", self.attrs.join(""), self.reg_match) - } -} - -/// Regex constructor that starts on the beginning of a line, can be surrounded by whitespaces and -/// ends with a line break. -fn header_line_regex(input: &str) -> Regex { - let str = format!(r"^ *{} *(; *)?((\r\n?)|\n)", input); - Regex::new(&str).unwrap() -} - -macro_rules! 
define_rules { - ($($name:ident = $re:tt;)*) => { - #[allow(non_upper_case_globals)] - mod static_re { - use super::*; - lazy_static! { - $( - pub static ref $name: Regex = header_line_regex($re); - )* - } - } - - fn match_header(input: &str) -> Option { - $( - if let Some(str) = static_re::$name.find(input) { - return Some(HeaderElement::new($name, str.as_str().into())); - } - )* - None - } - }; -} - -define_rules! { - EmptyLine = r""; - ModuleDoc = r"//![^\n\r]*"; - Comment = r"//[^\n\r]*"; - CrateUse = r"use +crate( *:: *[\w]+)*( +as +[\w]+)?"; - CrateUseStar = r"use +crate( *:: *[\w*]+)*"; - CratePubUse = r"pub +use +crate( *:: *[\w]+)*( +as +[\w]+)?"; - CratePubUseStar = r"pub +use +crate( *:: *[\w*]+)*"; - Use = r"use +[\w]+( *:: *[\w]+)*( +as +[\w]+)?"; - UseStar = r"use +[\w]+( *:: *[\w*]+)*"; - PubUse = r"pub +use +[\w]+( *:: *[\w]+)*( +as +[\w]+)?"; - PubUseStar = r"pub +use +[\w]+( *:: *[\w*]+)*"; - ModuleAttribFeature = r"#!\[feature[^\]]*\]"; - ModuleAttribAllowIncFeat = r"#!\[allow\(incomplete_features\)\]"; - ModuleAttribWarn = r"#!\[warn[^\]]*\]"; - ModuleAttribAllow = r"#!\[allow[^\]]*\]"; - ModuleAttribDeny = r"#!\[deny[^\]]*\]"; - ModuleAttrib = r"#!\[[^\]]*\]"; - Attrib = r"#\[[^\]]*\]"; - PubMod = r"pub +mod +[\w]+"; -} - - - -// ======================= -// === Pretty printing === -// ======================= - -/// Prints H1 section if any of the provided tokens was used in the file being formatted. -fn print_h1( - out: &mut String, - map: &HashMap>, - tokens: &[HeaderToken], - str: &str, -) { - if tokens.iter().any(|tok| map.contains_key(tok)) { - out.push('\n'); - out.push_str(&format!("// ===={}====\n", "=".repeat(str.len()))); - out.push_str(&format!("// === {} ===\n", str)); - out.push_str(&format!("// ===={}====\n", "=".repeat(str.len()))); - out.push('\n'); - } -} - -/// Prints H2 section if any of the provided tokens was used in the file being formatted. 
-fn print_h2( - out: &mut String, - map: &HashMap>, - tokens: &[HeaderToken], - str: &str, -) { - if tokens.iter().map(|tok| map.contains_key(tok)).any(|t| t) { - out.push_str(&format!("// === {} ===\n", str)); - } -} - -/// Prints all the entries associated with the provided tokens. If at least one entry was printed, -/// an empty line will be added in the end. -fn print(out: &mut String, map: &mut HashMap>, t: &[HeaderToken]) -> bool { - // We collect the results because we want all tokens to be printed. - let sub_results: Vec = t.iter().map(|t| print_single(out, map, *t)).collect(); - sub_results.iter().any(|t| *t) -} - -/// Prints all the entries associated with the provided tokens. If at least one entry was printed, -/// an empty line will be added in the end. -fn print_section(out: &mut String, map: &mut HashMap>, t: &[HeaderToken]) { - if print(out, map, t) { - out.push('\n'); - } -} - -/// Print all the entries associated with the provided token. -fn print_single( - out: &mut String, - map: &mut HashMap>, - token: HeaderToken, -) -> bool { - match map.remove(&token) { - None => false, - Some(t) => { - out.push_str(&t.join("")); - true - } - } -} - - - -// ============== -// === Action === -// ============== - -/// Possible commands this formatter can evaluate. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -#[allow(missing_docs)] -pub enum Action { - Format, - DryRun, - FormatAndCheck, -} - - - -// ================== -// === Processing === -// ================== - -/// A path to rust source annottated with information whether it is a main or a library main source -/// file. -#[derive(Clone, Debug)] -#[allow(missing_docs)] -pub struct RustSourcePath { - path: PathBuf, - is_main: bool, -} - -/// Process all files of the given path recursively. -/// -/// Please note that the [`hash_map`] variable contains hashes of all files before processing. 
After -/// the processing is done by this formatter, `rustfmt` is run on the whole codebase, and the hashes -/// are compared with new files. This allows checking if running the formatting changed the files. -/// An alternative design is possible – we could run this formatter and pass its output to stdin of -/// `rustfmt`, run it in memory, and get the results without affecting files on the disk. -/// Unfortunately, such solution requires either running a separate `rustfmt` process per file, or -/// using its API. The former solution is very slow (16 seconds for the whole codebase), the second -/// uses non-documented API and is slow as well (8 seconds for the whole codebase). It should be -/// possible to improve the latter solution to get good performance, but it seems way harder than it -/// should be. -fn process_path(path: impl AsRef, action: Action) { - let paths = discover_paths(path); - let total = paths.len(); - let mut hash_map = HashMap::::new(); - for (i, sub_path) in paths.iter().enumerate() { - let dbg_msg = if sub_path.is_main { " [main]" } else { "" }; - println!("[{}/{}] Processing {}{}.", i + 1, total, sub_path.path.display(), dbg_msg); - let hash = process_file(&sub_path.path, action, sub_path.is_main); - hash_map.insert((&sub_path.path).into(), hash); - } - if action == Action::Format || action == Action::FormatAndCheck { - Command::new("cargo") - .arg("fmt") - .stdin(Stdio::null()) - .status() - .expect("'cargo fmt' failed to start.") - .exit_ok() - .unwrap(); - } - - if action == Action::FormatAndCheck { - let mut changed = Vec::new(); - for sub_path in &paths { - let (hash, _) = read_file_with_hash(&sub_path.path).unwrap(); - if hash_map.get(&sub_path.path) != Some(&hash) { - changed.push(sub_path.path.clone()); - } - } - if !changed.is_empty() { - panic!("{} files changed:\n{:#?}", changed.len(), changed); - } - } -} - -/// Discover all paths containing Rust sources, recursively. 
-fn discover_paths(path: impl AsRef) -> Vec { - let mut vec = Vec::default(); - discover_paths_internal(&mut vec, path, false); - vec -} - -fn discover_paths_internal( - vec: &mut Vec, - path: impl AsRef, - is_main_dir: bool, -) { - let path = path.as_ref(); - let md = fs::metadata(path); - let md = md.unwrap_or_else(|_| panic!("Could get metadata of {}", path.display())); - if md.is_dir() && path.file_name() != Some(OsStr::new("target")) { - let dir_name = path.file_name(); - // FIXME: This should cover 'tests' folder also, but only the files that contain actual - // tests. Otherwise, not all attributes are allowed there. - let is_main_dir = dir_name == Some(OsStr::new("bin")); // || dir_name == Some(OsStr::new("tests")); - let sub_paths = fs::read_dir(path).unwrap(); - for sub_path in sub_paths { - discover_paths_internal(vec, &sub_path.unwrap().path(), is_main_dir) - } - } else if md.is_file() && path.extension() == Some(OsStr::new("rs")) { - let file_name = path.file_name().and_then(|s| s.to_str()); - let is_main_file = file_name == Some("lib.rs") || file_name == Some("main.rs"); - let is_main = is_main_file || is_main_dir; - let path = path.into(); - vec.push(RustSourcePath { path, is_main }); - } -} - -fn process_file(path: impl AsRef, action: Action, is_main_file: bool) -> u64 { - let path = path.as_ref(); - let (hash, input) = read_file_with_hash(path).unwrap(); - - match process_file_content(input, is_main_file) { - Err(e) => panic!("{:?}: {}", path, e), - Ok(out) => { - if action == Action::DryRun { - println!("{}", out) - } else if action == Action::Format || action == Action::FormatAndCheck { - fs::write(path, out).expect("Unable to write back to the source file.") - } - hash - } - } -} - -/// Process a single source file. 
-fn process_file_content(input: String, is_main_file: bool) -> Result { - let mut str_ptr: &str = &input; - let mut attrs = vec![]; - let mut header = vec![]; - loop { - match match_header(str_ptr) { - None => break, - Some(mut m) => { - str_ptr = &str_ptr[m.len()..]; - match m.token { - Attrib => attrs.push(m), - _ => { - if !attrs.is_empty() { - let old_attrs = std::mem::take(&mut attrs); - m.attrs = old_attrs.into_iter().map(|t| t.reg_match).collect(); - } - header.push(m) - } - } - } - } - } - - // Do not consume the trailing comments. - let mut ending: Vec<&HeaderElement> = header - .iter() - .rev() - .take_while(|t| (t.token == Comment) || (t.token == EmptyLine)) - .collect(); - ending.reverse(); - let incorrect_ending_len = ending.into_iter().skip_while(|t| t.token == EmptyLine).count(); - header.truncate(header.len() - incorrect_ending_len); - let total_len: usize = header.iter().map(|t| t.len()).sum(); - - // Mark comments before any definitions as module comments. - header - .iter_mut() - .take_while(|t| (t.token == Comment) || (t.token == EmptyLine) || (t.token == ModuleDoc)) - .map(|t| { - if t.token == Comment && !t.reg_match.starts_with("// ===") { - t.token = ModuleComment; - } - }) - .for_each(drop); - - // Error if the import section contains comments. - let contains_comments = - header.iter().find(|t| t.token == Comment && !t.reg_match.starts_with("// ===")); - if let Some(comment) = contains_comments { - return Err(format!( - "File contains comments in the import section. This is not allowed:\n{}", - comment.reg_match - )); - } - - // Error if the star import is used for non prelude- or traits-like imports. - // TODO: This is commented for now because it requires several non-trival changes in the code. 
- // let invalid_star_import = header.iter().any(|t| { - // t.token == UseStar - // && !t.reg_match.contains("prelude::*") - // && !t.reg_match.contains("traits::*") - // && !t.reg_match.contains("super::*") - // }); - // - // if invalid_star_import { - // Err("Star imports only allowed for `prelude`, `traits`, and `super` - // modules.".to_string())?; } - - // Build a mapping between tokens and registered entries. - let mut map = HashMap::>::new(); - for elem in header { - map.entry(elem.token).or_default().push(elem.to_string()); - } - - // Remove standard linter configuration from the configuration found in the file. - if is_main_file { - let vec = map.entry(ModuleAttribAllow).or_default(); - vec.retain(|t| !STD_LINTER_ATTRIBS.iter().map(|s| t.contains(s)).any(|b| b)); - if vec.is_empty() { - map.remove(&ModuleAttribAllow); - } - - let vec = map.entry(ModuleAttribDeny).or_default(); - vec.retain(|t| !STD_LINTER_ATTRIBS.iter().map(|s| t.contains(s)).any(|b| b)); - if vec.is_empty() { - map.remove(&ModuleAttribDeny); - } - - let vec = map.entry(ModuleAttribWarn).or_default(); - vec.retain(|t| !STD_LINTER_ATTRIBS.iter().map(|s| t.contains(s)).any(|b| b)); - if vec.is_empty() { - map.remove(&ModuleAttribWarn); - } - - let std_linter_attribs = STD_LINTER_ATTRIBS.iter().map(|t| format!("#![{}]\n", t)); - map.entry(StandardLinterConfig).or_default().extend(std_linter_attribs); - } - - // Print the results. 
- let mut out = String::new(); - print_section(&mut out, &mut map, &[ModuleDoc]); - print_section(&mut out, &mut map, &[ModuleComment]); - print_section(&mut out, &mut map, &[ModuleAttrib]); - print_h2(&mut out, &map, &[ModuleAttribAllowIncFeat, ModuleAttribFeature], "Features"); - print_section(&mut out, &mut map, &[ModuleAttribAllowIncFeat, ModuleAttribFeature]); - if !STD_LINTER_ATTRIBS.is_empty() { - print_h2(&mut out, &map, &[StandardLinterConfig], "Standard Linter Configuration"); - print_section(&mut out, &mut map, &[StandardLinterConfig]); - } - print_h2( - &mut out, - &map, - &[ModuleAttribAllow, ModuleAttribDeny, ModuleAttribWarn], - "Non-Standard Linter Configuration", - ); - print_section(&mut out, &mut map, &[ModuleAttribAllow, ModuleAttribDeny, ModuleAttribWarn]); - - print_section(&mut out, &mut map, &[CrateUseStar, UseStar]); - print_section(&mut out, &mut map, &[CrateUse]); - print_section(&mut out, &mut map, &[Use]); - - print_h1(&mut out, &map, &[PubMod, CratePubUseStar, PubUseStar, CratePubUse, PubUse], "Export"); - print_section(&mut out, &mut map, &[PubMod]); - print_section(&mut out, &mut map, &[CratePubUseStar, PubUseStar, CratePubUse, PubUse]); - out.push_str("\n\n"); - out.push_str(&input[total_len..]); - Ok(out) -} - -fn main() { - process_path(".", Action::Format); -} - - - -// ============= -// === Tests === -// ============= - -#[test] -fn test_formatting() { - let input = r#"//! Module-level documentation -//! written in two lines. - -#![warn(missing_copy_implementations)] -#![allow(incomplete_features)] -#![recursion_limit = "512"] -pub use lib_f::item_1; -pub mod mod1; -use crate::prelude::*; -use crate::lib_b; -use lib_c; -pub use crate::lib_e; -use crate::lib_a; -use lib_d::item_1; -use logger::traits::*; -pub mod mod2; -pub struct Struct1 {} -"#; - - let output = r#"//! Module-level documentation -//! written in two lines. 
- -#![recursion_limit = "512"] - -// === Features === -#![allow(incomplete_features)] - // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] -// === Non-Standard Linter Configuration === -#![warn(missing_copy_implementations)] - -use crate::prelude::*; -use logger::traits::*; - -use crate::lib_b; -use crate::lib_a; - -use lib_c; -use lib_d::item_1; - - -// ============== -// === Export === -// ============== - -pub mod mod1; -pub mod mod2; - -pub use crate::lib_e; -pub use lib_f::item_1; +use ide_ci::prelude::*; -pub struct Struct1 {} -"#; - assert_eq!(process_file_content(input.into(), true), Ok(output.into())); +#[tokio::main] +async fn main() -> Result { + setup_logging()?; + info!("Enso Formatter running in {}", ide_ci::env::current_dir()?.display()); + enso_formatter::process_path(".", enso_formatter::Action::Format).await } diff --git a/build/paths.js b/build/paths.js deleted file mode 100644 index 59148e5461..0000000000 --- a/build/paths.js +++ /dev/null @@ -1,78 +0,0 @@ -const path = require('path') -const os = require('os') - -// ============= -// === Paths === -// ============= - -let paths = {} - -paths.root = path.dirname(__dirname) - -paths.github = {} -paths.github.root = path.join(paths.root, '.github') -paths.github.workflows = path.join(paths.github.root, 'workflows') - -paths.script = {} -paths.script.main = path.join(paths.root, 'run') -paths.script.root = path.join(paths.root, 'build') -paths.script.run = path.join(paths.script.root, 'run') - -paths.dist = {} -paths.dist.root = path.join(paths.root, 'dist') -paths.dist.client = path.join(paths.dist.root, 'client') -paths.dist.content = path.join(paths.dist.root, 'content') -paths.dist.assets = path.join(paths.dist.content, 'assets') -paths.dist.packageJson = path.join(paths.dist.content, 'package.json') -paths.dist.preload = path.join(paths.dist.content, 'preload.js') -paths.dist.bin = 
path.join(paths.dist.root, 'bin') -paths.dist.init = path.join(paths.dist.root, 'init') -paths.dist.buildInit = path.join(paths.dist.root, 'build-init') -paths.dist.buildInfo = path.join(paths.dist.root, 'build.json') -paths.dist.tmp = path.join(paths.dist.root, 'tmp') - -const WASM_MAIN = 'ide.wasm' -const WASM_MAIN_RAW = 'ide_bg.wasm' -const WASM_GLUE = 'ide.js' - -// Final WASM artifacts in `dist` directory. -paths.dist.wasm = {} -paths.dist.wasm.root = path.join(paths.dist.root, 'wasm') -paths.dist.wasm.main = path.join(paths.dist.wasm.root, WASM_MAIN) -paths.dist.wasm.mainRaw = path.join(paths.dist.wasm.root, WASM_MAIN_RAW) -paths.dist.wasm.glue = path.join(paths.dist.wasm.root, WASM_GLUE) - -// Intermediate WASM artifacts. -paths.wasm = {} -paths.wasm.root = path.resolve(os.tmpdir(), 'enso-wasm') -paths.wasm.main = path.join(paths.wasm.root, WASM_MAIN) -paths.wasm.mainRaw = path.join(paths.wasm.root, WASM_MAIN_RAW) -paths.wasm.glue = path.join(paths.wasm.root, WASM_GLUE) -paths.wasm.mainGz = path.join(paths.wasm.root, 'ide.wasm.gz') - -paths.ide_desktop = {} -paths.ide_desktop.lib = {} -paths.ide_desktop.root = path.join(paths.root, 'app', 'ide-desktop') -paths.ide_desktop.lib.projectManager = path.join(paths.ide_desktop.root, 'lib', 'project-manager') -paths.ide_desktop.lib.content = path.join(paths.ide_desktop.root, 'lib', 'content') - -paths.gui = {} -paths.gui.root = path.join(paths.root, 'app', 'gui') - -function get_project_manager_extension() { - const target_platform = os.platform() - switch (target_platform) { - case 'win32': - return '.exe' - default: - return '' - } -} - -paths.get_project_manager_path = function (root) { - let base_path = path.join(root, 'enso', 'bin') - const extension = get_project_manager_extension() - return path.join(base_path, 'project-manager') + extension -} - -module.exports = paths diff --git a/build/prettier/package.json b/build/prettier/package.json index 7b8735a5a9..feb0fbd119 100644 --- a/build/prettier/package.json 
+++ b/build/prettier/package.json @@ -7,7 +7,7 @@ "prettier-plugin-toml": "^0.3.1" }, "scripts": { - "write": "cd ../../ && prettier --write .", - "check": "cd ../../ && prettier --check ." + "write": "cd ../.. && prettier --write .", + "check": "cd ../.. && prettier --check ." } } diff --git a/clippy.toml b/clippy.toml new file mode 100644 index 0000000000..bf2ffdd0e3 --- /dev/null +++ b/clippy.toml @@ -0,0 +1 @@ +type-complexity-threshold = 500 diff --git a/integration-test/src/lib.rs b/integration-test/src/lib.rs index 731515f93c..b37c82417d 100644 --- a/integration-test/src/lib.rs +++ b/integration-test/src/lib.rs @@ -6,6 +6,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/lib/rust/automata/src/dfa.rs b/lib/rust/automata/src/dfa.rs index 7f8f31a5a3..d14ef77681 100644 --- a/lib/rust/automata/src/dfa.rs +++ b/lib/rust/automata/src/dfa.rs @@ -70,20 +70,22 @@ impl Dfa { } /// Convert the automata to GraphViz Dot code for the deubgging purposes. 
- pub fn as_graphviz_code(&self) -> String { + pub fn as_graphviz_code(&self) -> FallibleResult { + use std::fmt::Write; + let mut out = String::new(); for row in 0..self.links.rows { - out += &format!("node_{}[label=\"{}\"]\n", row, row); + writeln!(out, "node_{}[label=\"{}\"]", row, row)?; for column in 0..self.links.columns { let state = self.links[(row, column)]; if !state.is_invalid() { - out += &format!("node_{} -> node_{}\n", row, state.id()); + writeln!(out, "node_{} -> node_{}", row, state.id())?; } } } let opts = "node [shape=circle style=filled fillcolor=\"#4385f5\" fontcolor=\"#FFFFFF\" \ color=white penwidth=5.0 margin=0.1 width=0.5 height=0.5 fixedsize=true]"; - format!("digraph G {{\n{}\n{}\n}}\n", opts, out) + Ok(format!("digraph G {{\n{}\n{}\n}}\n", opts, out)) } } diff --git a/lib/rust/automata/src/lib.rs b/lib/rust/automata/src/lib.rs index 166a88a6b9..8d97f53462 100644 --- a/lib/rust/automata/src/lib.rs +++ b/lib/rust/automata/src/lib.rs @@ -6,6 +6,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/automata/src/nfa.rs b/lib/rust/automata/src/nfa.rs index fa8bcfc1cd..406f7e6acf 100644 --- a/lib/rust/automata/src/nfa.rs +++ b/lib/rust/automata/src/nfa.rs @@ -242,27 +242,30 @@ impl Nfa { } /// Convert the automata to a GraphViz Dot code for the deubgging purposes. 
- pub fn as_graphviz_code(&self) -> String { + pub fn as_graphviz_code(&self) -> FallibleResult { + use std::fmt::Write; + let mut out = String::new(); for (ix, state) in self.states.iter().enumerate() { let opts = if state.export { "" } else { "[fillcolor=\"#EEEEEE\" fontcolor=\"#888888\"]" }; - out += &format!("node_{}[label=\"{}\"]{}\n", ix, ix, opts); + writeln!(out, "node_{}[label=\"{}\"]{}", ix, ix, opts)?; for link in &state.links { - out += &format!( - "node_{} -> node_{}[label=\"{}\"]\n", + writeln!( + out, + "node_{} -> node_{}[label=\"{}\"]", ix, link.target.id(), link.display_symbols() - ); + )?; } for link in &state.epsilon_links { - out += &format!("node_{} -> node_{}[style=dashed]\n", ix, link.id()); + writeln!(out, "node_{} -> node_{}[style=dashed]", ix, link.id())?; } } let opts = "node [shape=circle style=filled fillcolor=\"#4385f5\" fontcolor=\"#FFFFFF\" \ color=white penwidth=5.0 margin=0.1 width=0.5 height=0.5 fixedsize=true]"; - format!("digraph G {{\n{}\n{}\n}}\n", opts, out) + Ok(format!("digraph G {{\n{}\n{}\n}}\n", opts, out)) } } @@ -298,7 +301,7 @@ pub mod tests { // === Test Utilities === #[allow(missing_docs)] - #[derive(Clone, Debug, Default, PartialEq)] + #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct NfaTest { pub nfa: Nfa, pub start_state_id: State, diff --git a/lib/rust/callback/src/lib.rs b/lib/rust/callback/src/lib.rs index 649770537f..3427d599ea 100644 --- a/lib/rust/callback/src/lib.rs +++ b/lib/rust/callback/src/lib.rs @@ -8,6 +8,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -173,13 +174,13 @@ impl RegistryFnNew for RegistryFnMut { impl> RegistryFnCall for RegistryFn { fn call(&self, args: Args) { - (&*self.function).call(args); + (*self.function).call(args); } } impl> RegistryFnCall for RegistryFnMut { fn 
call(&self, args: Args) { - (&mut *self.function.borrow_mut()).call_mut(args); + (*self.function.borrow_mut()).call_mut(args); } } @@ -422,7 +423,7 @@ impl DynEventDispatcher { let callback = Box::new(move |event: &DynEvent| { event.any.downcast_ref::().iter().for_each(|t| f(t)) }); - let type_id = (&PhantomData::).type_id(); + let type_id = PhantomData::.type_id(); let handle = Handle::default(); let guard = handle.guard(); let listeners = self.listener_map.entry(type_id).or_insert_with(default); diff --git a/lib/rust/code-builder/src/lib.rs b/lib/rust/code-builder/src/lib.rs index 3e1bf466df..3944ff1f28 100644 --- a/lib/rust/code-builder/src/lib.rs +++ b/lib/rust/code-builder/src/lib.rs @@ -7,6 +7,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] #![allow(incomplete_features)] // To be removed, see: https://github.com/enso-org/ide/issues/1559 #![allow(missing_docs)] diff --git a/lib/rust/config-reader/src/lib.rs b/lib/rust/config-reader/src/lib.rs index 581d8beadd..68fe5d7058 100644 --- a/lib/rust/config-reader/src/lib.rs +++ b/lib/rust/config-reader/src/lib.rs @@ -60,6 +60,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use inflector::*; @@ -79,6 +80,8 @@ use std::fs; /// - `CARGO_MANIFEST_DIR` or `OUT_DIR` env variable is missing. /// - The provided config file is not in the YAML format. 
pub fn generate_config_module_from_yaml(config_path: impl AsRef) { + use std::fmt::Write; + let f = std::fs::File::open(config_path.as_ref()).unwrap(); let value: Value = serde_yaml::from_reader(f).unwrap(); let manifest_dir = std::env::var("CARGO_MANIFEST_DIR") @@ -93,12 +96,14 @@ pub fn generate_config_module_from_yaml(config_path: impl AsRef for (key, value) in mapping { let key = key.as_str().unwrap().to_snake_case(); let value = value.as_str().unwrap(); - def.push_str(&format!("{}pub {}: &'static str,\n", indent, key)); - inst.push_str(&format!("{}{}: \"{}\",\n", indent, key, value)); - vars.push_str(&format!( - "#[allow(non_upper_case_globals)]\npub const {}: &str = \"{}\";\n", + writeln!(def, "{}pub {}: &'static str,", indent, key).unwrap(); + writeln!(inst, "{}{}: \"{}\",", indent, key, value).unwrap(); + writeln!( + vars, + "#[allow(non_upper_case_globals)]\npub const {}: &str = \"{}\";", key, value - )); + ) + .unwrap(); }, _ => panic!("Unexpected config format."), } diff --git a/lib/rust/data-structures/src/diet.rs b/lib/rust/data-structures/src/diet.rs index aafba8238c..0ff0f7853d 100644 --- a/lib/rust/data-structures/src/diet.rs +++ b/lib/rust/data-structures/src/diet.rs @@ -143,12 +143,14 @@ pub struct $name { impl $name { - /// Create an empty data array. This function is safe because the intervals are build out of - /// [`uint`]s, which can be initialized from raw memory. Follow the link to learn more: - /// https://doc.rust-lang.org/std/mem/union.MaybeUninit.html. - #[allow(unsafe_code)] + /// Create an empty data array. pub (crate) fn empty_data_array() -> DataArray { - unsafe { MaybeUninit::uninit().assume_init() } + // FIXME: Original implementation used: + // `unsafe { MaybeUninit::uninit().assume_init() }` + // However, a doubt was raised whether this is correct or UB. + // The current implementation might suffer from performance issues. 
+ // See: https://github.com/enso-org/enso/pull/3694#discussion_r987216167 + [default();$num] } /// Create an empty data array. This function uses unsafe Rust to initialize big arrays element diff --git a/lib/rust/data-structures/src/lib.rs b/lib/rust/data-structures/src/lib.rs index cda804a3a1..3d9daaa69f 100644 --- a/lib/rust/data-structures/src/lib.rs +++ b/lib/rust/data-structures/src/lib.rs @@ -7,6 +7,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/lib/rust/debug-api/src/lib.rs b/lib/rust/debug-api/src/lib.rs index 543001080e..775b24ba82 100644 --- a/lib/rust/debug-api/src/lib.rs +++ b/lib/rust/debug-api/src/lib.rs @@ -5,6 +5,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/lib/rust/ensogl/app/theme/derive/src/lib.rs b/lib/rust/ensogl/app/theme/derive/src/lib.rs index fd215f7640..0644eca99f 100644 --- a/lib/rust/ensogl/app/theme/derive/src/lib.rs +++ b/lib/rust/ensogl/app/theme/derive/src/lib.rs @@ -26,7 +26,6 @@ // === Features === #![allow(incomplete_features)] #![feature(associated_type_defaults)] -#![feature(bool_to_option)] #![feature(cell_update)] #![feature(const_type_id)] #![feature(drain_filter)] @@ -43,6 +42,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/ensogl/app/theme/hardcoded/src/lib.rs b/lib/rust/ensogl/app/theme/hardcoded/src/lib.rs index fa37ae82ff..ef662e0288 100644 --- 
a/lib/rust/ensogl/app/theme/hardcoded/src/lib.rs +++ b/lib/rust/ensogl/app/theme/hardcoded/src/lib.rs @@ -4,6 +4,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/lib/rust/ensogl/component/button/src/lib.rs b/lib/rust/ensogl/component/button/src/lib.rs index 17b780246e..bf34d16d44 100644 --- a/lib/rust/ensogl/component/button/src/lib.rs +++ b/lib/rust/ensogl/component/button/src/lib.rs @@ -72,6 +72,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/component/drop-down-menu/src/lib.rs b/lib/rust/ensogl/component/drop-down-menu/src/lib.rs index 5ad7006514..5a5da4335f 100644 --- a/lib/rust/ensogl/component/drop-down-menu/src/lib.rs +++ b/lib/rust/ensogl/component/drop-down-menu/src/lib.rs @@ -7,6 +7,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -152,6 +153,7 @@ impl Model { } fn set_label(&self, label: &str) { + #[allow(clippy::needless_borrow)] // Removing the borrow breaks type inference. 
self.label.set_cursor(&default()); self.label.select_all(); self.label.insert(label); diff --git a/lib/rust/ensogl/component/drop-manager/src/lib.rs b/lib/rust/ensogl/component/drop-manager/src/lib.rs index 0b6819679c..a839d2b368 100644 --- a/lib/rust/ensogl/component/drop-manager/src/lib.rs +++ b/lib/rust/ensogl/component/drop-manager/src/lib.rs @@ -4,6 +4,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/lib/rust/ensogl/component/file-browser/src/lib.rs b/lib/rust/ensogl/component/file-browser/src/lib.rs index ca3f5e450b..cc0e702e59 100644 --- a/lib/rust/ensogl/component/file-browser/src/lib.rs +++ b/lib/rust/ensogl/component/file-browser/src/lib.rs @@ -8,6 +8,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/component/flame-graph/src/lib.rs b/lib/rust/ensogl/component/flame-graph/src/lib.rs index 93612c5091..2080236955 100644 --- a/lib/rust/ensogl/component/flame-graph/src/lib.rs +++ b/lib/rust/ensogl/component/flame-graph/src/lib.rs @@ -4,6 +4,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/component/grid-view/src/lib.rs b/lib/rust/ensogl/component/grid-view/src/lib.rs index 7b92749042..c977a72102 100644 --- a/lib/rust/ensogl/component/grid-view/src/lib.rs +++ b/lib/rust/ensogl/component/grid-view/src/lib.rs @@ -24,10 +24,10 @@ #![feature(trait_alias)] #![feature(hash_drain_filter)] 
#![feature(type_alias_impl_trait)] -#![feature(bool_to_option)] // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -647,15 +647,15 @@ impl application::View for GridView { fn default_shortcuts() -> Vec { use application::shortcut::ActionType::*; - (&[ + [ (PressAndRepeat, "up", "move_selection_up"), (PressAndRepeat, "down", "move_selection_down"), (PressAndRepeat, "left", "move_selection_left"), (PressAndRepeat, "right", "move_selection_right"), - ]) - .iter() - .map(|(a, b, c)| Self::self_shortcut_when(*a, *b, *c, "focused")) - .collect() + ] + .iter() + .map(|(a, b, c)| Self::self_shortcut_when(*a, *b, *c, "focused")) + .collect() } } diff --git a/lib/rust/ensogl/component/gui/src/lib.rs b/lib/rust/ensogl/component/gui/src/lib.rs index d11b373035..26e4318ac8 100644 --- a/lib/rust/ensogl/component/gui/src/lib.rs +++ b/lib/rust/ensogl/component/gui/src/lib.rs @@ -9,6 +9,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/component/label/src/lib.rs b/lib/rust/ensogl/component/label/src/lib.rs index e64cca6ebb..5dca134da3 100644 --- a/lib/rust/ensogl/component/label/src/lib.rs +++ b/lib/rust/ensogl/component/label/src/lib.rs @@ -7,6 +7,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/component/list-view/src/lib.rs b/lib/rust/ensogl/component/list-view/src/lib.rs index eddffac0e2..b61f66bd18 100644 --- 
a/lib/rust/ensogl/component/list-view/src/lib.rs +++ b/lib/rust/ensogl/component/list-view/src/lib.rs @@ -10,6 +10,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -722,7 +723,7 @@ impl application::View for ListView { } fn default_shortcuts() -> Vec { use shortcut::ActionType::*; - (&[ + [ (PressAndRepeat, "up", "move_selection_up"), (PressAndRepeat, "down", "move_selection_down"), (Press, "page-up", "move_selection_page_up"), @@ -730,10 +731,10 @@ impl application::View for ListView { (Press, "home", "move_selection_to_first"), (Press, "end", "move_selection_to_last"), (Press, "enter", "chose_selected_entry"), - ]) - .iter() - .map(|(a, b, c)| Self::self_shortcut_when(*a, *b, *c, "focused")) - .collect() + ] + .iter() + .map(|(a, b, c)| Self::self_shortcut_when(*a, *b, *c, "focused")) + .collect() } } diff --git a/lib/rust/ensogl/component/scroll-area/src/lib.rs b/lib/rust/ensogl/component/scroll-area/src/lib.rs index 6fb7847c74..af6644f9d9 100644 --- a/lib/rust/ensogl/component/scroll-area/src/lib.rs +++ b/lib/rust/ensogl/component/scroll-area/src/lib.rs @@ -7,6 +7,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -234,7 +235,7 @@ impl Deref for ScrollArea { impl display::Object for ScrollArea { fn display_object(&self) -> &display::object::Instance { - &*self.model.display_object + &self.model.display_object } } diff --git a/lib/rust/ensogl/component/scrollbar/src/lib.rs b/lib/rust/ensogl/component/scrollbar/src/lib.rs index d212bdb878..c025c81814 100644 --- a/lib/rust/ensogl/component/scrollbar/src/lib.rs +++ 
b/lib/rust/ensogl/component/scrollbar/src/lib.rs @@ -7,6 +7,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/component/selector/src/lib.rs b/lib/rust/ensogl/component/selector/src/lib.rs index e7df0a0b17..d128f10c40 100644 --- a/lib/rust/ensogl/component/selector/src/lib.rs +++ b/lib/rust/ensogl/component/selector/src/lib.rs @@ -16,6 +16,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/component/selector/src/shape.rs b/lib/rust/ensogl/component/selector/src/shape.rs index 02a1ea3933..931a26461c 100644 --- a/lib/rust/ensogl/component/selector/src/shape.rs +++ b/lib/rust/ensogl/component/selector/src/shape.rs @@ -138,7 +138,7 @@ pub mod track { struct OverflowShape { #[allow(dead_code)] // This field is not used but should stay as part of the API for future use. - pub width: Var, + pub width: Var, #[allow(dead_code)] // This field is not used but should stay as part of the API for future use. 
pub height: Var, @@ -154,7 +154,7 @@ impl OverflowShape { let height = &sprite_height - shadow::size(style).px(); let overflow_color = style.get_color(theme::component::slider::overflow::color); let shape = Triangle(&sprite_height / 6.0, &sprite_height / 6.0); - let shape = shape.fill(&overflow_color); + let shape = shape.fill(overflow_color); let hover_area = Circle(&height); let hover_area = hover_area.fill(HOVER_COLOR); diff --git a/lib/rust/ensogl/component/sequence-diagram/src/lib.rs b/lib/rust/ensogl/component/sequence-diagram/src/lib.rs index dcd4f18f29..cb5d0d9668 100644 --- a/lib/rust/ensogl/component/sequence-diagram/src/lib.rs +++ b/lib/rust/ensogl/component/sequence-diagram/src/lib.rs @@ -4,6 +4,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -150,7 +151,7 @@ impl Model { line.set_cap(cap); let height_rows = - (message.recipient.id as i32 - message.sender.id as i32).abs() as u32; + (message.recipient.id as i32 - message.sender.id as i32).unsigned_abs(); let height_px = ROW_HEIGHT * height_rows as f32; let start = message.recipient.id.min(message.sender.id) as u32; line.set_size(Vector2::new(LINE_WIDTH, height_px)); diff --git a/lib/rust/ensogl/component/shadow/src/lib.rs b/lib/rust/ensogl/component/shadow/src/lib.rs index 2e162e934b..11ab412881 100644 --- a/lib/rust/ensogl/component/shadow/src/lib.rs +++ b/lib/rust/ensogl/component/shadow/src/lib.rs @@ -7,6 +7,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/component/src/lib.rs b/lib/rust/ensogl/component/src/lib.rs index 6c22745ce9..a180aca22c 100644 --- 
a/lib/rust/ensogl/component/src/lib.rs +++ b/lib/rust/ensogl/component/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] diff --git a/lib/rust/ensogl/component/text/src/buffer/formatting.rs b/lib/rust/ensogl/component/text/src/buffer/formatting.rs index 3dcff95585..906fa47c4f 100644 --- a/lib/rust/ensogl/component/text/src/buffer/formatting.rs +++ b/lib/rust/ensogl/component/text/src/buffer/formatting.rs @@ -27,6 +27,10 @@ pub use font::Width; macro_rules! def_unit { ($name:ident($field_type:ty) = $def:expr) => { /// Formatting property. + // We don't know what types this struct will be instantiated with. So, sometimes we might + // not be able to derive Eq because of floats, but other structs might not use floats, and + // will then be flagged by clippy. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, Copy, Debug, From, PartialEq, PartialOrd)] #[allow(missing_docs)] pub struct $name { diff --git a/lib/rust/ensogl/component/text/src/buffer/movement.rs b/lib/rust/ensogl/component/text/src/buffer/movement.rs index bab09a3e5c..11d831dc00 100644 --- a/lib/rust/ensogl/component/text/src/buffer/movement.rs +++ b/lib/rust/ensogl/component/text/src/buffer/movement.rs @@ -13,7 +13,7 @@ use crate::buffer::selection::Selection; // ================= /// Selection transformation patterns. Used for the needs of keyboard and mouse interaction. -#[derive(Clone, Copy, Debug, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum Transform { /// Select all text. 
All, diff --git a/lib/rust/ensogl/component/text/src/component/text.rs b/lib/rust/ensogl/component/text/src/component/text.rs index 501eb47505..93c8c2b610 100644 --- a/lib/rust/ensogl/component/text/src/component/text.rs +++ b/lib/rust/ensogl/component/text/src/component/text.rs @@ -736,7 +736,7 @@ impl TextModel { let fonts = scene.extension::(); let font = fonts.load(font::DEFAULT_FONT_MONO); let glyph_system = { - let glyph_system = font::glyph::System::new(&scene, font); + let glyph_system = font::glyph::System::new(scene, font); display_object.add_child(&glyph_system); RefCell::new(glyph_system) }; @@ -1421,7 +1421,7 @@ impl TextModel { }); if truncated { - let divs = (&divs[0..divs.len() - to_be_truncated]).to_vec(); + let divs = divs[0..divs.len() - to_be_truncated].to_vec(); let divs = NonEmptyVec::try_from(divs).unwrap_or_else(|_| default_divs()); line.set_divs(divs); line.glyphs.truncate(column.value - to_be_truncated); @@ -1717,7 +1717,7 @@ impl TextModel { let scene = &app.display.default_scene; let fonts = scene.extension::(); let font = fonts.load(font_name); - let glyph_system = font::glyph::System::new(&scene, font); + let glyph_system = font::glyph::System::new(scene, font); self.display_object.add_child(&glyph_system); let old_glyph_system = self.glyph_system.replace(glyph_system); self.display_object.remove_child(&old_glyph_system); @@ -1983,7 +1983,7 @@ impl application::View for Text { fn default_shortcuts() -> Vec { use shortcut::ActionType::*; - (&[ + [ (PressAndRepeat, "left", "cursor_move_left"), (PressAndRepeat, "right", "cursor_move_right"), (PressAndRepeat, "up", "cursor_move_up"), @@ -2030,14 +2030,14 @@ impl application::View for Text { (Press, "cmd v", "paste"), (Press, "cmd z", "undo"), (Press, "escape", "keep_oldest_cursor_only"), - ]) - .iter() - .map(|(action, rule, command)| { - let only_hovered = *action != Release && rule.contains("left-mouse-button"); - let condition = if only_hovered { "focused & hovered" } else { "focused" 
}; - Self::self_shortcut_when(*action, *rule, *command, condition) - }) - .collect() + ] + .iter() + .map(|(action, rule, command)| { + let only_hovered = *action != Release && rule.contains("left-mouse-button"); + let condition = if only_hovered { "focused & hovered" } else { "focused" }; + Self::self_shortcut_when(*action, *rule, *command, condition) + }) + .collect() } } diff --git a/lib/rust/ensogl/component/text/src/font.rs b/lib/rust/ensogl/component/text/src/font.rs index 9c6e0cacc4..500a5601d4 100644 --- a/lib/rust/ensogl/component/text/src/font.rs +++ b/lib/rust/ensogl/component/text/src/font.rs @@ -305,7 +305,7 @@ impl NonVariableFamily { /// ignored. fn load_all_faces(&self, embedded: &Embedded) { for (header, file_name) in &self.definition.map { - if let Some(face) = Face::load_from_memory(&*file_name, embedded) { + if let Some(face) = Face::load_from_memory(file_name, embedded) { self.faces.borrow_mut().insert(*header, face); } } diff --git a/lib/rust/ensogl/component/text/src/font/embedded/Cargo.toml b/lib/rust/ensogl/component/text/src/font/embedded/Cargo.toml index 300b81d144..16e82e399c 100644 --- a/lib/rust/ensogl/component/text/src/font/embedded/Cargo.toml +++ b/lib/rust/ensogl/component/text/src/font/embedded/Cargo.toml @@ -13,9 +13,10 @@ enso-prelude = { path = "../../../../../../prelude" } ensogl-text-font-family = { path = "../../font/family" } [build-dependencies] -enso-build = { git = "https://github.com/enso-org/ci-build", branch = "develop" } -enso-build-utilities = { path = "../../../../../../../../build/build-utils" } +ide-ci = { path = "../../../../../../../../build/ci_utils" } +enso-build = { path = "../../../../../../../../build/build" } +enso-build-utilities = { path = "../../../../../../../../build/deprecated/build-utils" } ensogl-text-font-family = { path = "../../font/family" } -tokio = { version = "1.19.2", features = ["macros"] } +tokio = { workspace = true } zip = { version = "0.5" } owned_ttf_parser = "0.15.1" diff --git 
a/lib/rust/ensogl/component/text/src/font/embedded/build.rs b/lib/rust/ensogl/component/text/src/font/embedded/build.rs index 81823c02e7..10311813de 100644 --- a/lib/rust/ensogl/component/text/src/font/embedded/build.rs +++ b/lib/rust/ensogl/component/text/src/font/embedded/build.rs @@ -3,17 +3,12 @@ // === Features === #![feature(const_trait_impl)] +use ide_ci::prelude::*; + +use ide_ci::log::setup_logging; use owned_ttf_parser::AsFaceRef; use owned_ttf_parser::OwnedFace; -use std::env; -use std::fmt::Write; -use std::fs; -use std::fs::File; -use std::io; -use std::io::BufReader; -use std::io::Read; -use std::io::Write as IoWrite; -use std::path; +use std::fmt::Write as FmtWrite; @@ -75,11 +70,6 @@ impl CodeGenerator { ln!(0, body, "}}"); body } - - fn write>(&self, path: P) -> io::Result<()> { - let mut file = fs::File::create(path)?; - writeln!(file, "{}", self.body()) - } } @@ -89,10 +79,10 @@ impl CodeGenerator { // =================== mod deja_vu { - use crate::CodeGenerator; + use super::*; + use crate::CodeGenerator; use enso_build_utilities::GithubRelease; - use std::path; pub const PACKAGE: GithubRelease<&str> = GithubRelease { project_url: "https://github.com/dejavu-fonts/dejavu-fonts/", @@ -102,31 +92,33 @@ mod deja_vu { pub const PACKAGE_FONTS_PREFIX: &str = "dejavu-fonts-ttf-2.37/ttf"; - pub fn extract_font(package_path: &path::Path, file_name: &str) { - let font_in_package_path = format!("{}/{}", PACKAGE_FONTS_PREFIX, file_name); - let package_dir = package_path.parent().unwrap(); - let output_path = package_dir.join(file_name); - - let archive_file = std::fs::File::open(package_path).unwrap(); - let mut archive = zip::ZipArchive::new(archive_file).unwrap(); - let mut input_stream = archive.by_name(font_in_package_path.as_str()).unwrap(); - let mut output_stream = std::fs::File::create(output_path).unwrap(); - std::io::copy(&mut input_stream, &mut output_stream).unwrap(); - } - const FILE_NAMES: [&str; 4] = ["DejaVuSans.ttf", "DejaVuSans-Bold.ttf", 
"DejaVuSansMono.ttf", "DejaVuSansMono-Bold.ttf"]; - pub fn extract_all_fonts(package_path: &path::Path) { + pub fn extract_all_fonts(package_path: &Path) -> Result { + let archive_file = ide_ci::fs::open(package_path)?; + let mut archive = zip::ZipArchive::new(archive_file).unwrap(); for file_name in FILE_NAMES { - extract_font(package_path, file_name); + let font_in_package_path = format!("{}/{}", PACKAGE_FONTS_PREFIX, file_name); + let mut input_stream = archive.by_name(&font_in_package_path).with_context(|| { + format!( + "Cannot find font file {} in the package {}", + file_name, + package_path.display() + ) + })?; + let output_path = package_path.with_file_name(file_name); + let mut output_stream = ide_ci::fs::create(&output_path)?; + std::io::copy(&mut input_stream, &mut output_stream).with_context(|| { + format!("Cannot extract font file {} to {}", file_name, output_path.display()) + })?; } + Ok(()) } - pub fn download_and_extract_all_fonts(out_dir: &path::Path) { - let package_path = out_dir.join(PACKAGE.filename); - PACKAGE.download(out_dir); - extract_all_fonts(package_path.as_path()); + pub async fn download_and_extract_all_fonts(out_dir: &Path) -> Result { + let package_path = ide_ci::io::download_to_dir(PACKAGE.url()?, out_dir).await?; + extract_all_fonts(package_path.as_path()) } pub fn add_entries_to_fill_map_rs(file: &mut CodeGenerator) { @@ -146,7 +138,7 @@ mod google_fonts { use super::*; use crate::CodeGenerator; - use std::path; + use enso_build::ide::web::download_google_font; #[derive(Debug)] pub struct FaceDefinition { @@ -162,41 +154,31 @@ mod google_fonts { pub async fn download_files( name: impl AsRef, - out_dir: &path::Path, - ) -> Vec { - let octocrab = enso_build::setup_octocrab().await.expect("Failed to setup GitHub client."); - let result = enso_build::ide::web::download_google_font(&octocrab, name.as_ref(), out_dir) - .await - .expect("Failed ot download font."); - result.into_iter().map(|content| DownloadedFile { name: content.name 
}).collect() + out_dir: &Path, + ) -> Result> { + let octocrab = enso_build::setup_octocrab().await?; + let result = download_google_font(&octocrab, name.as_ref(), out_dir).await?; + Ok(result.into_iter().map(|content| DownloadedFile { name: content.name }).collect()) } - pub async fn load(out_dir: &path::Path, buffer: &mut CodeGenerator, family_name: &str) { - let files = download_files(family_name, out_dir).await; + pub async fn load(out_dir: &Path, buffer: &mut CodeGenerator, family_name: &str) -> Result { + let files = download_files(family_name, out_dir).await?; for file in &files { buffer.add_font_data(&file.name) } - let font_faces: Vec = files - .into_iter() - .map(|file| { - let file_name = file.name; - let path = out_dir.join(&file_name); - let err = |action: &str| format!("Cannot {} file {:?}", action, path); - let handle = File::open(&path).unwrap_or_else(|_| panic!("{}", err("read"))); - let mut reader = BufReader::new(handle); - let mut bytes = Vec::new(); - reader.read_to_end(&mut bytes).unwrap_or_else(|_| panic!("{}", err("read"))); - let face = OwnedFace::from_vec(bytes, 0); - let face = face.unwrap_or_else(|_| panic!("{}", err("parse"))); - FaceDefinition { file_name, face } - }) - .collect(); + let font_faces: Vec = files.into_iter().try_map(|file| { + let file_name = file.name; + let path = out_dir.join(&file_name); + let bytes = ide_ci::fs::read(&path)?; + let face = OwnedFace::from_vec(bytes, 0) + .with_context(|| format!("Cannot load font file {}.", path.display()))?; + Ok(FaceDefinition { file_name, face }) + })?; - if font_faces.is_empty() { - panic!("No font faces found for family {}.", family_name); - } else if font_faces.len() == 1 && font_faces[0].face.as_face_ref().is_variable() { + ensure!(!font_faces.is_empty(), "No font files were downloaded for family {family_name}.",); + if font_faces.len() == 1 && font_faces[0].face.as_face_ref().is_variable() { let file_name = &font_faces[0].file_name; 
buffer.add_variable_font_definition(family_name, file_name); } else { @@ -205,7 +187,7 @@ mod google_fonts { let err2 = "This is intentionally not supported."; let err3 = "CSS does not support it either,"; let err4 = "see: https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face"; - panic!("Family {} {} {} {} {}", family_name, err1, err2, err3, err4); + bail!("Family {} {} {} {} {}", family_name, err1, err2, err3, err4); } let mut code = String::new(); let fam_def = "family::Definition::NonVariable"; @@ -224,7 +206,8 @@ mod google_fonts { } ln!(1, code, "]))"); buffer.add_non_variable_font_definition(family_name, &code); - } + }; + Ok(()) } } @@ -234,17 +217,20 @@ mod google_fonts { // === Main === // ============ #[tokio::main] -async fn main() { +async fn main() -> Result { println!("cargo:rerun-if-changed=build.rs"); - let out = env::var("OUT_DIR").unwrap(); - let out_dir = path::Path::new(&out); - deja_vu::download_and_extract_all_fonts(out_dir); + setup_logging()?; + let out_dir = ide_ci::programs::cargo::build_env::OUT_DIR.get()?; + deja_vu::download_and_extract_all_fonts(&out_dir).await?; let mut code_gen = CodeGenerator::default(); - google_fonts::load(out_dir, &mut code_gen, "mplus1").await; - google_fonts::load(out_dir, &mut code_gen, "mplus1p").await; + google_fonts::load(&out_dir, &mut code_gen, "mplus1").await?; + google_fonts::load(&out_dir, &mut code_gen, "mplus1p").await?; - let out_path = out_dir.join("embedded_fonts_data.rs"); deja_vu::add_entries_to_fill_map_rs(&mut code_gen); - code_gen.write(out_path).unwrap(); + + let body = code_gen.body(); + let out_path = out_dir.join("embedded_fonts_data.rs"); + ide_ci::fs::tokio::write(&out_path, body).await?; + Ok(()) } diff --git a/lib/rust/ensogl/component/text/src/font/embedded/src/lib.rs b/lib/rust/ensogl/component/text/src/font/embedded/src/lib.rs index 0950e2d21f..42500dfe2d 100644 --- a/lib/rust/ensogl/component/text/src/font/embedded/src/lib.rs +++ 
b/lib/rust/ensogl/component/text/src/font/embedded/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/ensogl/component/text/src/font/family/src/lib.rs b/lib/rust/ensogl/component/text/src/font/family/src/lib.rs index 7e863ca5fa..749ec466b5 100644 --- a/lib/rust/ensogl/component/text/src/font/family/src/lib.rs +++ b/lib/rust/ensogl/component/text/src/font/family/src/lib.rs @@ -13,7 +13,6 @@ // === Features === #![allow(incomplete_features)] #![feature(associated_type_defaults)] -#![feature(bool_to_option)] #![feature(cell_update)] #![feature(const_type_id)] #![feature(drain_filter)] @@ -30,6 +29,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/ensogl/component/text/src/font/glyph.rs b/lib/rust/ensogl/component/text/src/font/glyph.rs index 7e2260d3fc..fa5eee528e 100644 --- a/lib/rust/ensogl/component/text/src/font/glyph.rs +++ b/lib/rust/ensogl/component/text/src/font/glyph.rs @@ -424,6 +424,7 @@ impl System { let scene = scene.as_ref(); let sprite_system = SpriteSystem::new(scene); let symbol = sprite_system.symbol(); + #[allow(clippy::let_unit_value)] let context = get_context(scene); let texture = new_texture(&context, (0, 0)); let mesh = symbol.surface(); @@ -449,8 +450,7 @@ impl System { /// may be set. 
pub fn new_glyph(&self) -> Glyph { let frp = Frp::new(); - #[allow(clippy::clone_on_copy)] - #[allow(clippy::unit_arg)] + #[allow(clippy::clone_on_copy, clippy::let_unit_value, clippy::unit_arg)] let context = self.context.clone(); let display_object = display::object::Instance::new(); let sprite = self.sprite_system.new_instance(); diff --git a/lib/rust/ensogl/component/text/src/font/msdf/Cargo.toml b/lib/rust/ensogl/component/text/src/font/msdf/Cargo.toml index ea76322953..8943867fc7 100644 --- a/lib/rust/ensogl/component/text/src/font/msdf/Cargo.toml +++ b/lib/rust/ensogl/component/text/src/font/msdf/Cargo.toml @@ -25,4 +25,6 @@ ensogl-text-embedded-fonts = { path = "../../../src/font/embedded" } ensogl-text-font-family = { path = "../../../src/font/family" } [build-dependencies] -enso-build-utilities = { path = "../../../../../../../../build/build-utils" } +ide-ci = { path = "../../../../../../../../build/ci_utils" } +enso-build-utilities = { path = "../../../../../../../../build/deprecated/build-utils" } +tokio = { workspace = true } diff --git a/lib/rust/ensogl/component/text/src/font/msdf/build.rs b/lib/rust/ensogl/component/text/src/font/msdf/build.rs index 45189febe4..19ad0db116 100644 --- a/lib/rust/ensogl/component/text/src/font/msdf/build.rs +++ b/lib/rust/ensogl/component/text/src/font/msdf/build.rs @@ -1,58 +1,51 @@ //! Downloader and patch for msdfgen library. 
+use ide_ci::prelude::*; + +use enso_build_utilities::GithubRelease; +use ide_ci::log::setup_logging; -mod msdfgen_wasm { - use enso_build_utilities::GithubRelease; - use std::fs; - use std::io::Write; - use std::path; +pub const PACKAGE: GithubRelease<&str> = GithubRelease { + project_url: "https://github.com/enso-org/msdfgen-wasm", + version: "v1.4", + filename: "msdfgen_wasm.js", +}; - pub const PACKAGE: GithubRelease<&str> = GithubRelease { - project_url: "https://github.com/enso-org/msdfgen-wasm", - version: "v1.4", - filename: "msdfgen_wasm.js", - }; +const PATCH_LINE: &str = + "; export { ccall, getValue, _msdfgen_getKerning, _msdfgen_setVariationAxis,\ + _msdfgen_generateAutoframedMSDF, _msdfgen_generateAutoframedMSDFByIndex, \ + _msdfgen_result_getMSDFData, _msdfgen_result_getAdvance, _msdfgen_result_getTranslation,\ + _msdfgen_result_getScale, _msdfgen_freeResult, _msdfgen_freeFont,\ + addInitializationCb, isInitialized }"; - /// Downloads the msdfgen package. - /// - /// **Note** - /// In theory, build.rs scripts should create and modify files in OUT_DIR only, but we haven't - /// found any way to make `#[wasm_bindgen(module="...")]` taking a file from OUT_DIR (except by - /// providing a full system path, which is obviously awful). - /// - /// If you find and implement a better way to downloading js snippets, please - /// remember to remove msdfgen_wasm.js entry from the .gitignore file. - pub fn download() { - PACKAGE.download(path::Path::new(".")) - } - - const PATCH_LINE: &str = - "; export { ccall, getValue, _msdfgen_getKerning, _msdfgen_setVariationAxis,\ - _msdfgen_generateAutoframedMSDF, _msdfgen_generateAutoframedMSDFByIndex, \ - _msdfgen_result_getMSDFData, _msdfgen_result_getAdvance, _msdfgen_result_getTranslation,\ - _msdfgen_result_getScale, _msdfgen_freeResult, _msdfgen_freeFont,\ - addInitializationCb, isInitialized }"; - - /// Patches downloaded msdfgen_wasm.js file. 
- /// - /// For some reason, for wasm-bindgen test on browsers the function must be explicitly exported. - /// Examples work without this perfectly. - pub fn patch_for_wasm_bindgen_test() { - let path = path::Path::new(&PACKAGE.filename); - let mut open_options = fs::OpenOptions::new(); - open_options.append(true); - let mut file = open_options.open(path).unwrap(); - let file_content = fs::read_to_string(path).unwrap(); - if !file_content.ends_with(PATCH_LINE) { - file.write_all(PATCH_LINE.as_bytes()).unwrap(); - } - } -} - -fn main() { +#[tokio::main] +async fn main() -> Result { println!("cargo:rerun-if-changed=build.rs"); - msdfgen_wasm::download(); - msdfgen_wasm::patch_for_wasm_bindgen_test(); + setup_logging()?; + + // Downloads the msdfgen package. + // + // **Note** + // In theory, build.rs scripts should create and modify files in OUT_DIR only, but we haven't + // found any way to make `#[wasm_bindgen(module="...")]` taking a file from OUT_DIR (except by + // providing a full system path, which is obviously awful). + // + // If you find and implement a better way to downloading js snippets, please + // remember to remove msdfgen_wasm.js entry from the .gitignore file. + let mut file = ide_ci::fs::tokio::create(PACKAGE.filename).await?; + let mut stream = ide_ci::io::web::download_reader(PACKAGE.url()?).await?; + tokio::io::copy(&mut stream, &mut file) + .await + .with_context(|| format!("Failed to stream download to file {}.", PACKAGE.filename))?; + + // Patch downloaded msdfgen_wasm.js file. + // + // For some reason, for wasm-bindgen test on browsers the function must be explicitly + // exported. Examples work without this perfectly. 
+ file.write_all(PATCH_LINE.as_bytes()) + .await + .with_context(|| format!("Failed to write to file {}", PACKAGE.filename))?; + Ok(()) } diff --git a/lib/rust/ensogl/component/text/src/font/msdf/src/lib.rs b/lib/rust/ensogl/component/text/src/font/msdf/src/lib.rs index 33a851d370..bdecef752c 100644 --- a/lib/rust/ensogl/component/text/src/font/msdf/src/lib.rs +++ b/lib/rust/ensogl/component/text/src/font/msdf/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/ensogl/component/text/src/lib.rs b/lib/rust/ensogl/component/text/src/lib.rs index bf5194ede4..56d56afa60 100644 --- a/lib/rust/ensogl/component/text/src/lib.rs +++ b/lib/rust/ensogl/component/text/src/lib.rs @@ -9,7 +9,6 @@ #![feature(trait_alias)] #![feature(type_ascription)] #![feature(option_zip)] -#![feature(derive_default_enum)] #![feature(generators)] #![feature(btree_drain_filter)] #![feature(allocator_api)] @@ -18,6 +17,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/component/toggle-button/src/lib.rs b/lib/rust/ensogl/component/toggle-button/src/lib.rs index 91e466e8ae..bfa4e258b1 100644 --- a/lib/rust/ensogl/component/toggle-button/src/lib.rs +++ b/lib/rust/ensogl/component/toggle-button/src/lib.rs @@ -7,6 +7,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/component/tooltip/src/lib.rs 
b/lib/rust/ensogl/component/tooltip/src/lib.rs index 69e7b08539..6687080b9b 100644 --- a/lib/rust/ensogl/component/tooltip/src/lib.rs +++ b/lib/rust/ensogl/component/tooltip/src/lib.rs @@ -4,6 +4,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use ensogl::prelude::*; diff --git a/lib/rust/ensogl/core/src/animation/easing.rs b/lib/rust/ensogl/core/src/animation/easing.rs index bb2d90a96d..c977594965 100644 --- a/lib/rust/ensogl/core/src/animation/easing.rs +++ b/lib/rust/ensogl/core/src/animation/easing.rs @@ -409,7 +409,8 @@ impl WeakAnimationLoop { pub type AnimationStep = animation::Loop; /// Callback for an animation step. -pub type Step = impl Fn(animation::TimeInfo); +pub type Step, OnEnd: Callback> = + impl Fn(animation::TimeInfo); fn step( easing: &Animator, diff --git a/lib/rust/ensogl/core/src/animation/frp/animation.rs b/lib/rust/ensogl/core/src/animation/frp/animation.rs index 03df9eec17..022a6736f1 100644 --- a/lib/rust/ensogl/core/src/animation/frp/animation.rs +++ b/lib/rust/ensogl/core/src/animation/frp/animation.rs @@ -136,7 +136,8 @@ where mix::Repr: inertia::Value #[derivative(Clone(bound = ""))] #[allow(missing_docs)] #[allow(non_camel_case_types)] -pub struct DEPRECATED_Animation { +pub struct DEPRECATED_Animation +where ::Repr: inertia::Value { #[shrinkwrap(main_field)] pub simulator: inertia::DynSimulator, pub value: frp::Stream, diff --git a/lib/rust/ensogl/core/src/animation/frp/easing.rs b/lib/rust/ensogl/core/src/animation/frp/easing.rs index 4c696163d4..238063a6a9 100644 --- a/lib/rust/ensogl/core/src/animation/frp/easing.rs +++ b/lib/rust/ensogl/core/src/animation/frp/easing.rs @@ -17,6 +17,7 @@ crate::define_endpoints! 
{ set_duration (f32), target (f32), stop_and_rewind (f32), + skip (), } Output { value (f32), @@ -61,43 +62,8 @@ impl Easing { eval frp.set_duration ((t) animator.set_duration((*t).ms())); eval frp.target ((t) animator.from_now_to(*t)); eval frp.stop_and_rewind ((t) animator.stop_and_rewind_to(*t)); + eval_ frp.skip (animator.skip()); } self } } - - - -// ======================== -// === DEPRECATED_Tween === -// ======================== - -/// Smart tween handler. Contains tween animator and frp endpoint. Whenever a new value is computed, -/// it is emitted via the endpoint. -/// -/// # DEPRECATION -/// This component is deprecated. Use `Easing` instead, which exposes much more FRP-oriented API -/// than this component. -#[derive(Clone, CloneRef, Debug, Shrinkwrap)] -#[allow(missing_docs)] -#[allow(non_camel_case_types)] -pub struct DEPRECATED_Tween { - #[shrinkwrap(main_field)] - pub animator: easing::DynAnimator, - pub value: frp::Stream, -} - -impl DEPRECATED_Tween { - /// Constructor. - pub fn new(network: &frp::Network) -> Self { - frp::extend! { network - def target = source::(); - } - let f = easing::quad_in_out(); - let on_step = Box::new(f!((t) target.emit(t))); - let on_end = Box::new(|_| {}); - let animator = easing::DynAnimator::new_not_started(0.0, 1.0, f, on_step, on_end); - let value = target.into(); - Self { animator, value } - } -} diff --git a/lib/rust/ensogl/core/src/animation/physics/inertia.rs b/lib/rust/ensogl/core/src/animation/physics/inertia.rs index ae5e6b1d4b..a4f41503f6 100644 --- a/lib/rust/ensogl/core/src/animation/physics/inertia.rs +++ b/lib/rust/ensogl/core/src/animation/physics/inertia.rs @@ -828,7 +828,8 @@ impl WeakAnimationLoopSlot { // ========================== /// Callback for an animation step. -pub type Step = impl Fn(animation::TimeInfo); +pub type Step, OnStart: Callback0, OnEnd: Callback1> = + impl Fn(animation::TimeInfo); fn step( simulator: &Simulator, @@ -850,7 +851,12 @@ where } /// Callback for an animation step. 
-pub type OnTooManyFramesSkipped = impl Fn(); +pub type OnTooManyFramesSkipped< + T: Value, + OnStep: Callback1, + OnStart: Callback0, + OnEnd: Callback1, +> = impl Fn(); fn on_too_many_frames_skipped( simulator: &Simulator, diff --git a/lib/rust/ensogl/core/src/data/color/data.rs b/lib/rust/ensogl/core/src/data/color/data.rs index ace0ae07df..517b7466df 100644 --- a/lib/rust/ensogl/core/src/data/color/data.rs +++ b/lib/rust/ensogl/core/src/data/color/data.rs @@ -26,7 +26,7 @@ use nalgebra::Vector4; /// just want it, for example to match the behavior of color mixing in web browsers, which is /// broken for many years already: /// https://stackoverflow.com/questions/60179850/webgl-2-0-canvas-blending-with-html-in-linear-color-space -#[derive(Clone, Copy, Default, PartialEq)] +#[derive(Clone, Copy, Default, PartialEq, Eq)] pub struct Color { /// The underlying color representation. It is either `Alpha` or a color space instance. pub data: D, diff --git a/lib/rust/ensogl/core/src/data/dirty.rs b/lib/rust/ensogl/core/src/data/dirty.rs index daef80f577..70a3161fb3 100644 --- a/lib/rust/ensogl/core/src/data/dirty.rs +++ b/lib/rust/ensogl/core/src/data/dirty.rs @@ -522,7 +522,7 @@ impl<'t, Item: SetItem> IntoIterator for &'t SetData { type Item = &'t Item; type IntoIter = <&'t FxHashSet as IntoIterator>::IntoIter; fn into_iter(self) -> Self::IntoIter { - (&self.set).iter() + (self.set).iter() } } @@ -585,7 +585,7 @@ impl<'t, Item> IntoIterator for &'t VectorData { type Item = &'t Item; type IntoIter = <&'t Vec as IntoIterator>::IntoIter; fn into_iter(self) -> Self::IntoIter { - (&self.vec).iter() + (self.vec).iter() } } diff --git a/lib/rust/ensogl/core/src/display/object/class.rs b/lib/rust/ensogl/core/src/display/object/class.rs index 440550071c..a03fe5bd9d 100644 --- a/lib/rust/ensogl/core/src/display/object/class.rs +++ b/lib/rust/ensogl/core/src/display/object/class.rs @@ -851,7 +851,7 @@ impl Object for Instance { impl> Object for &T { fn display_object(&self) -> 
&Instance { - let t: &T = *self; + let t: &T = self; t.display_object() } } diff --git a/lib/rust/ensogl/core/src/display/render/passes/symbols.rs b/lib/rust/ensogl/core/src/display/render/passes/symbols.rs index d178b1d145..baece28c95 100644 --- a/lib/rust/ensogl/core/src/display/render/passes/symbols.rs +++ b/lib/rust/ensogl/core/src/display/render/passes/symbols.rs @@ -174,7 +174,7 @@ impl SymbolsRenderPass { } self.symbol_registry.set_camera(&layer.camera()); - self.symbol_registry.render_symbols(&*layer.symbols()); + self.symbol_registry.render_symbols(&layer.symbols()); for sublayer in layer.sublayers().iter() { self.render_layer(instance, sublayer, scissor_stack, was_ever_masked); } diff --git a/lib/rust/ensogl/core/src/display/style/sheet.rs b/lib/rust/ensogl/core/src/display/style/sheet.rs index d1005d544e..48870316de 100644 --- a/lib/rust/ensogl/core/src/display/style/sheet.rs +++ b/lib/rust/ensogl/core/src/display/style/sheet.rs @@ -1039,7 +1039,7 @@ mod tests { let sheet = Sheet::new(); sheet.set("button.size", data(1.0)); assert_query_sheet_count(&sheet, 0, 2); - sheet.set("circle.radius", Expression::new(&["button.size"], |args| args[0] + &data(10.0))); + sheet.set("circle.radius", Expression::new(["button.size"], |args| args[0] + &data(10.0))); assert_query_sheet_count(&sheet, 1, 4); assert_eq!(sheet.value("circle.radius"), Some(data(11.0))); sheet.unset("button.size"); @@ -1192,10 +1192,10 @@ mod tests { assert_eq!(style.query_value(query_graph_button_size), Some(&data(1.0))); style.set( "graph.button.size", - Expression::new(&["button.size"], |args| args[0] + &data(10.0)), + Expression::new(["button.size"], |args| args[0] + &data(10.0)), ); assert_eq!(style.query_value(query_graph_button_size), Some(&data(11.0))); - style.set("button.size", Expression::new(&["size"], |args| args[0] + &data(100.0))); + style.set("button.size", Expression::new(["size"], |args| args[0] + &data(100.0))); assert_eq!(style.query_value(query_graph_button_size), 
Some(&data(111.0))); style.set("size", data(2.0)); assert_eq!(style.query_value(query_graph_button_size), Some(&data(112.0))); @@ -1208,8 +1208,8 @@ mod tests { #[test] pub fn expr_circular() { let mut style = SheetData::new(); - style.set("a", Expression::new(&["b"], |args| args[0].clone())); - style.set("b", Expression::new(&["a"], |args| args[0].clone())); + style.set("a", Expression::new(["b"], |args| args[0].clone())); + style.set("b", Expression::new(["a"], |args| args[0].clone())); assert!(style.value("a").is_none()); assert!(style.value("b").is_none()); } diff --git a/lib/rust/ensogl/core/src/display/style/theme.rs b/lib/rust/ensogl/core/src/display/style/theme.rs index 2ac1cdfca7..31cd5ba6ec 100644 --- a/lib/rust/ensogl/core/src/display/style/theme.rs +++ b/lib/rust/ensogl/core/src/display/style/theme.rs @@ -360,5 +360,5 @@ pub fn test() { theme_manager.register("theme2", theme2); theme_manager.set_enabled(&["theme1".to_string()]); - theme_manager.set_enabled(&["theme1", "theme2"]); + theme_manager.set_enabled(["theme1", "theme2"]); } diff --git a/lib/rust/ensogl/core/src/display/symbol/gpu.rs b/lib/rust/ensogl/core/src/display/symbol/gpu.rs index 2d7071b045..47207f0f14 100644 --- a/lib/rust/ensogl/core/src/display/symbol/gpu.rs +++ b/lib/rust/ensogl/core/src/display/symbol/gpu.rs @@ -274,7 +274,7 @@ shared2! { GlobalInstanceIdProvider /// Attribute scope type. Attributes can be defined in one of the supported scopes and will be /// automatically bound to the material definition during shader compilation. 
-#[derive(Copy, Clone, Debug, PartialEq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq)] #[allow(missing_docs)] pub enum ScopeType { Mesh(mesh::ScopeType), diff --git a/lib/rust/ensogl/core/src/display/symbol/gpu/geometry/primitive/mesh.rs b/lib/rust/ensogl/core/src/display/symbol/gpu/geometry/primitive/mesh.rs index 4d3354f820..ad247d738a 100644 --- a/lib/rust/ensogl/core/src/display/symbol/gpu/geometry/primitive/mesh.rs +++ b/lib/rust/ensogl/core/src/display/symbol/gpu/geometry/primitive/mesh.rs @@ -50,7 +50,7 @@ pub struct Scopes { } /// A singleton for each of scope types. -#[derive(Copy, Clone, Debug, Display, IntoPrimitive, PartialEq)] +#[derive(Copy, Clone, Debug, Display, IntoPrimitive, PartialEq, Eq)] #[allow(missing_docs)] #[repr(u8)] pub enum ScopeType { diff --git a/lib/rust/ensogl/core/src/display/world.rs b/lib/rust/ensogl/core/src/display/world.rs index 3a5d286e5f..bab52ce580 100644 --- a/lib/rust/ensogl/core/src/display/world.rs +++ b/lib/rust/ensogl/core/src/display/world.rs @@ -97,7 +97,7 @@ impl World { impl Deref for World { type Target = WorldDataWithLoop; fn deref(&self) -> &Self::Target { - &*self.rc + &self.rc } } diff --git a/lib/rust/ensogl/core/src/gui/cursor.rs b/lib/rust/ensogl/core/src/gui/cursor.rs index 4bb28d35ed..ae41bef3b4 100644 --- a/lib/rust/ensogl/core/src/gui/cursor.rs +++ b/lib/rust/ensogl/core/src/gui/cursor.rs @@ -12,7 +12,7 @@ use crate::display::scene::Scene; use crate::frp; use crate::Animation; use crate::DEPRECATED_Animation; -use crate::DEPRECATED_Tween; +use crate::Easing; @@ -217,7 +217,7 @@ impl CursorModel { fn for_each_view(&self, f: impl Fn(&shape::View)) { for view in &[&self.view, &self.port_selection] { - f(*view) + f(view) } } } @@ -270,10 +270,10 @@ impl Cursor { let inactive_fade = DEPRECATED_Animation::::new(network); let host_position = DEPRECATED_Animation::::new(network); let host_follow_weight = DEPRECATED_Animation::::new(network); - let host_attached_weight = DEPRECATED_Tween::new(network); + let 
host_attached_weight = Easing::new(network); let port_selection_layer_weight = Animation::::new(network); - host_attached_weight.set_duration(300.0.ms()); + host_attached_weight.set_duration(300.0); color_lab.set_target_value(DEFAULT_COLOR.opaque.into()); color_alpha.set_target_value(DEFAULT_COLOR.alpha); radius.set_target_value(DEFAULT_RADIUS); @@ -303,8 +303,8 @@ impl Cursor { }); eval frp.set_style([host_attached_weight,size,offset,model] (new_style) { - host_attached_weight.stop_and_rewind(); - if new_style.host.is_some() { host_attached_weight.start() } + host_attached_weight.stop_and_rewind(0.0); + if new_style.host.is_some() { host_attached_weight.target(1.0) } let def = 0.0; match &new_style.press { diff --git a/lib/rust/ensogl/core/src/gui/style.rs b/lib/rust/ensogl/core/src/gui/style.rs index e1db2ac50f..c5320ffeac 100644 --- a/lib/rust/ensogl/core/src/gui/style.rs +++ b/lib/rust/ensogl/core/src/gui/style.rs @@ -72,7 +72,11 @@ macro_rules! define_style {( $( $(#$meta:tt)* $field:ident : $field_type:ty),* $ /// that cursor does not implement any complex style management (like pushing or popping a style /// from a style stack) on purpose, as it is stateful, while it is straightforward to implement /// it in FRP. - #[derive(Debug,Clone,Default,PartialEq)] + // We don't know what types this struct will be instantiated with. So, sometimes we might not be + // able to derive Eq because of floats, but other structs might not use floats, and will then be + // flagged by clippy. + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Debug, Clone, Default, PartialEq)] pub struct Style { $($(#$meta)? 
$field : Option>),* } diff --git a/lib/rust/ensogl/core/src/lib.rs b/lib/rust/ensogl/core/src/lib.rs index 022557fd61..16f8c63c8d 100644 --- a/lib/rust/ensogl/core/src/lib.rs +++ b/lib/rust/ensogl/core/src/lib.rs @@ -7,7 +7,6 @@ #![allow(incomplete_features)] #![feature(negative_impls)] #![feature(associated_type_defaults)] -#![feature(bool_to_option)] #![feature(cell_update)] #![feature(const_type_id)] #![feature(drain_filter)] @@ -25,6 +24,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] @@ -55,7 +55,6 @@ pub mod system; pub use animation::Animation; pub use animation::DEPRECATED_Animation; -pub use animation::DEPRECATED_Tween; pub use animation::Easing; pub use enso_frp as frp; pub use enso_types as types; diff --git a/lib/rust/ensogl/core/src/system/gpu/context/native.rs b/lib/rust/ensogl/core/src/system/gpu/context/native.rs index d0c4632db0..0d443ff9c0 100644 --- a/lib/rust/ensogl/core/src/system/gpu/context/native.rs +++ b/lib/rust/ensogl/core/src/system/gpu/context/native.rs @@ -123,7 +123,7 @@ impl BlockingGetErrorLog for WebGl2RenderingContext { let code_with_num = lines_with_num.join("\n"); let error_loc_pfx = "ERROR: 0:"; let preview_code = if let Some(msg) = message.strip_prefix(error_loc_pfx) { - let line_num: String = msg.chars().take_while(|c| c.is_digit(10)).collect(); + let line_num: String = msg.chars().take_while(|c| c.is_ascii_digit()).collect(); let line_num = line_num.parse::().unwrap() - 1; let preview_radius = 5; let preview_line_start = std::cmp::max(0, line_num - preview_radius); diff --git a/lib/rust/ensogl/core/src/system/gpu/data/texture/class.rs b/lib/rust/ensogl/core/src/system/gpu/data/texture/class.rs index 651a66bf7f..8c93b45fe9 100644 --- a/lib/rust/ensogl/core/src/system/gpu/data/texture/class.rs +++ 
b/lib/rust/ensogl/core/src/system/gpu/data/texture/class.rs @@ -94,7 +94,7 @@ pub struct MagFilter(GlEnum); impl Deref for MagFilter { type Target = u32; fn deref(&self) -> &Self::Target { - &*self.0 + &self.0 } } @@ -121,7 +121,7 @@ pub struct MinFilter(GlEnum); impl Deref for MinFilter { type Target = u32; fn deref(&self) -> &Self::Target { - &*self.0 + &self.0 } } @@ -151,7 +151,7 @@ pub struct Wrap(GlEnum); impl Deref for Wrap { type Target = u32; fn deref(&self) -> &Self::Target { - &*self.0 + &self.0 } } diff --git a/lib/rust/ensogl/core/src/system/gpu/shader/compiler.rs b/lib/rust/ensogl/core/src/system/gpu/shader/compiler.rs index ad2d1b271d..14484c628a 100644 --- a/lib/rust/ensogl/core/src/system/gpu/shader/compiler.rs +++ b/lib/rust/ensogl/core/src/system/gpu/shader/compiler.rs @@ -345,7 +345,7 @@ impl CompilerData { trace!("Running KHR parallel shader compilation check job."); let jobs = &mut self.jobs.khr_completion_check; let ready_jobs = - jobs.drain_filter(|job| match job.khr.is_ready(&*self.context, &*job.program) { + jobs.drain_filter(|job| match job.khr.is_ready(&self.context, &job.program) { Some(val) => val, None => { if !self.context.is_context_lost() { @@ -385,8 +385,8 @@ impl CompilerData { let program = this.context.create_program().ok_or(Error::ProgramCreationError)?; let profiler = job.profiler; profiler.resume(); - this.context.attach_shader(&program, &*shader.vertex); - this.context.attach_shader(&program, &*shader.fragment); + this.context.attach_shader(&program, &shader.vertex); + this.context.attach_shader(&program, &shader.fragment); this.context.link_program(&program); profiler.pause(); let input = shader::Program::new(shader, program); @@ -409,7 +409,7 @@ impl CompilerData { let program = job.input; let param = WebGl2RenderingContext::LINK_STATUS; job.profiler.resume(); - let status = this.context.get_program_parameter(&*program, param); + let status = this.context.get_program_parameter(&program, param); job.profiler.finish(); if 
!status.as_bool().unwrap_or(false) { return Err(Error::ProgramLinkingError(program.shader)); diff --git a/lib/rust/ensogl/example/animation/src/lib.rs b/lib/rust/ensogl/example/animation/src/lib.rs index b8d393889a..9218d7e24a 100644 --- a/lib/rust/ensogl/example/animation/src/lib.rs +++ b/lib/rust/ensogl/example/animation/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/example/complex-shape-system/src/lib.rs b/lib/rust/ensogl/example/complex-shape-system/src/lib.rs index 46ad62c493..2415ef13db 100644 --- a/lib/rust/ensogl/example/complex-shape-system/src/lib.rs +++ b/lib/rust/ensogl/example/complex-shape-system/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use ensogl_core::display::shape::*; diff --git a/lib/rust/ensogl/example/custom-shape-system/src/lib.rs b/lib/rust/ensogl/example/custom-shape-system/src/lib.rs index 89ff6c7d09..3507a70f3d 100644 --- a/lib/rust/ensogl/example/custom-shape-system/src/lib.rs +++ b/lib/rust/ensogl/example/custom-shape-system/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use ensogl_core::display::shape::*; diff --git a/lib/rust/ensogl/example/dom-symbols/src/lib.rs b/lib/rust/ensogl/example/dom-symbols/src/lib.rs index e66438a135..a5ece6212a 100644 --- a/lib/rust/ensogl/example/dom-symbols/src/lib.rs +++ b/lib/rust/ensogl/example/dom-symbols/src/lib.rs @@ -9,6 +9,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] 
+#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/example/drop-manager/src/lib.rs b/lib/rust/ensogl/example/drop-manager/src/lib.rs index 3dde2db3b0..a827fd84ac 100644 --- a/lib/rust/ensogl/example/drop-manager/src/lib.rs +++ b/lib/rust/ensogl/example/drop-manager/src/lib.rs @@ -12,6 +12,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/example/easing-animator/src/lib.rs b/lib/rust/ensogl/example/easing-animator/src/lib.rs index 60b137d6b1..373915004d 100644 --- a/lib/rust/ensogl/example/easing-animator/src/lib.rs +++ b/lib/rust/ensogl/example/easing-animator/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/example/grid-view/src/lib.rs b/lib/rust/ensogl/example/grid-view/src/lib.rs index b484f15519..3d5a975ba3 100644 --- a/lib/rust/ensogl/example/grid-view/src/lib.rs +++ b/lib/rust/ensogl/example/grid-view/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -171,9 +172,9 @@ fn pair_to_vec2(pair: (f32, f32)) -> Vector2 { // ======================== fn init(app: &Application) { - theme::builtin::dark::register(&app); - theme::builtin::light::register(&app); - theme::builtin::light::enable(&app); + 
theme::builtin::dark::register(app); + theme::builtin::light::register(app); + theme::builtin::light::enable(app); let main_layer = &app.display.default_scene.layers.node_searcher; let grids_layer = main_layer.create_sublayer(); diff --git a/lib/rust/ensogl/example/list-view/src/lib.rs b/lib/rust/ensogl/example/list-view/src/lib.rs index 3fba7b227d..bd9af3c6b7 100644 --- a/lib/rust/ensogl/example/list-view/src/lib.rs +++ b/lib/rust/ensogl/example/list-view/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -88,9 +89,9 @@ impl list_view::entry::ModelProvider fo // ======================== fn init(app: &Application) { - theme::builtin::dark::register(&app); - theme::builtin::light::register(&app); - theme::builtin::light::enable(&app); + theme::builtin::dark::register(app); + theme::builtin::light::register(app); + theme::builtin::light::enable(app); let list_view = app.new_view::>(); let provider = list_view::entry::AnyModelProvider::new(MockEntries::new(1000)); diff --git a/lib/rust/ensogl/example/mouse-events/src/lib.rs b/lib/rust/ensogl/example/mouse-events/src/lib.rs index 471ace416f..66b33db643 100644 --- a/lib/rust/ensogl/example/mouse-events/src/lib.rs +++ b/lib/rust/ensogl/example/mouse-events/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/example/profiling-run-graph/src/lib.rs b/lib/rust/ensogl/example/profiling-run-graph/src/lib.rs index bf3ff9531c..f635e23173 100644 --- a/lib/rust/ensogl/example/profiling-run-graph/src/lib.rs +++ 
b/lib/rust/ensogl/example/profiling-run-graph/src/lib.rs @@ -8,6 +8,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(unused_qualifications)] diff --git a/lib/rust/ensogl/example/render-profile-flamegraph/src/lib.rs b/lib/rust/ensogl/example/render-profile-flamegraph/src/lib.rs index 2ec5658c24..5b0850d660 100644 --- a/lib/rust/ensogl/example/render-profile-flamegraph/src/lib.rs +++ b/lib/rust/ensogl/example/render-profile-flamegraph/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/example/scroll-area/src/lib.rs b/lib/rust/ensogl/example/scroll-area/src/lib.rs index f071618f4d..f90dfe9d59 100644 --- a/lib/rust/ensogl/example/scroll-area/src/lib.rs +++ b/lib/rust/ensogl/example/scroll-area/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -82,9 +83,9 @@ mod background { // ======================== fn init(app: &Application) { - theme::builtin::dark::register(&app); - theme::builtin::light::register(&app); - theme::builtin::light::enable(&app); + theme::builtin::dark::register(app); + theme::builtin::light::register(app); + theme::builtin::light::enable(app); let scene = &app.display.default_scene; scene.camera().set_position_xy(Vector2(100.0, -100.0)); diff --git a/lib/rust/ensogl/example/shape-system/src/lib.rs b/lib/rust/ensogl/example/shape-system/src/lib.rs index bb6e709e3e..0c99d21a4e 100644 --- 
a/lib/rust/ensogl/example/shape-system/src/lib.rs +++ b/lib/rust/ensogl/example/shape-system/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/example/slider/src/lib.rs b/lib/rust/ensogl/example/slider/src/lib.rs index 8cabaf2a92..98cc4dabb3 100644 --- a/lib/rust/ensogl/example/slider/src/lib.rs +++ b/lib/rust/ensogl/example/slider/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] @@ -70,9 +71,9 @@ fn make_range_picker(app: &Application) -> Leak { // ======================== fn init(app: &Application) { - theme::builtin::dark::register(&app); - theme::builtin::light::register(&app); - theme::builtin::light::enable(&app); + theme::builtin::dark::register(app); + theme::builtin::light::register(app); + theme::builtin::light::enable(app); let slider1 = make_number_picker(app); slider1.inner().frp.allow_click_selection(true); diff --git a/lib/rust/ensogl/example/sprite-system-benchmark/src/lib.rs b/lib/rust/ensogl/example/sprite-system-benchmark/src/lib.rs index 3e6c538f4f..c14ee24f31 100644 --- a/lib/rust/ensogl/example/sprite-system-benchmark/src/lib.rs +++ b/lib/rust/ensogl/example/sprite-system-benchmark/src/lib.rs @@ -9,6 +9,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/example/sprite-system/src/lib.rs b/lib/rust/ensogl/example/sprite-system/src/lib.rs index 
b2276d85ea..91b8216ad9 100644 --- a/lib/rust/ensogl/example/sprite-system/src/lib.rs +++ b/lib/rust/ensogl/example/sprite-system/src/lib.rs @@ -9,6 +9,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/example/src/lib.rs b/lib/rust/ensogl/example/src/lib.rs index 787184e556..1d999ab9d9 100644 --- a/lib/rust/ensogl/example/src/lib.rs +++ b/lib/rust/ensogl/example/src/lib.rs @@ -14,6 +14,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/example/text-area/src/lib.rs b/lib/rust/ensogl/example/text-area/src/lib.rs index 13f1c6ef27..c547924192 100644 --- a/lib/rust/ensogl/example/text-area/src/lib.rs +++ b/lib/rust/ensogl/example/text-area/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/ensogl/src/lib.rs b/lib/rust/ensogl/src/lib.rs index 8dd88bde27..fb36636d0c 100644 --- a/lib/rust/ensogl/src/lib.rs +++ b/lib/rust/ensogl/src/lib.rs @@ -4,6 +4,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] diff --git a/lib/rust/frp/src/lib.rs b/lib/rust/frp/src/lib.rs index d3dfff44a1..d1afa141bd 100644 --- a/lib/rust/frp/src/lib.rs +++ b/lib/rust/frp/src/lib.rs @@ -146,6 +146,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] 
#![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] #![allow(incomplete_features)] // To be removed, see: https://github.com/enso-org/ide/issues/1559 #![warn(missing_copy_implementations)] diff --git a/lib/rust/fuzzly/src/lib.rs b/lib/rust/fuzzly/src/lib.rs index d3f628d091..a27f9a3d73 100644 --- a/lib/rust/fuzzly/src/lib.rs +++ b/lib/rust/fuzzly/src/lib.rs @@ -13,6 +13,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/lib/rust/generics/src/lib.rs b/lib/rust/generics/src/lib.rs index cc5afdb751..8a8da2e720 100644 --- a/lib/rust/generics/src/lib.rs +++ b/lib/rust/generics/src/lib.rs @@ -9,6 +9,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/lib/rust/json-rpc/src/lib.rs b/lib/rust/json-rpc/src/lib.rs index da8d8f7832..5aaab7e680 100644 --- a/lib/rust/json-rpc/src/lib.rs +++ b/lib/rust/json-rpc/src/lib.rs @@ -6,6 +6,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/lib/rust/json-rpc/src/macros.rs b/lib/rust/json-rpc/src/macros.rs index 1877e8f86b..ad81220d02 100644 --- a/lib/rust/json-rpc/src/macros.rs +++ b/lib/rust/json-rpc/src/macros.rs @@ -123,7 +123,7 @@ macro_rules! make_rpc_methods { $( /// Structure transporting method arguments. 
- #[derive(Serialize,Debug,PartialEq)] + #[derive(Serialize,Debug,PartialEq, Eq)] #[serde(rename_all="camelCase")] struct $method_input<'a> { #[serde(skip)] diff --git a/lib/rust/json-rpc/src/messages.rs b/lib/rust/json-rpc/src/messages.rs index 6c1da39a93..ff1970993b 100644 --- a/lib/rust/json-rpc/src/messages.rs +++ b/lib/rust/json-rpc/src/messages.rs @@ -15,7 +15,7 @@ use shrinkwraprs::Shrinkwrap; // =============== /// All JSON-RPC messages bear `jsonrpc` version number. -#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] #[derive(Shrinkwrap)] pub struct Message { /// JSON-RPC Protocol version, should be 2.0. @@ -100,7 +100,7 @@ impl Message { pub struct Id(pub i64); /// JSON-RPC protocol version. Only 2.0 is supported. -#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq, Eq)] pub enum Version { /// JSON-RPC 2.0 specification. The supported version. #[serde(rename = "2.0")] @@ -111,7 +111,7 @@ pub enum Version { /// /// `Call` must be a type, that upon JSON serialization provides `method` and /// `params` fields, like `MethodCall`. -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] #[derive(Shrinkwrap)] pub struct Request { /// An identifier for this request that will allow matching the response. @@ -133,12 +133,12 @@ impl Request { /// /// `Call` must be a type, that upon JSON serialization provides `method` and /// `params` fields, like `MethodCall`. -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct Notification(pub Call); /// A response to a `Request`. Depending on `result` value it might be /// successful or not. 
-#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct Response { /// Identifier, matching the value given in `Request` when call was made. pub id: Id, @@ -148,7 +148,7 @@ pub struct Response { } /// Result of the remote call — either a returned value or en error. -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] #[serde(untagged)] #[allow(missing_docs)] pub enum Result { @@ -176,14 +176,14 @@ impl Result { } /// Value yield by a successful remote call. -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct Success { /// A value returned from a successful remote call. pub result: Ret, } /// Error raised on a failed remote call. -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct Error { /// A number indicating what type of error occurred. pub code: i64, @@ -195,7 +195,7 @@ pub struct Error { /// A message that can come from Server to Client — either a response or /// notification. -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] #[serde(untagged)] pub enum IncomingMessage { /// A response to a call made by client. @@ -220,7 +220,7 @@ pub fn decode_incoming_message(message: &str) -> serde_json::Result { /// Name of the method that is being called. 
@@ -242,7 +242,7 @@ mod tests { use serde_json::Map; use serde_json::Value; - #[derive(Serialize, Deserialize, Debug, PartialEq)] + #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] struct MockRequest { number: i64, } diff --git a/lib/rust/json-rpc/tests/test.rs b/lib/rust/json-rpc/tests/test.rs index 5db91e018e..f171ed7c8b 100644 --- a/lib/rust/json-rpc/tests/test.rs +++ b/lib/rust/json-rpc/tests/test.rs @@ -44,7 +44,7 @@ fn pow_impl(msg: MockRequestMessage) -> MockResponseMessage { // === Protocol Data === -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] struct MockRequest { i: i64, } @@ -54,12 +54,12 @@ impl RemoteMethodCall for MockRequest { type Returned = MockResponse; } -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] struct MockResponse { result: i64, } -#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Eq)] #[serde(tag = "method", content = "params")] pub enum MockNotification { Meow { text: String }, diff --git a/lib/rust/launcher-shims/src/bin/launcher_000.rs b/lib/rust/launcher-shims/src/bin/launcher_000.rs index adc8db6596..b075e47c79 100644 --- a/lib/rust/launcher-shims/src/bin/launcher_000.rs +++ b/lib/rust/launcher-shims/src/bin/launcher_000.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use launcher_shims::wrap_launcher; diff --git a/lib/rust/launcher-shims/src/bin/launcher_001.rs b/lib/rust/launcher-shims/src/bin/launcher_001.rs index 583cf8036b..d9efbcd0e2 100644 --- a/lib/rust/launcher-shims/src/bin/launcher_001.rs +++ b/lib/rust/launcher-shims/src/bin/launcher_001.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] 
#![allow(clippy::let_and_return)] use launcher_shims::wrap_launcher; diff --git a/lib/rust/launcher-shims/src/bin/launcher_002.rs b/lib/rust/launcher-shims/src/bin/launcher_002.rs index 57aefc18a3..21fcb97657 100644 --- a/lib/rust/launcher-shims/src/bin/launcher_002.rs +++ b/lib/rust/launcher-shims/src/bin/launcher_002.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use launcher_shims::wrap_launcher; diff --git a/lib/rust/launcher-shims/src/bin/launcher_003.rs b/lib/rust/launcher-shims/src/bin/launcher_003.rs index bc8806e7b6..8814ed4fa2 100644 --- a/lib/rust/launcher-shims/src/bin/launcher_003.rs +++ b/lib/rust/launcher-shims/src/bin/launcher_003.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use launcher_shims::wrap_launcher; diff --git a/lib/rust/launcher-shims/src/bin/launcher_004.rs b/lib/rust/launcher-shims/src/bin/launcher_004.rs index 343c3a8be6..05c8c6ccf3 100644 --- a/lib/rust/launcher-shims/src/bin/launcher_004.rs +++ b/lib/rust/launcher-shims/src/bin/launcher_004.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use launcher_shims::wrap_launcher; diff --git a/lib/rust/launcher-shims/src/lib.rs b/lib/rust/launcher-shims/src/lib.rs index 2257f14fb1..419d1768f9 100644 --- a/lib/rust/launcher-shims/src/lib.rs +++ b/lib/rust/launcher-shims/src/lib.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use std::io::prelude::*; diff --git a/lib/rust/logger/src/lib.rs b/lib/rust/logger/src/lib.rs index 7481ef29b8..3c9d7e0d5a 100644 --- 
a/lib/rust/logger/src/lib.rs +++ b/lib/rust/logger/src/lib.rs @@ -102,6 +102,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/lib/rust/macro-utils/src/lib.rs b/lib/rust/macro-utils/src/lib.rs index d9505656d2..8ce6309bc2 100644 --- a/lib/rust/macro-utils/src/lib.rs +++ b/lib/rust/macro-utils/src/lib.rs @@ -6,6 +6,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/lib/rust/metamodel/lexpr/src/lib.rs b/lib/rust/metamodel/lexpr/src/lib.rs index 093c9926a1..4230aae94a 100644 --- a/lib/rust/metamodel/lexpr/src/lib.rs +++ b/lib/rust/metamodel/lexpr/src/lib.rs @@ -39,6 +39,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/metamodel/src/lib.rs b/lib/rust/metamodel/src/lib.rs index 5fe5067c8b..ca546e7cd0 100644 --- a/lib/rust/metamodel/src/lib.rs +++ b/lib/rust/metamodel/src/lib.rs @@ -49,6 +49,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/metamodel/src/meta/serialization.rs b/lib/rust/metamodel/src/meta/serialization.rs index 0a6fd8d426..ff3da1f160 100644 --- a/lib/rust/metamodel/src/meta/serialization.rs +++ b/lib/rust/metamodel/src/meta/serialization.rs @@ -563,8 +563,8 @@ impl<'p> Interpreter<'p> { 
self.run_continuation(cont_stack, &mut prefix); } Op::U8(data) => prefix.push(*data), - Op::U32(data) => prefix.extend(&data.to_le_bytes()), - Op::U64(data) => prefix.extend(&data.to_le_bytes()), + Op::U32(data) => prefix.extend(data.to_le_bytes()), + Op::U64(data) => prefix.extend(data.to_le_bytes()), Op::Case(case) => { if DEBUG { match case { @@ -612,8 +612,8 @@ impl<'p> Interpreter<'p> { Op::SwitchPush => stack.push(self.continuations[&pc]), Op::SwitchPop => panic!("Fell through a switch at {pc}."), Op::U8(data) => out.push(*data), - Op::U32(data) => out.extend(&data.to_le_bytes()), - Op::U64(data) => out.extend(&data.to_le_bytes()), + Op::U32(data) => out.extend(data.to_le_bytes()), + Op::U64(data) => out.extend(data.to_le_bytes()), Op::Case(Case::Accept) => { if let Some(pc_) = stack.pop() { if DEBUG { diff --git a/lib/rust/optics/src/lib.rs b/lib/rust/optics/src/lib.rs index 946b6d62f8..f09f277022 100644 --- a/lib/rust/optics/src/lib.rs +++ b/lib/rust/optics/src/lib.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(non_snake_case)] diff --git a/lib/rust/parser/generate-java/src/bin/graph-java.rs b/lib/rust/parser/generate-java/src/bin/graph-java.rs index 04a6749c7d..9f0ce46697 100644 --- a/lib/rust/parser/generate-java/src/bin/graph-java.rs +++ b/lib/rust/parser/generate-java/src/bin/graph-java.rs @@ -9,6 +9,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use enso_metamodel::graphviz; diff --git a/lib/rust/parser/generate-java/src/bin/graph-meta.rs b/lib/rust/parser/generate-java/src/bin/graph-meta.rs index 6a6b0f7069..8b89694253 100644 --- a/lib/rust/parser/generate-java/src/bin/graph-meta.rs +++ b/lib/rust/parser/generate-java/src/bin/graph-meta.rs @@ 
-9,6 +9,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use enso_reflect::Reflect; diff --git a/lib/rust/parser/generate-java/src/bin/graph-rust.rs b/lib/rust/parser/generate-java/src/bin/graph-rust.rs index 24972a20c1..7151c1213c 100644 --- a/lib/rust/parser/generate-java/src/bin/graph-rust.rs +++ b/lib/rust/parser/generate-java/src/bin/graph-rust.rs @@ -9,6 +9,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] diff --git a/lib/rust/parser/generate-java/src/bin/java-tests.rs b/lib/rust/parser/generate-java/src/bin/java-tests.rs index c6f99b91d7..1ac9471f1f 100644 --- a/lib/rust/parser/generate-java/src/bin/java-tests.rs +++ b/lib/rust/parser/generate-java/src/bin/java-tests.rs @@ -9,6 +9,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] diff --git a/lib/rust/parser/generate-java/src/lib.rs b/lib/rust/parser/generate-java/src/lib.rs index 78d01df258..f87ddc809d 100644 --- a/lib/rust/parser/generate-java/src/lib.rs +++ b/lib/rust/parser/generate-java/src/lib.rs @@ -4,6 +4,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/parser/generate-java/src/main.rs b/lib/rust/parser/generate-java/src/main.rs index 7304987796..3c4cc23162 100644 --- a/lib/rust/parser/generate-java/src/main.rs +++ b/lib/rust/parser/generate-java/src/main.rs @@ -10,6 +10,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] 
#![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/parser/jni/src/lib.rs b/lib/rust/parser/jni/src/lib.rs index c1a8aec00e..f0cd89954f 100644 --- a/lib/rust/parser/jni/src/lib.rs +++ b/lib/rust/parser/jni/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/parser/src/lexer.rs b/lib/rust/parser/src/lexer.rs index 134f24d858..dbefaf2a1c 100644 --- a/lib/rust/parser/src/lexer.rs +++ b/lib/rust/parser/src/lexer.rs @@ -1252,7 +1252,7 @@ impl<'s> Lexer<'s> { self.submit_token(Token(offset, "", eof)); } let mut internal_error = self.internal_error.take(); - if self.current_char != None { + if self.current_char.is_some() { let message = format!("Lexer did not consume all input. 
State: {self:?}"); internal_error.get_or_insert(message); } diff --git a/lib/rust/parser/src/lib.rs b/lib/rust/parser/src/lib.rs index b15d4a3218..60e0cbf55b 100644 --- a/lib/rust/parser/src/lib.rs +++ b/lib/rust/parser/src/lib.rs @@ -78,17 +78,17 @@ #![recursion_limit = "256"] // === Features === #![allow(incomplete_features)] +#![feature(let_chains)] #![feature(allocator_api)] #![feature(exact_size_is_empty)] #![feature(test)] #![feature(specialization)] -#![feature(let_chains)] -#![feature(let_else)] #![feature(if_let_guard)] #![feature(box_patterns)] // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] @@ -106,7 +106,6 @@ use crate::prelude::*; - // ============== // === Export === // ============== diff --git a/lib/rust/parser/src/macros/expand.rs b/lib/rust/parser/src/macros/expand.rs index 6218025129..373b356260 100644 --- a/lib/rust/parser/src/macros/expand.rs +++ b/lib/rust/parser/src/macros/expand.rs @@ -206,7 +206,7 @@ pub trait Validator: PartialEq + Default + CloneRef { } /// Disabled validator. See the docs of [`VarMap`] to learn more. -#[derive(Copy, Clone, CloneRef, Debug, Default, PartialEq)] +#[derive(Copy, Clone, CloneRef, Debug, Default, PartialEq, Eq)] pub struct DisabledValidator; /// Enabled validator. See the docs of [`VarMap`] to learn more. 
diff --git a/lib/rust/parser/src/main.rs b/lib/rust/parser/src/main.rs index 756dab2d52..95d905e1bc 100644 --- a/lib/rust/parser/src/main.rs +++ b/lib/rust/parser/src/main.rs @@ -7,11 +7,11 @@ #![feature(exact_size_is_empty)] #![feature(test)] #![feature(specialization)] -#![feature(let_chains)] #![feature(if_let_guard)] // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/parser/src/syntax/tree.rs b/lib/rust/parser/src/syntax/tree.rs index 07b91599d8..1c3fb8a40a 100644 --- a/lib/rust/parser/src/syntax/tree.rs +++ b/lib/rust/parser/src/syntax/tree.rs @@ -472,9 +472,9 @@ impl<'s> span::Builder<'s> for TextElement<'s> { impl<'s, 'a> TreeVisitable<'s, 'a> for VisibleOffset {} impl<'s, 'a> TreeVisitableMut<'s, 'a> for VisibleOffset {} -impl<'a, 't, 's> SpanVisitable<'s, 'a> for VisibleOffset {} -impl<'a, 't, 's> SpanVisitableMut<'s, 'a> for VisibleOffset {} -impl<'a, 't, 's> ItemVisitable<'s, 'a> for VisibleOffset {} +impl<'a, 's> SpanVisitable<'s, 'a> for VisibleOffset {} +impl<'a, 's> SpanVisitableMut<'s, 'a> for VisibleOffset {} +impl<'a, 's> ItemVisitable<'s, 'a> for VisibleOffset {} impl<'s> span::Builder<'s> for VisibleOffset { fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> { span @@ -1195,9 +1195,9 @@ crate::with_token_definition!(define_visitor_for_tokens()); impl<'s, 'a> TreeVisitable<'s, 'a> for u32 {} impl<'s, 'a> TreeVisitableMut<'s, 'a> for u32 {} -impl<'a, 't, 's> SpanVisitable<'s, 'a> for u32 {} -impl<'a, 't, 's> SpanVisitableMut<'s, 'a> for u32 {} -impl<'a, 't, 's> ItemVisitable<'s, 'a> for u32 {} +impl<'a, 's> SpanVisitable<'s, 'a> for u32 {} +impl<'a, 's> SpanVisitableMut<'s, 'a> for u32 {} +impl<'a, 's> ItemVisitable<'s, 'a> for u32 {} impl<'s> span::Builder<'s> for u32 { fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> { span 
@@ -1251,14 +1251,14 @@ impl<'s, 'a> SpanVisitableMut<'s, 'a> for Tree<'s> { } } -impl<'a, 't, 's, T> SpanVisitable<'s, 'a> for Token<'s, T> { +impl<'a, 's, T> SpanVisitable<'s, 'a> for Token<'s, T> { fn visit_span>(&'a self, visitor: &mut V) { let code_length = self.code.length(); visitor.visit(span::Ref { left_offset: &self.left_offset, code_length }); } } -impl<'a, 't, 's, T> SpanVisitableMut<'s, 'a> for Token<'s, T> { +impl<'a, 's, T> SpanVisitableMut<'s, 'a> for Token<'s, T> { fn visit_span_mut>(&'a mut self, visitor: &mut V) { let code_length = self.code.length(); visitor.visit_mut(span::RefMut { left_offset: &mut self.left_offset, code_length }); @@ -1289,9 +1289,9 @@ where &'a Token<'s, T>: Into> impl<'s, 'a> TreeVisitable<'s, 'a> for String {} impl<'s, 'a> TreeVisitableMut<'s, 'a> for String {} -impl<'a, 't, 's> SpanVisitable<'s, 'a> for String {} -impl<'a, 't, 's> SpanVisitableMut<'s, 'a> for String {} -impl<'a, 't, 's> ItemVisitable<'s, 'a> for String {} +impl<'a, 's> SpanVisitable<'s, 'a> for String {} +impl<'a, 's> SpanVisitableMut<'s, 'a> for String {} +impl<'a, 's> ItemVisitable<'s, 'a> for String {} impl<'s> span::Builder<'s> for String { fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> { span @@ -1300,9 +1300,9 @@ impl<'s> span::Builder<'s> for String { impl<'s, 'a> TreeVisitable<'s, 'a> for Cow<'static, str> {} impl<'s, 'a> TreeVisitableMut<'s, 'a> for Cow<'static, str> {} -impl<'a, 't, 's> SpanVisitable<'s, 'a> for Cow<'static, str> {} -impl<'a, 't, 's> SpanVisitableMut<'s, 'a> for Cow<'static, str> {} -impl<'a, 't, 's> ItemVisitable<'s, 'a> for Cow<'static, str> {} +impl<'a, 's> SpanVisitable<'s, 'a> for Cow<'static, str> {} +impl<'a, 's> SpanVisitableMut<'s, 'a> for Cow<'static, str> {} +impl<'a, 's> ItemVisitable<'s, 'a> for Cow<'static, str> {} impl<'s> span::Builder<'s> for Cow<'static, str> { fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> { span diff --git a/lib/rust/parser/src/syntax/tree/visitor/src/lib.rs 
b/lib/rust/parser/src/syntax/tree/visitor/src/lib.rs index 23128d51cc..66979cf54f 100644 --- a/lib/rust/parser/src/syntax/tree/visitor/src/lib.rs +++ b/lib/rust/parser/src/syntax/tree/visitor/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/parser/tests/parse.rs b/lib/rust/parser/tests/parse.rs index 2e6a582ae1..0737ca2984 100644 --- a/lib/rust/parser/tests/parse.rs +++ b/lib/rust/parser/tests/parse.rs @@ -1,7 +1,5 @@ //! Parse expressions and compare their results to expected values. -// === Features === -#![feature(let_else)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] #![allow(clippy::precedence)] diff --git a/lib/rust/prelude/src/lib.rs b/lib/rust/prelude/src/lib.rs index 9407862f33..c9c92c8d0e 100644 --- a/lib/rust/prelude/src/lib.rs +++ b/lib/rust/prelude/src/lib.rs @@ -15,6 +15,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] #![allow(incomplete_features)] // To be removed, see: https://github.com/enso-org/ide/issues/1559 #![warn(missing_copy_implementations)] diff --git a/lib/rust/prelude/src/serde.rs b/lib/rust/prelude/src/serde.rs index 8036143ecf..471278746e 100644 --- a/lib/rust/prelude/src/serde.rs +++ b/lib/rust/prelude/src/serde.rs @@ -61,12 +61,12 @@ mod tests { fn deserialize_or_default_attribute_test() { // Two structures - same except for `deserialize_or_default` atribute. // One fails to deserialize, second one goes through. 
- #[derive(Debug, Deserialize, PartialEq, Serialize)] + #[derive(Debug, Deserialize, PartialEq, Eq, Serialize)] struct Foo { blah: String, boom: Vec, } - #[derive(Debug, Deserialize, PartialEq, Serialize)] + #[derive(Debug, Deserialize, Eq, PartialEq, Serialize)] struct Bar { #[serde(deserialize_with = "deserialize_or_default")] blah: String, @@ -82,7 +82,7 @@ mod tests { #[test] fn deserialize_or_default_attribute_for_optional_field() { - #[derive(Debug, Deserialize, PartialEq, Serialize)] + #[derive(Debug, Deserialize, Eq, PartialEq, Serialize)] struct Foo { #[serde(default, deserialize_with = "deserialize_or_default")] blah: Option, diff --git a/lib/rust/prelude/src/tp.rs b/lib/rust/prelude/src/tp.rs index bd0e7b650f..1c962da137 100644 --- a/lib/rust/prelude/src/tp.rs +++ b/lib/rust/prelude/src/tp.rs @@ -10,7 +10,7 @@ use super::std_reexports::*; /// Placeholder type used to represent any value type. It is useful to define type-level relations /// like defining an unit with any quantity, let it be distance or mass. -#[derive(Clone, Copy, Debug, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct Anything {} diff --git a/lib/rust/profiler/data/src/bin/devtools.rs b/lib/rust/profiler/data/src/bin/devtools.rs index 6ca23efaf3..e0be8851bb 100644 --- a/lib/rust/profiler/data/src/bin/devtools.rs +++ b/lib/rust/profiler/data/src/bin/devtools.rs @@ -21,6 +21,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] @@ -104,7 +105,7 @@ impl<'p, Metadata> IntervalTranslator<'p, Metadata> { let mut builder = Self { profile, events }; // We skip the root node APP_LIFETIME, which is not a real measurement. for child in &profile.root_interval().children { - builder.visit_interval(*child, 0); + builder.visit_interval(*child); } let Self { events, .. 
} = builder; events @@ -113,7 +114,7 @@ impl<'p, Metadata> IntervalTranslator<'p, Metadata> { impl<'p, Metadata> IntervalTranslator<'p, Metadata> { /// Translate an interval, and its children. - fn visit_interval(&mut self, active: data::IntervalId, row: u32) { + fn visit_interval(&mut self, active: data::IntervalId) { let active = &self.profile[active]; let measurement = &self.profile[active.measurement]; let start = active.interval.start.into_ms(); @@ -133,7 +134,7 @@ impl<'p, Metadata> IntervalTranslator<'p, Metadata> { self.events.push(event); } for child in &active.children { - self.visit_interval(*child, row + 1); + self.visit_interval(*child); } } } diff --git a/lib/rust/profiler/data/src/bin/intervals.rs b/lib/rust/profiler/data/src/bin/intervals.rs index 7237585419..46544b4ff9 100644 --- a/lib/rust/profiler/data/src/bin/intervals.rs +++ b/lib/rust/profiler/data/src/bin/intervals.rs @@ -17,6 +17,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/lib/rust/profiler/data/src/bin/measurements.rs b/lib/rust/profiler/data/src/bin/measurements.rs index ab8ed01c16..e03bc2b854 100644 --- a/lib/rust/profiler/data/src/bin/measurements.rs +++ b/lib/rust/profiler/data/src/bin/measurements.rs @@ -15,6 +15,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/lib/rust/profiler/data/src/bin/processes.rs b/lib/rust/profiler/data/src/bin/processes.rs index a75cffe1df..18a94131d6 100644 --- a/lib/rust/profiler/data/src/bin/processes.rs +++ b/lib/rust/profiler/data/src/bin/processes.rs @@ -13,6 +13,7 @@ // === Standard Linter Configuration === 
#![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/lib/rust/profiler/data/src/lib.rs b/lib/rust/profiler/data/src/lib.rs index ef6ecd830c..bbd4840d7e 100644 --- a/lib/rust/profiler/data/src/lib.rs +++ b/lib/rust/profiler/data/src/lib.rs @@ -75,6 +75,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/lib/rust/profiler/demo-data/src/lib.rs b/lib/rust/profiler/demo-data/src/lib.rs index 2cff493546..b10f68098a 100644 --- a/lib/rust/profiler/demo-data/src/lib.rs +++ b/lib/rust/profiler/demo-data/src/lib.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(unused_qualifications)] diff --git a/lib/rust/profiler/flame-graph/src/lib.rs b/lib/rust/profiler/flame-graph/src/lib.rs index 987a194ebd..7470c7168c 100644 --- a/lib/rust/profiler/flame-graph/src/lib.rs +++ b/lib/rust/profiler/flame-graph/src/lib.rs @@ -5,6 +5,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use enso_profiler as profiler; diff --git a/lib/rust/profiler/macros/src/lib.rs b/lib/rust/profiler/macros/src/lib.rs index 71aab72f81..3a9d78d60c 100644 --- a/lib/rust/profiler/macros/src/lib.rs +++ b/lib/rust/profiler/macros/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter 
Configuration === #![deny(unconditional_recursion)] diff --git a/lib/rust/profiler/src/lib.rs b/lib/rust/profiler/src/lib.rs index 7ca005a8c4..05bb3b5e06 100644 --- a/lib/rust/profiler/src/lib.rs +++ b/lib/rust/profiler/src/lib.rs @@ -130,6 +130,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] diff --git a/lib/rust/profiler/src/log.rs b/lib/rust/profiler/src/log.rs index 2b3b7bf809..ab71d20acd 100644 --- a/lib/rust/profiler/src/log.rs +++ b/lib/rust/profiler/src/log.rs @@ -35,7 +35,7 @@ impl Log { pub fn append(&self, element: T) { // Note [Log Safety] unsafe { - (&mut *self.0.get()).push(element); + (*self.0.get()).push(element); } } diff --git a/lib/rust/reflect/macros/src/lib.rs b/lib/rust/reflect/macros/src/lib.rs index 91b92e0984..466071d8c6 100644 --- a/lib/rust/reflect/macros/src/lib.rs +++ b/lib/rust/reflect/macros/src/lib.rs @@ -106,6 +106,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/reflect/src/lib.rs b/lib/rust/reflect/src/lib.rs index cc8f14d8ba..db22c27402 100644 --- a/lib/rust/reflect/src/lib.rs +++ b/lib/rust/reflect/src/lib.rs @@ -113,6 +113,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/shapely/macros/src/derive_clone_ref.rs b/lib/rust/shapely/macros/src/derive_clone_ref.rs index 5e7bb8356a..eb6a8eaf0a 100644 --- a/lib/rust/shapely/macros/src/derive_clone_ref.rs +++ 
b/lib/rust/shapely/macros/src/derive_clone_ref.rs @@ -142,7 +142,7 @@ pub fn is_custom_bound(name_val: &MetaNameValue) -> bool { /// Panics if this is our attribute but the syntax is not correct. pub fn clone_ref_bounds(attr: &Attribute) -> Option> { // Silently ignore foreign attributes. Be picky only about our one. - is_clone_ref_customization(attr).then(|| ())?; + is_clone_ref_customization(attr).then_some(())?; let meta = attr.parse_meta().expect("Failed to parse attribute contents."); let list = match meta { diff --git a/lib/rust/shapely/macros/src/derive_iterator.rs b/lib/rust/shapely/macros/src/derive_iterator.rs index d1110976d2..af2f01b9ab 100644 --- a/lib/rust/shapely/macros/src/derive_iterator.rs +++ b/lib/rust/shapely/macros/src/derive_iterator.rs @@ -17,7 +17,7 @@ use itertools::Itertools; // ============= /// Describes whether a mutable or immutable iterator is being derived. -#[derive(Clone, Copy, Debug, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum IsMut { Mutable, Immutable, @@ -285,7 +285,7 @@ impl DerivingIterator<'_> { let iterator_tydefs = quote!( // type FooIterator<'t, T> = impl Iterator; // type FooIteratorMut<'t, T> = impl Iterator; - type #t_iterator<'t, #(#iterator_params),*> = + type #t_iterator<'t, #(#iterator_params: 't),*> = impl Iterator; ); let matched_fields = DependentValue::collect_struct(data, target_param); diff --git a/lib/rust/shapely/macros/src/lib.rs b/lib/rust/shapely/macros/src/lib.rs index 4d270beada..7a38f290aa 100644 --- a/lib/rust/shapely/macros/src/lib.rs +++ b/lib/rust/shapely/macros/src/lib.rs @@ -3,11 +3,11 @@ //! necessary for the generated code to compile. 
// === Features === -#![feature(bool_to_option)] #![feature(exact_size_is_empty)] // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/lib/rust/shapely/src/lib.rs b/lib/rust/shapely/src/lib.rs index 6bd63908db..6c8721b02f 100644 --- a/lib/rust/shapely/src/lib.rs +++ b/lib/rust/shapely/src/lib.rs @@ -11,6 +11,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/shortcuts/example/src/lib.rs b/lib/rust/shortcuts/example/src/lib.rs index 9db743a7bb..7504fb7e40 100644 --- a/lib/rust/shortcuts/example/src/lib.rs +++ b/lib/rust/shortcuts/example/src/lib.rs @@ -1,6 +1,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] use enso_prelude::*; diff --git a/lib/rust/shortcuts/src/lib.rs b/lib/rust/shortcuts/src/lib.rs index 7955f8edef..fe29eed63b 100644 --- a/lib/rust/shortcuts/src/lib.rs +++ b/lib/rust/shortcuts/src/lib.rs @@ -6,6 +6,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/text/src/lib.rs b/lib/rust/text/src/lib.rs index 506ee3ad2e..1f3eaa8472 100644 --- a/lib/rust/text/src/lib.rs +++ b/lib/rust/text/src/lib.rs @@ -84,6 +84,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter 
Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/types/src/lib.rs b/lib/rust/types/src/lib.rs index e22d7a92a8..2f8a713832 100644 --- a/lib/rust/types/src/lib.rs +++ b/lib/rust/types/src/lib.rs @@ -12,6 +12,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/lib/rust/web/src/lib.rs b/lib/rust/web/src/lib.rs index d1e53abd9d..cc27fee472 100644 --- a/lib/rust/web/src/lib.rs +++ b/lib/rust/web/src/lib.rs @@ -13,6 +13,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_copy_implementations)] diff --git a/run b/run index 42c042388b..6342840109 100755 --- a/run +++ b/run @@ -4,7 +4,7 @@ set -e # Exit on error. 
# Get the directory of the script, as per https://stackoverflow.com/a/246128 SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -BIN_CRATE_NAME=enso-build3 +BIN_CRATE_NAME=enso-build-cli TARGET_DIR="${SCRIPT_DIR}/target/enso-build/" TARGET_EXE="${TARGET_DIR}buildscript/${BIN_CRATE_NAME}" diff --git a/run.cmd b/run.cmd index 0aa2a4f864..eabf9433ef 100644 --- a/run.cmd +++ b/run.cmd @@ -1,6 +1,6 @@ pushd %~dp0 @ set TARGET_DIR=%~dp0target\enso-build -@ set TARGET_EXE=%TARGET_DIR%\buildscript\enso-build3.exe -cargo build --profile buildscript --target-dir "%TARGET_DIR%" --package enso-build3 && "%TARGET_EXE%" %* +@ set TARGET_EXE=%TARGET_DIR%\buildscript\enso-build-cli.exe +cargo build --profile buildscript --target-dir "%TARGET_DIR%" --package enso-build-cli && "%TARGET_EXE%" %* popd exit /b %ERRORLEVEL% diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 97d572ad50..7e4784e709 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "nightly-2022-04-07" -components = ["clippy"] +channel = "nightly-2022-09-20" +components = ["clippy", "rustfmt"] profile = "default" targets = ["wasm32-unknown-unknown"] diff --git a/rustfmt.toml b/rustfmt.toml index 9a6fa866ca..dad37d76c2 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -9,7 +9,6 @@ format_code_in_doc_comments = true comment_width = 100 normalize_comments = false normalize_doc_attributes = false -license_template_path = "" format_strings = false format_macro_matchers = false format_macro_bodies = true @@ -60,8 +59,6 @@ skip_children = false hide_parse_errors = false error_on_line_overflow = false error_on_unformatted = false -report_todo = "Never" -report_fixme = "Never" ignore = [] emit_mode = "Files" make_backup = false diff --git a/tools/language-server/logstat/Cargo.toml b/tools/language-server/logstat/Cargo.toml index f00dbc44a7..4d46f8ccc6 100644 --- a/tools/language-server/logstat/Cargo.toml +++ 
b/tools/language-server/logstat/Cargo.toml @@ -10,5 +10,5 @@ lazy_static = "1.4.0" enso-prelude = { path = "../../../lib/rust/prelude" } regex = "1.6.0" time = { version = "0.3", features = ["formatting", "parsing"] } -tokio = { version = "1", features = ["full"] } +tokio = { workspace = true } tokio-stream = { version = "0.1.9", features = ["io-util"] } diff --git a/tools/language-server/logstat/src/main.rs b/tools/language-server/logstat/src/main.rs index 994407adee..5601f691d0 100644 --- a/tools/language-server/logstat/src/main.rs +++ b/tools/language-server/logstat/src/main.rs @@ -4,6 +4,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] diff --git a/tools/language-server/wstest/Cargo.toml b/tools/language-server/wstest/Cargo.toml index 56208ede3e..41bcddcf8d 100644 --- a/tools/language-server/wstest/Cargo.toml +++ b/tools/language-server/wstest/Cargo.toml @@ -12,7 +12,7 @@ futures = "0.3" enso-prelude = { path = "../../../lib/rust/prelude" } regex = "1.6.0" time = { version = "0.3", features = ["formatting"] } -tokio = { version = "1.19.2", features = ["full"] } +tokio = { workspace = true } tokio-stream = { version = "0.1.9", features = ["io-util"] } url = "2.2" websocket-lite = "0.5" diff --git a/tools/language-server/wstest/src/main.rs b/tools/language-server/wstest/src/main.rs index 732070fe4a..7a06c45e67 100644 --- a/tools/language-server/wstest/src/main.rs +++ b/tools/language-server/wstest/src/main.rs @@ -3,6 +3,7 @@ // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] // === Non-Standard Linter Configuration === #![warn(missing_docs)]