Mirror of https://github.com/enso-org/enso.git (synced 2024-12-23 02:21:54 +03:00)

CI improvements / IDE2 releasing (#8059)

commit e693147c17
parent 2df2b50f4d
.github/workflows/gui.yml (vendored): 16 changed lines
@@ -24,7 +24,7 @@ jobs:
 conda-channels: anaconda, conda-forge
 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
 name: Installing wasm-pack
-uses: jetli/wasm-pack-action@v0.3.0
+uses: jetli/wasm-pack-action@v0.4.0
 with:
 version: v0.10.2
 - name: Expose Artifact API and context information.
@@ -498,7 +498,7 @@ jobs:
 conda-channels: anaconda, conda-forge
 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
 name: Installing wasm-pack
-uses: jetli/wasm-pack-action@v0.3.0
+uses: jetli/wasm-pack-action@v0.4.0
 with:
 version: v0.10.2
 - name: Expose Artifact API and context information.
@@ -554,7 +554,7 @@ jobs:
 conda-channels: anaconda, conda-forge
 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
 name: Installing wasm-pack
-uses: jetli/wasm-pack-action@v0.3.0
+uses: jetli/wasm-pack-action@v0.4.0
 with:
 version: v0.10.2
 - name: Expose Artifact API and context information.
@@ -612,7 +612,7 @@ jobs:
 conda-channels: anaconda, conda-forge
 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
 name: Installing wasm-pack
-uses: jetli/wasm-pack-action@v0.3.0
+uses: jetli/wasm-pack-action@v0.4.0
 with:
 version: v0.10.2
 - name: Expose Artifact API and context information.
@@ -670,7 +670,7 @@ jobs:
 conda-channels: anaconda, conda-forge
 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
 name: Installing wasm-pack
-uses: jetli/wasm-pack-action@v0.3.0
+uses: jetli/wasm-pack-action@v0.4.0
 with:
 version: v0.10.2
 - name: Expose Artifact API and context information.
@@ -730,7 +730,7 @@ jobs:
 conda-channels: anaconda, conda-forge
 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
 name: Installing wasm-pack
-uses: jetli/wasm-pack-action@v0.3.0
+uses: jetli/wasm-pack-action@v0.4.0
 with:
 version: v0.10.2
 - name: Expose Artifact API and context information.
@@ -788,7 +788,7 @@ jobs:
 conda-channels: anaconda, conda-forge
 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
 name: Installing wasm-pack
-uses: jetli/wasm-pack-action@v0.3.0
+uses: jetli/wasm-pack-action@v0.4.0
 with:
 version: v0.10.2
 - name: Expose Artifact API and context information.
@@ -853,7 +853,7 @@ jobs:
 conda-channels: anaconda, conda-forge
 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
 name: Installing wasm-pack
-uses: jetli/wasm-pack-action@v0.3.0
+uses: jetli/wasm-pack-action@v0.4.0
 with:
 version: v0.10.2
 - name: Expose Artifact API and context information.
.github/workflows/release.yml (vendored): 206 changed lines
@@ -448,6 +448,9 @@ jobs:
 - enso-build-ci-gen-upload-ide-linux
 - enso-build-ci-gen-upload-ide-macos
 - enso-build-ci-gen-upload-ide-windows
+- enso-build-ci-gen-upload-ide2-linux
+- enso-build-ci-gen-upload-ide2-macos
+- enso-build-ci-gen-upload-ide2-windows
 runs-on:
 - self-hosted
 - Linux
@@ -511,7 +514,7 @@ jobs:
 ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
 ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
 enso-build-ci-gen-upload-ide-linux:
-name: Build IDE (linux)
+name: Build Old IDE (linux)
 needs:
 - enso-build-ci-gen-draft-release-linux
 - enso-build-ci-gen-job-build-wasm-linux
@@ -576,7 +579,7 @@ jobs:
 ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
 ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
 enso-build-ci-gen-upload-ide-macos:
-name: Build IDE (macos)
+name: Build Old IDE (macos)
 needs:
 - enso-build-ci-gen-draft-release-linux
 - enso-build-ci-gen-job-build-wasm-linux
@@ -644,7 +647,7 @@ jobs:
 ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
 ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
 enso-build-ci-gen-upload-ide-windows:
-name: Build IDE (windows)
+name: Build Old IDE (windows)
 needs:
 - enso-build-ci-gen-draft-release-linux
 - enso-build-ci-gen-job-build-wasm-linux
@@ -710,6 +713,203 @@ jobs:
 env:
 ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
 ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
+enso-build-ci-gen-upload-ide2-linux:
+name: Build New IDE (linux)
+needs:
+- enso-build-ci-gen-draft-release-linux
+- enso-build-ci-gen-job-upload-backend-linux
+runs-on:
+- self-hosted
+- Linux
+- engine
+steps:
+- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+name: Setup conda (GH runners only)
+uses: s-weigand/setup-conda@v1.0.6
+with:
+update-conda: false
+conda-channels: anaconda, conda-forge
+- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+name: Installing wasm-pack
+uses: jetli/wasm-pack-action@v0.4.0
+with:
+version: v0.10.2
+- name: Expose Artifact API and context information.
+uses: actions/github-script@v6
+with:
+script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
+- if: runner.os == 'Windows'
+name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
+shell: cmd
+- if: runner.os != 'Windows'
+name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
+shell: bash
+- name: Checking out the repository
+uses: actions/checkout@v2
+with:
+clean: false
+submodules: recursive
+- name: Build Script Setup
+run: ./run --help
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+name: Clean before
+run: ./run git-clean
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+- run: ./run ide2 upload --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+- if: failure() && runner.os == 'Windows'
+name: List files if failed (Windows)
+run: Get-ChildItem -Force -Recurse
+- if: failure() && runner.os != 'Windows'
+name: List files if failed (non-Windows)
+run: ls -lAR
+- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+name: Clean after
+run: ./run git-clean
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+env:
+ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
+ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
+enso-build-ci-gen-upload-ide2-macos:
+name: Build New IDE (macos)
+needs:
+- enso-build-ci-gen-draft-release-linux
+- enso-build-ci-gen-job-upload-backend-macos
+runs-on:
+- macos-latest
+steps:
+- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+name: Setup conda (GH runners only)
+uses: s-weigand/setup-conda@v1.0.6
+with:
+update-conda: false
+conda-channels: anaconda, conda-forge
+- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+name: Installing wasm-pack
+uses: jetli/wasm-pack-action@v0.4.0
+with:
+version: v0.10.2
+- name: Expose Artifact API and context information.
+uses: actions/github-script@v6
+with:
+script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
+- if: runner.os == 'Windows'
+name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
+shell: cmd
+- if: runner.os != 'Windows'
+name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
+shell: bash
+- name: Checking out the repository
+uses: actions/checkout@v2
+with:
+clean: false
+submodules: recursive
+- name: Build Script Setup
+run: ./run --help
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+name: Clean before
+run: ./run git-clean
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+- run: ./run ide2 upload --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}
+env:
+APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
+APPLEIDPASS: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
+CSC_IDENTITY_AUTO_DISCOVERY: "true"
+CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODE_SIGNING_CERT_PASSWORD }}
+CSC_LINK: ${{ secrets.APPLE_CODE_SIGNING_CERT }}
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+- if: failure() && runner.os == 'Windows'
+name: List files if failed (Windows)
+run: Get-ChildItem -Force -Recurse
+- if: failure() && runner.os != 'Windows'
+name: List files if failed (non-Windows)
+run: ls -lAR
+- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+name: Clean after
+run: ./run git-clean
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+env:
+ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
+ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
+enso-build-ci-gen-upload-ide2-windows:
+name: Build New IDE (windows)
+needs:
+- enso-build-ci-gen-draft-release-linux
+- enso-build-ci-gen-job-upload-backend-windows
+runs-on:
+- self-hosted
+- Windows
+- engine
+steps:
+- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+name: Setup conda (GH runners only)
+uses: s-weigand/setup-conda@v1.0.6
+with:
+update-conda: false
+conda-channels: anaconda, conda-forge
+- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+name: Installing wasm-pack
+uses: jetli/wasm-pack-action@v0.4.0
+with:
+version: v0.10.2
+- name: Expose Artifact API and context information.
+uses: actions/github-script@v6
+with:
+script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
+- if: runner.os == 'Windows'
+name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
+shell: cmd
+- if: runner.os != 'Windows'
+name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
+shell: bash
+- name: Checking out the repository
+uses: actions/checkout@v2
+with:
+clean: false
+submodules: recursive
+- name: Build Script Setup
+run: ./run --help
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+name: Clean before
+run: ./run git-clean
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+- run: ./run ide2 upload --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+WIN_CSC_KEY_PASSWORD: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT_PASSWORD }}
+WIN_CSC_LINK: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT }}
+- if: failure() && runner.os == 'Windows'
+name: List files if failed (Windows)
+run: Get-ChildItem -Force -Recurse
+- if: failure() && runner.os != 'Windows'
+name: List files if failed (non-Windows)
+run: ls -lAR
+- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+name: Clean after
+run: ./run git-clean
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+env:
+ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
+ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
 env:
 ENSO_BUILD_SKIP_VERSION_CHECK: "true"
 ENSO_EDITION: ${{ inputs.version }}
@@ -10,6 +10,7 @@ use crate::version::ENSO_RELEASE_MODE;
 use crate::version::ENSO_VERSION;

 use ide_ci::actions::workflow::definition::checkout_repo_step;
+use ide_ci::actions::workflow::definition::get_input_expression;
 use ide_ci::actions::workflow::definition::is_non_windows_runner;
 use ide_ci::actions::workflow::definition::is_windows_runner;
 use ide_ci::actions::workflow::definition::run;
|
||||
|
||||
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct DeluxeRunner;
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct BenchmarkRunner;
|
||||
|
||||
@@ -115,21 +113,6 @@ pub fn release_concurrency() -> Concurrency {
 Concurrency::new(RELEASE_CONCURRENCY_GROUP)
 }

-/// Get expression that gets input from the workflow dispatch. See:
-/// <https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#providing-inputs>
-pub fn get_input_expression(name: impl Into<String>) -> String {
-wrap_expression(format!("inputs.{}", name.into()))
-}
-
-impl RunsOn for DeluxeRunner {
-fn runs_on(&self) -> Vec<RunnerLabel> {
-vec![RunnerLabel::MwuDeluxe]
-}
-fn os_name(&self) -> Option<String> {
-None
-}
-}
-
 impl RunsOn for BenchmarkRunner {
 fn runs_on(&self) -> Vec<RunnerLabel> {
 vec![RunnerLabel::Benchmark]
@@ -251,12 +234,25 @@ impl JobArchetype for PublishRelease {
 pub struct UploadIde;
 impl JobArchetype for UploadIde {
 fn job(&self, os: OS) -> Job {
-plain_job_customized(&os, "Build IDE", "ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}", |step|
+plain_job_customized(&os, "Build Old IDE", "ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}", |step|
 vec![expose_os_specific_signing_secret(os, step)]
 )
 }
 }
+
+#[derive(Clone, Copy, Debug)]
+pub struct UploadIde2;
+impl JobArchetype for UploadIde2 {
+fn job(&self, os: OS) -> Job {
+plain_job_customized(
+&os,
+"Build New IDE",
+"ide2 upload --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}",
+|step| vec![expose_os_specific_signing_secret(os, step)],
+)
+}
+}

 #[derive(Clone, Copy, Debug)]
 pub struct PromoteReleaseJob;
 impl JobArchetype for PromoteReleaseJob {
@@ -335,6 +331,10 @@ fn add_release_steps(workflow: &mut Workflow) -> Result {
 ]);
 packaging_job_ids.push(build_ide_job_id.clone());

+let build_ide2_job_id =
+workflow.add_dependent(os, UploadIde2, [&prepare_job_id, &backend_job_id]);
+packaging_job_ids.push(build_ide2_job_id.clone());
+
 // Deploying our release to cloud needs to be done only once.
 // We could do this on any platform, but we choose Linux, because it's most easily
 // available and performant.
@@ -4,6 +4,7 @@

 use crate::prelude::*;

+use crate::engine::bundle::GraalVmVersion;
 use crate::get_graal_packages_version;
 use crate::get_graal_version;
 use crate::paths::generated;
@@ -27,7 +28,6 @@ pub mod env;
 pub mod package;
 pub mod sbt;

-use crate::engine::bundle::GraalVmVersion;
 pub use context::RunContext;


@@ -27,6 +27,8 @@ pub trait IsBundle: AsRef<Path> + IsArtifact {
 }

 /// Path to the directory where GraalVM is placed.
+///
+/// This will be the JAVA_HOME for the bundled component.
 fn graalvm_dir(&self) -> PathBuf;

 /// Path to the directory where Engine package is placed.
@@ -222,13 +222,26 @@ impl RunContext {
 ide_ci::fs::remove_if_exists(&self.paths.repo_root.engine.runtime.bench_report_xml)?;
 }

-let test_results_dir = if self.config.test_standard_library {
+let _test_results_upload_guard =
+if self.config.test_scala || self.config.test_standard_library {
 // If we run tests, make sure that old and new results won't end up mixed together.
 let test_results_dir = ENSO_TEST_JUNIT_DIR
 .get()
 .unwrap_or_else(|_| self.paths.repo_root.target.test_results.path.clone());
 ide_ci::fs::reset_dir(&test_results_dir)?;
-Some(test_results_dir)
+
+// If we are run in CI conditions and we prepared some test results, we want to
+// upload them as a separate artifact to ease debugging. And we do want to do that
+// even if the tests fail and we are leaving the scope with an error.
+is_in_env().then(|| {
+scopeguard::guard(test_results_dir, |test_results_dir| {
+ide_ci::global::spawn(
+"Upload test results",
+upload_test_results(test_results_dir),
+);
+})
+})
 } else {
 None
 };
@@ -384,18 +397,6 @@
 if self.config.test_standard_library {
 enso.run_tests(IrCaches::No, &sbt, PARALLEL_ENSO_TESTS).await?;
 }
-// If we are run in CI conditions and we prepared some test results, we want to upload
-// them as a separate artifact to ease debugging.
-if let Some(test_results_dir) = test_results_dir && is_in_env() {
-// Each platform gets its own log results, so we need to generate unique names.
-let name = format!("Test_Results_{TARGET_OS}");
-if let Err(err) = ide_ci::actions::artifacts::upload_compressed_directory(&test_results_dir, name)
-.await {
-// We wouldn't want to fail the whole build if we can't upload the test results.
-// Still, it should be somehow visible in the build summary.
-ide_ci::actions::workflow::message(MessageLevel::Warning, format!("Failed to upload test results: {err}"));
-}
-}

 perhaps_test_java_generated_from_rust_job.await.transpose()?;

@@ -639,3 +640,25 @@
 Ok(())
 }
 }
+
+/// Upload the directory with Enso-generated test results.
+///
+/// This is meant to ease debugging, it does not really affect the build.
+#[context("Failed to upload test results.")]
+pub async fn upload_test_results(test_results_dir: PathBuf) -> Result {
+// Each platform gets its own log results, so we need to generate unique
+// names.
+let name = format!("Test_Results_{TARGET_OS}");
+let upload_result =
+ide_ci::actions::artifacts::upload_compressed_directory(&test_results_dir, name).await;
+if let Err(err) = &upload_result {
+// We wouldn't want to fail the whole build if we can't upload the test
+// results. Still, it should be somehow
+// visible in the build summary.
+ide_ci::actions::workflow::message(
+MessageLevel::Warning,
+format!("Failed to upload test results: {err}"),
+);
+}
+upload_result
+}
@@ -26,7 +26,6 @@ use octocrab::models::repos::Release;
 use octocrab::params::repos::Reference;
 use reqwest::Response;
 use serde_json::json;
-use std::env::consts::EXE_SUFFIX;
 use tempfile::tempdir;


@@ -190,19 +189,8 @@ pub async fn get_engine_package<R: IsRepo>(
 triple: &TargetTriple,
 ) -> Result<generated::EnginePackage> {
 let release_id = crate::env::ENSO_RELEASE_ID.get()?;
-let package_name = generated::RepoRootBuiltDistribution::new_root(
-".",
-triple.versions.edition_name(),
-EXE_SUFFIX,
-triple.to_string(),
-triple.versions.version.to_string(),
-)
-.enso_engine_triple
-.file_name()
-.context("Failed to get Engine Package name.")?
-.as_str()
-.to_string();
-
+let package_name =
+generated::RepoRootBuiltDistributionEnsoEngineTriple::segment_name(triple.to_string());
 let release = repo.find_release_by_id(release_id).await?;
 let asset = github::find_asset_by_text(&release, &package_name)?;
 let temp_for_archive = tempdir()?;
@@ -19,6 +19,10 @@ use std::sync::atomic::Ordering;
 /// We use a very long timeout because we want to avoid cancelling jobs that are just slow.
 pub const DEFAULT_TIMEOUT_IN_MINUTES: u32 = 360;

+/// The name of the field in the matrix strategy that we use by convention for different
+/// runner labels (OS-es, but also runner-identifying labels).
+const MATRIX_STRATEGY_OS: &str = "os";
+
 pub fn wrap_expression(expression: impl AsRef<str>) -> String {
 format!("${{{{ {} }}}}", expression.as_ref())
 }
@@ -32,6 +36,11 @@ pub fn env_expression(environment_variable: &impl RawVariable) -> String {
 wrap_expression(format!("env.{}", environment_variable.name()))
 }

+/// Get expression that gets input from the workflow dispatch. See:
+/// <https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#providing-inputs>
+pub fn get_input_expression(name: impl Into<String>) -> String {
+wrap_expression(format!("inputs.{}", name.into()))
+}

 pub fn is_github_hosted() -> String {
 "startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')".into()
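For illustration only, not part of this commit: given the `wrap_expression` helper shown earlier in this file, the relocated `get_input_expression` just wraps an input name into a workflow expression. A minimal sketch, where the input name "version" is an example:

    // Hypothetical usage: turn a workflow-dispatch input name into an expression.
    let expr = get_input_expression("version");
    assert_eq!(expr, "${{ inputs.version }}");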
@@ -781,12 +790,10 @@ impl Strategy {
 Ok(self)
 }

-pub fn new_os(labels: impl Serialize) -> Strategy {
-let oses = serde_json::to_value(labels).unwrap();
-Strategy {
-fail_fast: Some(false),
-matrix: [("os".to_string(), oses)].into_iter().collect(),
-}
+pub fn new_os(values: impl IntoIterator<Item: Serialize>) -> Result<Strategy> {
+let mut matrix = Self { fail_fast: Some(false), ..default() };
+matrix.insert_to_matrix(MATRIX_STRATEGY_OS, values)?;
+Ok(matrix)
 }
 }

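For illustration only, not part of this commit: a sketch of how the reworked constructor could be called. It assumes `RunnerLabel` implements `Serialize` (its serde attributes appear further down in this diff); the chosen labels are examples, not the actual call sites.

    // Hypothetical usage: a non-fail-fast matrix over the conventional "os" key.
    let strategy = Strategy::new_os([RunnerLabel::WindowsLatest, RunnerLabel::Benchmark])?;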
@@ -824,6 +831,15 @@ impl Step {
 }

+/// Expose a secret as an environment variable with a given name.
+pub fn with_input_exposed_as(
+self,
+input: impl AsRef<str>,
+given_name: impl Into<String>,
+) -> Self {
+let input_expr = get_input_expression(format!("secrets.{}", input.as_ref()));
+self.with_env(given_name, input_expr)
+}

 pub fn with_secret_exposed_as(
 self,
 secret: impl AsRef<str>,
@@ -978,11 +994,11 @@ pub enum RunnerLabel {
 WindowsLatest,
 #[serde(rename = "X64")]
 X64,
-#[serde(rename = "mwu-deluxe")]
-MwuDeluxe,
 #[serde(rename = "benchmark")]
 Benchmark,
-#[serde(rename = "${{ matrix.os }}")]
+#[serde(rename = "metarunner")]
+Metarunner,
+#[serde(rename = "${{ matrix.os }}")] // Must be in sync with [`MATRIX_STRATEGY_OS`].
 MatrixOs,
 }

@@ -12,6 +12,13 @@ use reqwest::Body;
 use tracing::instrument;


+// ==============
+// === Export ===
+// ==============
+
+pub use octocrab::models::ReleaseId as Id;
+
+
 /// The extensions that will be used for the archives in the GitHub release assets.
 ///
@@ -79,6 +86,21 @@ pub trait IsReleaseExt: IsRelease + Sync {
 .boxed()
 }

+/// Upload a new asset to the release from a given file.
+///
+/// Given closure `f` is used to transform the filename.
+#[instrument(skip_all, fields(source = %path.as_ref().display()), err)]
+async fn upload_asset_file_with_custom_name(
+&self,
+path: impl AsRef<Path> + Send,
+f: impl FnOnce(String) -> String + Send,
+) -> Result<Asset> {
+let error_msg =
+format!("Failed to upload an asset from the file under {}.", path.as_ref().display());
+let filename = path.as_ref().try_file_name().map(|filename| f(filename.as_str().into()));
+async move { self.upload_asset_file_as(path, &filename?).await }.await.context(error_msg)
+}
+
 /// Upload a new asset to the release from a given file.
 ///
 /// The filename will be used to name the asset and deduce MIME content type.
@@ -86,9 +108,29 @@ pub trait IsReleaseExt: IsRelease + Sync {
 async fn upload_asset_file(&self, path: impl AsRef<Path> + Send) -> Result<Asset> {
 let error_msg =
 format!("Failed to upload an asset from the file under {}.", path.as_ref().display());
+async move {
+let filename = path.try_file_name()?.to_owned();
+self.upload_asset_file_as(path, filename).await
+}
+.await
+.context(error_msg)
+}
+
+/// Upload a new asset to the release from a given file with a custom name.
+#[instrument(skip_all, fields(source = %path.as_ref().display(), asset = %asset_filename.as_ref().display()), err)]
+async fn upload_asset_file_as(
+&self,
+path: impl AsRef<Path> + Send,
+asset_filename: impl AsRef<Path> + Send,
+) -> Result<Asset> {
+let error_msg = format!(
+"Failed to upload an asset from the file under {} as {}.",
+path.as_ref().display(),
+asset_filename.as_ref().display()
+);
 async move {
 let path = path.as_ref().to_path_buf();
-let asset_name = path.try_file_name()?;
+let asset_name = asset_filename.as_ref().to_owned();
 let content_type = new_mime_guess::from_path(&path).first_or_octet_stream();
 let metadata = crate::fs::tokio::metadata(&path).await?;
 trace!("File metadata: {metadata:#?}.");
@@ -267,6 +267,14 @@ impl Docker {
 cmd.args(["push", image]);
 cmd.run_ok().await
 }

+/// Prune, i.e. remove unused data.
+pub async fn system_prune(&self, options: &PruneOptions) -> Result {
+let mut cmd = self.cmd()?;
+cmd.arg("system").arg("prune");
+cmd.args(options.args());
+cmd.run_ok().await
+}
+
 }

 #[derive(Clone, Debug)]
@@ -576,6 +584,42 @@ impl std::str::FromStr for ContainerId {
 }
 }

+/// `docker system prune` options.
+///
+/// See https://docs.docker.com/engine/reference/commandline/system_prune/
+#[derive(Clone, Debug, Default)]
+pub struct PruneOptions {
+/// Remove all unused images not just dangling ones.
+pub all: bool,
+/// Do not prompt for confirmation.
+pub force: bool,
+/// Remove all unused local volumes.
+pub volumes: bool,
+/// Provide filter values (e.g. ‘label=<key>=<value>’).
+pub filter: Vec<String>,
+}
+
+impl PruneOptions {
+/// Format CLI arguments for `docker system prune`.
+pub fn args(&self) -> Vec<OsString> {
+let mut ret = Vec::new();
+if self.all {
+ret.push("--all".into());
+}
+if self.force {
+ret.push("--force".into());
+}
+if self.volumes {
+ret.push("--volumes".into());
+}
+for filter in &self.filter {
+ret.push("--filter".into());
+ret.push(filter.into());
+}
+ret
+}
+}
+
 #[cfg(test)]
 mod tests {
 use super::*;
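For illustration only, not part of this commit: `PruneOptions::args` simply renders the struct fields as `docker system prune` flags, so the behaviour can be checked directly:

    // Hypothetical usage: skip the confirmation prompt and drop unused images.
    let options = PruneOptions { all: true, force: true, ..Default::default() };
    assert_eq!(options.args(), vec![OsString::from("--all"), OsString::from("--force")]);
    // The resulting options are what `Docker::system_prune`, defined above, consumes.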
@@ -185,7 +185,8 @@ pub struct Cli {
 /// Describe where to get a target artifacts from.
 ///
 /// This is the CLI representation of a [crate::source::Source] for a given target.
-#[derive(Args, Clone, Debug, PartialEq)]
+#[derive(Args, Clone, Debug, Derivative)]
+#[derivative(PartialEq)]
 pub struct Source<Target: IsTargetSource> {
 /// How the given target should be acquired.
 #[clap(name = Target::SOURCE_NAME, arg_enum, long, default_value_t= SourceKind::Build,
@@ -219,9 +220,11 @@ pub struct Source<Target: IsTargetSource> {
 pub release: Option<String>,

 /// Used when `SourceKind::Build` is used.
+#[derivative(PartialEq(bound = ""))]
 #[clap(flatten)]
 pub build_args: BuildDescription<Target>,

+#[derivative(PartialEq(bound = ""))]
 #[clap(flatten)]
 pub output_path: OutputPath<Target>,
 }
@@ -261,9 +264,10 @@ impl<Target: IsTargetSource> AsRef<Path> for OutputPath<Target> {
 }
 }

-#[derive(Args, Clone, PartialEq, Derivative)]
-#[derivative(Debug)]
+#[derive(Args, Clone, Derivative)]
+#[derivative(Debug, PartialEq)]
 pub struct BuildDescription<Target: IsTargetSource> {
+#[derivative(PartialEq(bound = ""))]
 #[clap(flatten)]
 pub input: Target::BuildInput,
 #[clap(name = Target::UPLOAD_ARTIFACT_NAME, long, enso_env(), default_value_t = ide_ci::actions::workflow::is_in_env())]
@@ -15,16 +15,19 @@ use octocrab::models::ReleaseId;



-source_args_hlp!(Target, "ide", BuildInput<Gui>);
+source_args_hlp!(Target, "ide", BuildInput<Gui, Target>);

-#[derive(Args, Clone, Debug, PartialEq)]
-pub struct BuildInput<Gui: IsTargetSource> {
+#[derive(Args, Clone, Debug, Derivative)]
+#[derivative(PartialEq)]
+pub struct BuildInput<Gui: IsTargetSource, Ide: IsTargetSource = Target> {
+#[derivative(PartialEq(bound = ""))]
 #[clap(flatten)]
 pub gui: Source<Gui>,
 #[clap(flatten)]
 pub project_manager: Source<Backend>,
+#[derivative(PartialEq(bound = ""))]
 #[clap(flatten)]
-pub output_path: OutputPath<Target>,
+pub output_path: OutputPath<Ide>,
 /// Override the default target for electron-builder. E.g. pass `dir` for unpacked directory
 /// (fastest). See <https://www.electron.build> for all supported targets.
 #[clap(long, enso_env())]
@@ -5,12 +5,13 @@ use crate::source_args_hlp;
 use clap::Args;
 use clap::Subcommand;
 use enso_build::project::gui2::Gui2;
+use octocrab::models::ReleaseId;



 source_args_hlp!(Target, "ide2", BuildInput);

-pub type BuildInput = crate::arg::ide::BuildInput<Gui2>;
+pub type BuildInput = crate::arg::ide::BuildInput<Gui2, Target>;

 #[derive(Subcommand, Clone, Debug)]
 pub enum Command {
@@ -20,6 +21,14 @@ pub enum Command {
 #[clap(flatten)]
 params: BuildInput,
 },
+/// Build and upload the new IDE as a release asset.
+/// This command is intended for CI-use only.
+Upload {
+#[clap(flatten)]
+params: BuildInput,
+#[clap(long, env = *enso_build::env::ENSO_RELEASE_ID)]
+release_id: ReleaseId,
+},
 }

 #[derive(Args, Clone, Debug)]
@@ -85,6 +85,7 @@ use ide_ci::actions::workflow::is_in_env;
 use ide_ci::cache::Cache;
 use ide_ci::define_env_var;
 use ide_ci::fs::remove_if_exists;
+use ide_ci::github::release;
 use ide_ci::github::setup_octocrab;
 use ide_ci::global;
 use ide_ci::ok_ready_boxed;
@@ -93,6 +94,7 @@ use ide_ci::programs::git;
 use ide_ci::programs::git::clean;
 use ide_ci::programs::rustc;
 use ide_ci::programs::Cargo;
+use octocrab::models::ReleaseId;
 use std::time::Duration;
 use tempfile::tempdir;
 use tokio::process::Child;
@@ -483,23 +485,45 @@
 .boxed()
 }

+/// Get a handle to the release by its identifier.
+pub fn release(&self, id: ReleaseId) -> release::Handle {
+ide_ci::github::release::Handle::new(&self.octocrab, self.remote_repo.clone(), id)
+}
+
+/// Upload IDE assets from the build job to the given release.
+pub fn upload_ide_assets(
+&self,
+build_job: BoxFuture<'static, Result<ide::Artifact>>,
+release_id: ReleaseId,
+name_prefix: Option<String>,
+) -> BoxFuture<'static, Result> {
+let release = self.release(release_id);
+let add_prefix = move |name: String| {
+if let Some(prefix) = name_prefix.clone() {
+format!("{prefix}-{name}")
+} else {
+name
+}
+};
+async move {
+let artifacts = build_job.await?;
+release
+.upload_asset_file_with_custom_name(&artifacts.image, add_prefix.clone())
+.await?;
+release
+.upload_asset_file_with_custom_name(&artifacts.image_checksum, add_prefix)
+.await?;
+Ok(())
+}
+.boxed()
+}
+
 pub fn handle_ide(&self, ide: arg::ide::Target) -> BoxFuture<'static, Result> {
 match ide.command {
 arg::ide::Command::Build { params } => self.build_old_ide(params).void_ok().boxed(),
 arg::ide::Command::Upload { params, release_id } => {
 let build_job = self.build_old_ide(params);
-let release = ide_ci::github::release::Handle::new(
-&self.octocrab,
-self.remote_repo.clone(),
-release_id,
-);
-async move {
-let artifacts = build_job.await?;
-release.upload_asset_file(&artifacts.image).await?;
-release.upload_asset_file(&artifacts.image_checksum).await?;
-Ok(())
-}
-.boxed()
+self.upload_ide_assets(build_job, release_id, None)
 }
 arg::ide::Command::Start { params, ide_option } => {
 let build_job = self.build_old_ide(params);
@@ -563,6 +587,10 @@
 pub fn handle_ide2(&self, ide: arg::ide2::Target) -> BoxFuture<'static, Result> {
 match ide.command {
 arg::ide2::Command::Build { params } => self.build_new_ide(params).void_ok().boxed(),
+arg::ide2::Command::Upload { params, release_id } => {
+let build_job = self.build_new_ide(params);
+self.upload_ide_assets(build_job, release_id, Some("ide2".into()))
+}
 }
 }

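For illustration only, not part of this commit: `handle_ide2` reuses `upload_ide_assets` with a `Some("ide2")` prefix, so the closure built there renames each uploaded asset. A sketch of that renaming, with an example file name:

    // Hypothetical illustration of the prefixing done by `upload_ide_assets`:
    // with prefix "ide2", an artifact named "image.tar.gz" (example name) is
    // uploaded as "ide2-image.tar.gz"; with no prefix the name is unchanged.
    let add_prefix = |name: String| format!("{}-{}", "ide2", name);
    assert_eq!(add_prefix("image.tar.gz".to_string()), "ide2-image.tar.gz");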
@@ -588,7 +616,7 @@
 pub fn build_ide(
 &self,
 input: ide::BuildInput<impl IsArtifact>,
-output_path: OutputPath<arg::ide::Target>,
+output_path: OutputPath<impl IsTargetSource + Send + Sync + 'static>,
 ) -> BoxFuture<'static, Result<ide::Artifact>> {
 let target = Ide { target_os: self.triple.os, target_arch: self.triple.arch };
 let artifact_name_prefix = input.artifact_name.clone();
@@ -9,10 +9,10 @@
 //! this stage, although [`Dynamic`] methods are used so that if any classes are modified before
 //! the model is rendered to syntax, the generated methods will reflect the changes.

-use crate::meta;
-
 use crate::java::*;

+use crate::meta;
+


 // ======================
@@ -1,5 +1,7 @@
 //! Representation of datatype definitions in the Java typesystem.

+
+
 mod from_meta;
 #[cfg(feature = "graphviz")]
 mod graphviz;
@@ -1,14 +1,16 @@
 //! Run the parser from the command line, and output the raw binary serialization of the AST for
 //! debugging.

-// === Features ===
 // === Standard Linter Configuration ===
 #![deny(non_ascii_idents)]
 #![warn(unsafe_code)]
+// === Non-Standard Linter Configuration ===
+#![allow(clippy::bool_to_int_with_if)]
+#![allow(clippy::let_and_return)]

 use std::io::Write;



 fn main() {
 use std::io::Read;
 let mut input = String::new();
@@ -1,10 +1,14 @@
 //! Run the parser from the command line, and output the a JSON serialization of the AST for
 //! debugging.

-// === Features ===
 // === Standard Linter Configuration ===
 #![deny(non_ascii_idents)]
 #![warn(unsafe_code)]
+#![allow(clippy::bool_to_int_with_if)]
+#![allow(clippy::let_and_return)]
+
+


 // === Non-Standard Linter Configuration ===

 fn main() {
@@ -6,6 +6,8 @@
 // === Standard Linter Configuration ===
 #![deny(non_ascii_idents)]
 #![warn(unsafe_code)]
+#![allow(clippy::bool_to_int_with_if)]
+#![allow(clippy::let_and_return)]
 // === Non-Standard Linter Configuration ===
 #![deny(unconditional_recursion)]
 #![warn(missing_copy_implementations)]
@@ -30,6 +30,7 @@ use std::fmt::Display;
 use std::fmt::Formatter;

+

 // =================
 // === Constants ===
 // =================
@@ -103,7 +103,6 @@
 use crate::prelude::*;

-

 // ==============
 // === Export ===
 // ==============
@@ -117,6 +116,7 @@ pub mod source;
 pub mod syntax;

+

 /// Popular utilities, imported by most modules of this crate.
 pub mod prelude {
 pub use enso_prelude::serde_reexports::*;