Cloud dispatch & fixes (#3843)

This PR updates the build script:
* fixed an issue where the program version check was not properly triggering;
* improved `git-clean` command to correctly clear Scala artifacts;
* added `run.ps1` wrapper to the build script that works better with PowerShell than `run.cmd`;
* increased timeouts to work around failures on macOS nightly builds;
* replaced deprecated GitHub Actions APIs (set-output) with their new equivalents;
* workaround for issue with electron builder (python2 lookup) on newer macOS runner images;
* GUI and backend dispatches to cloud were completed;
* release workflow allows creating RC releases.
This commit is contained in:
Michał Wawrzyniec Urbańczyk 2022-11-09 00:15:26 +01:00 committed by GitHub
parent cee7f27dc1
commit 483028dbb0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
107 changed files with 5080 additions and 3090 deletions

View File

@@ -18,28 +18,28 @@ jobs:
runs-on:
- benchmark
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@@ -50,25 +50,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run backend benchmark runtime
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_BUILD_MINIMAL_RUN: ${{ true == inputs.just-check }}
ENSO_BUILD_SKIP_VERSION_CHECK: "true"

View File

@@ -13,28 +13,28 @@ jobs:
runs-on:
- X64
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@@ -45,24 +45,28 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run changelog-check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_BUILD_SKIP_VERSION_CHECK: "true"

View File

@@ -13,28 +13,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@@ -45,52 +45,56 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run backend get
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-build-backend-macos:
name: Build Backend (macos)
runs-on:
- macos-latest
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@@ -101,25 +105,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run backend get
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-build-backend-windows:
name: Build Backend (windows)
runs-on:
@@ -127,28 +135,28 @@ jobs:
- Windows
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@@ -159,25 +167,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run backend get
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-build-wasm-linux:
name: Build GUI (WASM) (linux)
runs-on:
@@ -185,28 +197,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@@ -217,52 +229,56 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-build-wasm-macos:
name: Build GUI (WASM) (macos)
runs-on:
- macos-latest
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@@ -273,25 +289,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-build-wasm-windows:
name: Build GUI (WASM) (windows)
runs-on:
@@ -299,28 +319,28 @@ jobs:
- Windows
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@@ -331,25 +351,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-cancel-workflow-linux:
name: Cancel Previous Runs
runs-on:
@@ -366,28 +390,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@@ -398,25 +422,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run lint
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-native-test-linux:
name: Native GUI tests (linux)
runs-on:
@@ -424,28 +452,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -456,25 +484,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run wasm test --no-wasm
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-package-ide-linux:
name: Package IDE (linux)
needs:
@ -485,28 +517,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -517,25 +549,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run ide build --wasm-source current-ci-run --backend-source current-ci-run
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-package-ide-macos:
name: Package IDE (macos)
needs:
@ -544,28 +580,28 @@ jobs:
runs-on:
- macos-latest
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -576,11 +612,13 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run ide build --wasm-source current-ci-run --backend-source current-ci-run
env:
APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
@ -589,17 +627,19 @@ jobs:
CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODE_SIGNING_CERT_PASSWORD }}
CSC_LINK: ${{ secrets.APPLE_CODE_SIGNING_CERT }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-package-ide-windows:
name: Package IDE (windows)
needs:
@ -610,28 +650,28 @@ jobs:
- Windows
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -642,27 +682,31 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run ide build --wasm-source current-ci-run --backend-source current-ci-run
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT_PASSWORD }}
WIN_CSC_LINK: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-wasm-test-linux:
name: WASM GUI tests (linux)
runs-on:
@ -670,28 +714,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -702,24 +746,28 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run wasm test --no-native
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_BUILD_SKIP_VERSION_CHECK: "true"

View File

@ -11,28 +11,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -43,10 +43,12 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- id: prepare
run: ./run release create-draft
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
outputs:
ENSO_RELEASE_ID: ${{ steps.prepare.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ steps.prepare.outputs.ENSO_VERSION }}
@ -57,28 +59,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -89,25 +91,167 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-deploy-gui-linux:
name: Upload GUI to S3 (linux)
needs:
- enso-build-cli-ci-gen-upload-ide-linux
runs-on:
- self-hosted
- Linux
- engine
steps:
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
with:
update-conda: false
conda-channels: anaconda, conda-forge
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
shell: cmd
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
with:
clean: false
submodules: recursive
- name: Build Script Setup
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run release deploy-gui
env:
AWS_ACCESS_KEY_ID: ${{ secrets.ARTEFACT_S3_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.ARTEFACT_S3_SECRET_ACCESS_KEY }}
GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }}
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-deploy-runtime-linux:
name: Upload Runtime to ECR (linux)
needs:
- enso-build-cli-ci-gen-draft-release-linux
- enso-build-cli-ci-gen-job-upload-backend-linux
runs-on:
- self-hosted
- Linux
- engine
steps:
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
with:
update-conda: false
conda-channels: anaconda, conda-forge
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
shell: cmd
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
with:
clean: false
submodules: recursive
- name: Build Script Setup
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run release deploy-runtime
env:
AWS_ACCESS_KEY_ID: ${{ secrets.ECR_PUSH_RUNTIME_ACCESS_KEY_ID }}
AWS_DEFAULT_REGION: eu-west-1
AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_PUSH_RUNTIME_SECRET_ACCESS_KEY }}
ENSO_BUILD_ECR_REPOSITORY: runtime
GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }}
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
enso-build-cli-ci-gen-job-upload-backend-linux:
name: Upload Backend (linux)
needs:
@ -117,28 +261,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -149,25 +293,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run backend upload
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
@ -178,28 +326,28 @@ jobs:
runs-on:
- macos-latest
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -210,25 +358,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run backend upload
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
@ -241,28 +393,28 @@ jobs:
- Windows
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -273,93 +425,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run backend upload
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
env:
ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
enso-build-cli-ci-gen-job-upload-runtime-to-ecr-linux:
name: Upload Runtime to ECR (linux)
needs:
- enso-build-cli-ci-gen-draft-release-linux
- enso-build-cli-ci-gen-job-upload-backend-linux
runs-on:
- self-hosted
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
with:
clean: false
submodules: recursive
- name: Build Script Setup
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: ./run release deploy-to-ecr
env:
AWS_ACCESS_KEY_ID: ${{ secrets.ECR_PUSH_RUNTIME_ACCESS_KEY_ID }}
AWS_DEFAULT_REGION: eu-west-1
AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_PUSH_RUNTIME_SECRET_ACCESS_KEY }}
ENSO_BUILD_ECR_REPOSITORY: runtime
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
@ -367,7 +455,8 @@ jobs:
name: Publish release (linux)
needs:
- enso-build-cli-ci-gen-draft-release-linux
- enso-build-cli-ci-gen-job-upload-runtime-to-ecr-linux
- enso-build-cli-ci-gen-job-deploy-gui-linux
- enso-build-cli-ci-gen-job-deploy-runtime-linux
- enso-build-cli-ci-gen-upload-ide-linux
- enso-build-cli-ci-gen-upload-ide-macos
- enso-build-cli-ci-gen-upload-ide-windows
@ -376,28 +465,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -408,25 +497,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run release publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
AWS_ACCESS_KEY_ID: ${{ secrets.ARTEFACT_S3_ACCESS_KEY_ID }}
AWS_REGION: us-west-1
@ -444,28 +537,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -476,25 +569,29 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
@ -507,28 +604,28 @@ jobs:
runs-on:
- macos-latest
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -539,11 +636,13 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}
env:
APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
@ -552,17 +651,19 @@ jobs:
CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODE_SIGNING_CERT_PASSWORD }}
CSC_LINK: ${{ secrets.APPLE_CODE_SIGNING_CERT }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
@ -577,28 +678,28 @@ jobs:
- Windows
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -609,27 +710,31 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
WIN_CSC_KEY_PASSWORD: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT_PASSWORD }}
WIN_CSC_LINK: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT }}
- name: List files if failed (Windows)
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}

File diff suppressed because it is too large Load Diff

View File

@ -22,28 +22,28 @@ jobs:
- Linux
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -54,70 +54,74 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run backend ci-check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Engine Test Reporter
timeout-minutes: 360
- if: success() || failure()
name: Engine Test Reporter
uses: dorny/test-reporter@v1
if: success() || failure()
with:
max-annotations: 50
name: Engine Tests (linux)
path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml
path-replace-backslashes: true
reporter: java-junit
- name: Standard Library Test Reporter
- if: success() || failure()
name: Standard Library Test Reporter
uses: dorny/test-reporter@v1
if: success() || failure()
with:
max-annotations: 50
name: Standard Library Tests (linux)
path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml
path-replace-backslashes: true
reporter: java-junit
- name: List files if failed (Windows)
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-ci-check-backend-macos:
name: Engine (macos)
runs-on:
- macos-latest
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -128,43 +132,47 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run backend ci-check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Engine Test Reporter
timeout-minutes: 360
- if: success() || failure()
name: Engine Test Reporter
uses: dorny/test-reporter@v1
if: success() || failure()
with:
max-annotations: 50
name: Engine Tests (macos)
path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml
path-replace-backslashes: true
reporter: java-junit
- name: Standard Library Test Reporter
- if: success() || failure()
name: Standard Library Test Reporter
uses: dorny/test-reporter@v1
if: success() || failure()
with:
max-annotations: 50
name: Standard Library Tests (macos)
path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml
path-replace-backslashes: true
reporter: java-junit
- name: List files if failed (Windows)
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-ci-check-backend-windows:
name: Engine (windows)
runs-on:
@ -172,28 +180,28 @@ jobs:
- Windows
- engine
steps:
- name: Setup conda (GH runners only)
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Setup conda (GH runners only)
uses: s-weigand/setup-conda@v1.0.5
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
update-conda: false
conda-channels: anaconda, conda-forge
- name: Installing wasm-pack
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
with:
version: v0.10.2
- name: Expose Artifact API and context information.
uses: actions/github-script@v6
with:
script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
- name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
if: runner.os == 'Windows'
shell: cmd
- name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
if: runner.os != 'Windows'
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
@ -204,42 +212,46 @@ jobs:
run: ./run --help
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean before
timeout-minutes: 360
- if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean before
run: ./run git-clean
if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
- run: ./run backend ci-check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Engine Test Reporter
timeout-minutes: 360
- if: success() || failure()
name: Engine Test Reporter
uses: dorny/test-reporter@v1
if: success() || failure()
with:
max-annotations: 50
name: Engine Tests (windows)
path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml
path-replace-backslashes: true
reporter: java-junit
- name: Standard Library Test Reporter
- if: success() || failure()
name: Standard Library Test Reporter
uses: dorny/test-reporter@v1
if: success() || failure()
with:
max-annotations: 50
name: Standard Library Tests (windows)
path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml
path-replace-backslashes: true
reporter: java-junit
- name: List files if failed (Windows)
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
run: Get-ChildItem -Force -Recurse
if: failure() && runner.os == 'Windows'
- name: List files if failed (non-Windows)
- if: failure() && runner.os != 'Windows'
name: List files if failed (non-Windows)
run: ls -lAR
if: failure() && runner.os != 'Windows'
- name: Clean after
- if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
name: Clean after
run: ./run git-clean
if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_BUILD_SKIP_VERSION_CHECK: "true"

9
.gitignore vendored
View File

@ -24,7 +24,12 @@ wasm-pack.log
generated/
/target
/build/rust/target/
dist
###########
## Scala ##
###########
.metals/
############
## NodeJS ##
@ -119,7 +124,7 @@ build-cache/
######################
## Enso-Development ##
######################
/dist
distribution/lib/Standard/Examples/*/data/scratch_file
distribution/lib/Standard/Examples/*/data/image.png
distribution/editions

983
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -6,6 +6,7 @@ members = [
"app/gui",
"app/gui/enso-profiler-enso-data",
"build/cli",
"build/macros",
"build/enso-formatter",
"build/intellij-run-config-gen",
"build/deprecated/rust-scripts",

15
build/base/Cargo.toml Normal file
View File

@ -0,0 +1,15 @@
[package]
name = "enso-build-base"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1.0.65"
fn-error-context = "0.2.0"
futures-util = "0.3.24"
futures = "0.3.24"
serde = "1.0.145"
serde_json = "1.0.85"
serde_yaml = "0.9.13"
tracing = "0.1.36"

View File

@ -0,0 +1,16 @@
//! Additional convenience methods for various common types.
// ==============
// === Export ===
// ==============
pub mod from_string;
pub mod future;
pub mod iterator;
pub mod maps;
pub mod option;
pub mod path;
pub mod pathbuf;
pub mod result;
pub mod str;

View File

@ -1,3 +1,5 @@
//! Module with utilities for converting string-like values into other types.
use crate::prelude::*;
use anyhow::Context;
@ -5,9 +7,12 @@ use std::any::type_name;
/// An equivalent of standard's library `std::str::FromStr` trait, but with nice error messages.
pub trait FromString: Sized {
/// Parse a string into a value of this type. See: `std::str::FromStr::from_str`.
fn from_str(s: &str) -> Result<Self>;
/// Parse a string into a value of this type and then convert it to `R`.
fn parse_into<R>(text: impl AsRef<str>) -> Result<R>
where
Self: TryInto<R>,

View File

@ -1,3 +1,5 @@
//! Extensions to [`Future`]-related types.
use crate::prelude::*;
use futures_util::future::ErrInto;
@ -10,12 +12,12 @@ use futures_util::TryFutureExt as _;
fn void<T>(_t: T) {}
/// Extension methods for [`Future`].
pub trait FutureExt: Future {
/// Discard the result of this future.
fn void(self) -> Map<Self, fn(Self::Output) -> ()>
where Self: Sized {
self.map(void)
self.map(drop)
}
}
@ -24,12 +26,27 @@ impl<T: ?Sized> FutureExt for T where T: Future {}
type FlattenResultFn<T, E> =
fn(std::result::Result<std::result::Result<T, E>, E>) -> std::result::Result<T, E>;
/// Extension methods for [`TryFuture`], i.e. the Result-yielding [`Future`]
pub trait TryFutureExt: TryFuture {
/// Discard the result of successful future.
fn void_ok(self) -> MapOk<Self, fn(Self::Ok) -> ()>
where Self: Sized {
self.map_ok(void)
self.map_ok(drop)
}
/// Convert the error type of this future to [`anyhow::Error`] and add the context.
fn context(
self,
context: impl Display + Send + Sync + 'static,
) -> BoxFuture<'static, Result<Self::Ok>>
where
Self: Sized + Send + 'static,
Self::Error: Into<anyhow::Error> + Send + Sync + 'static,
{
self.map_err(|err| err.into().context(context)).boxed()
}
/// Convert the error type of this future to [`anyhow::Error`].
fn anyhow_err(self) -> MapErr<Self, fn(Self::Error) -> anyhow::Error>
where
Self: Sized,
@ -38,6 +55,7 @@ pub trait TryFutureExt: TryFuture {
self.map_err(anyhow::Error::from)
}
/// If the future is successful, apply the function to the result and return the new future.
fn and_then_sync<T2, E2, F>(
self,
f: F,
@ -53,16 +71,9 @@ pub trait TryFutureExt: TryFuture {
impl<T: ?Sized> TryFutureExt for T where T: TryFuture {}
pub fn receiver_to_stream<T>(
mut receiver: tokio::sync::mpsc::Receiver<T>,
) -> impl Stream<Item = T> {
futures::stream::poll_fn(move |ctx| receiver.poll_recv(ctx))
}
/// Extension methods for [`TryStream`], i.e. a [`Stream`] that produces [`Result`]s.
pub trait TryStreamExt: TryStream {
/// Wrap all the errors into [`anyhow::Error`].
fn anyhow_err(self) -> stream::MapErr<Self, fn(Self::Error) -> anyhow::Error>
where
Self: Sized,

View File

@ -1,3 +1,5 @@
//! Extension methods for `Iterator` and `Iterator`-like types.
use crate::prelude::*;
use std::iter::Rev;
@ -5,7 +7,10 @@ use std::iter::Take;
/// Extension methods for `Iterator` and `Iterator`-like types.
pub trait IteratorExt: Iterator {
/// try_filter
/// Transforms an [Iterator]'s items into `Result`s, and filters out the `Err` variants.
fn try_filter<R>(mut self, mut f: impl FnMut(&Self::Item) -> Result<bool>) -> Result<R>
where
Self: Sized,
@ -16,6 +21,7 @@ pub trait IteratorExt: Iterator {
})
}
/// Transforms an [Iterator]'s items into `Result`s, and filters out the `Err` variants.
fn try_map<R, U>(mut self, mut f: impl FnMut(Self::Item) -> Result<U>) -> Result<R>
where
Self: Sized,
@ -29,8 +35,12 @@ pub trait IteratorExt: Iterator {
impl<I: Iterator> IteratorExt for I {}
/// Extension methods for `Iterator` and `Iterator`-like types.
pub trait TryIteratorExt: Iterator {
/// The result of successful iteration.
type Ok;
/// Collects the results of the iterator into a `Result<Vec<_>>`.
fn try_collect_vec(self) -> Result<Vec<Self::Ok>>;
}
@ -45,7 +55,9 @@ where
}
}
#[allow(missing_docs)]
pub trait ExactDoubleEndedIteratorExt: ExactSizeIterator + DoubleEndedIterator + Sized {
/// Take the last n elements of the iterator.
fn take_last_n(self, n: usize) -> Rev<Take<Rev<Self>>> {
self.rev().take(n).rev()
}

View File

@ -1,11 +1,12 @@
//! Extension methods for `HashMap` and `HashMap`-like types.
use crate::prelude::*;
use std::collections::HashMap;
// trait Foo<'a, K, V> = FnOnce(&'a K) -> Future<Output = Result<V>>;
/// Get the value for the given key, or insert the value generated by the given `f` function.
pub async fn get_or_insert<K, V, F, R>(map: &mut HashMap<K, V>, key: K, f: F) -> Result<&V>
where
K: Eq + Hash,

View File

@ -0,0 +1,10 @@
//! Extension methods for `Option`.
// use crate::prelude::*;
/// Extension methods for `Option`.
pub trait OptionExt<T> {}
impl<T> OptionExt<T> for Option<T> {}

View File

@ -0,0 +1,184 @@
//! Extension methods for `Path` and `Path`-like types.
use crate::prelude::*;
use serde::de::DeserializeOwned;
/// A number of extensions for `Path`-like types.
pub trait PathExt: AsRef<Path> {
/// Append multiple segments to this path.
fn join_iter<P: AsRef<Path>>(&self, segments: impl IntoIterator<Item = P>) -> PathBuf {
let mut ret = self.as_ref().to_path_buf();
ret.extend(segments);
ret
}
/// Strips the leading `\\?\` prefix from Windows paths if present.
fn without_verbatim_prefix(&self) -> &Path {
self.as_str().strip_prefix(r"\\?\").map_or(self.as_ref(), Path::new)
}
/// Appends a new extension to the file.
///
/// Does not try to replace previous extension, unlike `set_extension`.
/// Does nothing when given extension string is empty.
///
/// ```
/// use enso_build_base::extensions::path::PathExt;
/// use std::path::PathBuf;
///
/// let path = PathBuf::from("foo.tar").with_appended_extension("gz");
/// assert_eq!(path, PathBuf::from("foo.tar.gz"));
///
/// let path = PathBuf::from("foo").with_appended_extension("zip");
/// assert_eq!(path, PathBuf::from("foo.zip"));
/// ```
fn with_appended_extension(&self, extension: impl AsRef<OsStr>) -> PathBuf {
if extension.as_ref().is_empty() {
return self.as_ref().into();
} else {
let mut ret = self.as_ref().to_path_buf().into_os_string();
ret.push(".");
ret.push(extension.as_ref());
ret.into()
}
}
/// Parse this file's contents as a JSON-serialized value.
#[context("Failed to deserialize file `{}` as type `{}`.", self.as_ref().display(), std::any::type_name::<T>())]
fn read_to_json<T: DeserializeOwned>(&self) -> Result<T> {
let content = crate::fs::read_to_string(self)?;
serde_json::from_str(&content).with_context(|| format!("File content was: {}", content))
}
/// Write this file with a JSON-serialized value.
fn write_as_json<T: Serialize>(&self, value: &T) -> Result {
trace!("Writing JSON to {}.", self.as_ref().display());
let file = crate::fs::create(self)?;
serde_json::to_writer(file, value).anyhow_err()
}
/// Parse this file's contents as a YAML-serialized value.
fn read_to_yaml<T: DeserializeOwned>(&self) -> Result<T> {
let content = crate::fs::read_to_string(self)?;
serde_yaml::from_str(&content).anyhow_err()
}
/// Write this file with a YAML-serialized value.
fn write_as_yaml<T: Serialize>(&self, value: &T) -> Result {
trace!("Writing YAML to {}.", self.as_ref().display());
let file = crate::fs::create(self)?;
serde_yaml::to_writer(file, value).anyhow_err()
}
/// Get the path as `str`.
///
/// # Safety
/// This will panic if the path contains invalid UTF-8 characters. Non-UTF-8 paths are not
/// something that we want to spend time on supporting right now.
fn as_str(&self) -> &str {
self.as_ref()
.to_str()
.unwrap_or_else(|| panic!("Path is not valid UTF-8: {:?}", self.as_ref()))
}
/// Split path to components and collect them into a new PathBuf.
///
/// This is useful for `/` -> native separator conversion.
fn normalize(&self) -> PathBuf {
self.as_ref().components().collect()
}
/// Like `parent` but provides a sensible error message if the path has no parent.
fn try_parent(&self) -> Result<&Path> {
self.as_ref()
.parent()
.with_context(|| format!("Failed to get parent of path `{}`.", self.as_ref().display()))
}
/// Like `file_name` but provides a sensible error message if the path has no file name.
fn try_file_name(&self) -> Result<&OsStr> {
self.as_ref().file_name().with_context(|| {
format!("Failed to get file name of path `{}`.", self.as_ref().display())
})
}
/// Like `file_stem` but provides a sensible error message if the path has no file stem.
fn try_file_stem(&self) -> Result<&OsStr> {
self.as_ref().file_stem().with_context(|| {
format!("Failed to get file stem of path `{}`.", self.as_ref().display())
})
}
/// Like `extension` but provides a sensible error message if the path has no extension.
/// Note that this method fails for paths like `foo.`.
fn try_extension(&self) -> Result<&OsStr> {
self.as_ref().extension().with_context(|| {
format!("Failed to get extension of path `{}`.", self.as_ref().display())
})
}
/// Takes filename and splits it into file stem and extension.
///
/// Fails if the path's filename has no extension.
fn split_filename(&self) -> Result<SplitFilename> {
let stem = self.try_file_stem()?;
let extension = self.try_extension()?;
Ok(SplitFilename { stem, extension })
}
/// Returns the path with replaced parent. The filename is kept intact.
///
/// If there is no filename in the path, it is fully replaced.
fn with_parent(&self, parent: impl AsRef<Path>) -> PathBuf {
let mut ret = parent.as_ref().to_path_buf();
ret.extend(self.as_ref().file_name());
ret
}
}
impl<T: AsRef<Path>> PathExt for T {}
/// A method that displays a value using `Display` trait.
pub fn display_fmt(path: &Path, f: &mut Formatter) -> std::fmt::Result {
std::fmt::Display::fmt(&path.display(), f)
}
/// A result of splitting a path into its filename components.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct SplitFilename<'a> {
/// The file stem.
pub stem: &'a OsStr,
/// The file extension.
pub extension: &'a OsStr,
}
#[cfg(test)]
mod tests {
#[allow(unused_imports)]
use super::*;
#[test]
fn stripping_unc_prefix() {
let path_with_unc = Path::new(r"\\?\H:\NBO\ci-build\target\debug\enso-build2.exe");
let path_without_unc = Path::new(r"H:\NBO\ci-build\target\debug\enso-build2.exe");
assert_eq!(path_with_unc.without_verbatim_prefix(), path_without_unc);
assert_eq!(path_without_unc.without_verbatim_prefix(), path_without_unc);
}
#[test]
/// This test just makes sure that usage of as_str correctly compiles without lifetime issues.
/// (there were such before)
fn foo() {
fn bar(path: impl AsRef<Path>) {
path.as_str();
path.as_ref().as_str();
}
bar("");
bar(String::from(""));
}
}

View File

@ -0,0 +1,22 @@
//! Extensions to the [`PathBuf`] type.
use crate::prelude::*;
/// Extension methods for [`PathBuf`].
pub trait PathBufExt {
    /// Replace the [parent][std::path::Path::parent] directory of the path, maintaining the
    /// [filename][std::path::Path::file_name].
    fn set_parent(&mut self, parent: impl AsRef<Path>);
}

impl PathBufExt for PathBuf {
    fn set_parent(&mut self, parent: impl AsRef<Path>) {
        // Capture the final path component (if any) before the buffer is overwritten.
        let name = self.file_name().map(OsStr::to_os_string);
        // Start over from the new parent, then re-attach the remembered filename.
        *self = parent.as_ref().to_path_buf();
        if let Some(name) = name {
            self.push(name);
        }
    }
}

View File

@ -1,8 +1,12 @@
//! Extension methods for [`Result`].
use crate::prelude::*;
/// Extension methods for [`Result`].
pub trait ResultExt<T, E>: Sized {
/// Maps the value and wraps it as a [`Future`].
#[allow(clippy::type_complexity)]
fn map_async<'a, T2, F, Fut>(
self,
@ -15,6 +19,8 @@ pub trait ResultExt<T, E>: Sized {
F: FnOnce(T) -> Fut,
Fut: Future<Output = T2> + 'a;
/// Maps the `Ok` value to a [`Future`] value. If the result is `Err`, the error is returned
/// as a [`std::future::Ready`] future.
fn and_then_async<'a, T2, E2, F, Fut>(
self,
f: F,
@ -25,6 +31,22 @@ pub trait ResultExt<T, E>: Sized {
E: Into<E2>,
T2: Send + 'a,
E2: Send + 'a;
/// Convert the error type to [`anyhow::Error`].
///
/// If there are additional context-specific information, use [`context`] instead.
fn anyhow_err(self) -> Result<T>
where E: Into<anyhow::Error>;
/// Convert the `[Result]<[Future]>` to `Future<Result>`.
fn flatten_fut(
self,
) -> futures::future::Either<
std::future::Ready<std::result::Result<T::Ok, T::Error>>,
futures::future::IntoFuture<T>,
>
where T: TryFuture<Error: From<E>>;
}
impl<T, E> ResultExt<T, E> for std::result::Result<T, E> {
@ -62,4 +84,22 @@ impl<T, E> ResultExt<T, E> for std::result::Result<T, E> {
Err(e) => ready(Err(e.into())).right_future(),
}
}
fn anyhow_err(self) -> Result<T>
where E: Into<anyhow::Error> {
self.map_err(E::into)
}
fn flatten_fut(
self,
) -> futures::future::Either<
std::future::Ready<std::result::Result<T::Ok, T::Error>>,
futures::future::IntoFuture<T>,
>
where T: TryFuture<Error: From<E>> {
match self {
Ok(fut) => fut.into_future().right_future(),
Err(e) => ready(Err(T::Error::from(e))).left_future(),
}
}
}

View File

@ -0,0 +1,34 @@
//! Extensions for string-like types.
use crate::prelude::*;
/// Extension methods for strings and similar types.
pub trait StrLikeExt {
/// Convenience variant of `FromString::from_str`.
///
/// Should be preferred over [`str::parse`] due to better error messages.
// FIXME: This needs better name! However, we cannot use `parse` as it conflicts with
// `str::parse`. As a method on `str`, it would take priority over an extension trait.
fn parse2<T: FromString>(&self) -> Result<T>;
/// Convenience variant of `FromString::parse_into`.
fn parse_through<T, R>(&self) -> Result<R>
where
T: FromString + TryInto<R>,
<T as TryInto<R>>::Error: Into<anyhow::Error>;
}
impl<S: AsRef<str>> StrLikeExt for S {
fn parse2<U: FromString>(&self) -> Result<U> {
U::from_str(self.as_ref())
}
fn parse_through<T, R>(&self) -> Result<R>
where
T: FromString + TryInto<R>,
<T as TryInto<R>>::Error: Into<anyhow::Error>, {
T::parse_into(self.as_ref())
}
}

221
build/base/src/fs.rs Normal file
View File

@ -0,0 +1,221 @@
//! Module meant as a replacement for [std::fs] module.
//!
//! Functionality and API-s are basically the same, but the functions are enriched with logging and
//! improved diagnostics. Most importantly, file operation failures will display the relevant path.
use crate::prelude::*;
use std::io::Write;
// ==============
// === Export ===
// ==============
pub mod wrappers;
pub use wrappers::*;
/// Like the standard version but will create any missing parent directories from the path.
#[context("Failed to write path: {}", path.as_ref().display())]
pub fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
create_parent_dir_if_missing(&path)?;
wrappers::write(&path, &contents)
}
/// Serialize the data to JSON text and write it to the file.
///
/// See [`write()`].
#[context("Failed to write path: {}", path.as_ref().display())]
pub fn write_json(path: impl AsRef<Path>, contents: &impl Serialize) -> Result {
let contents = serde_json::to_string(contents)?;
write(&path, &contents)
}
/// Like the standard version but will create any missing parent directories from the path.
#[context("Failed to open path for writing: {}", path.as_ref().display())]
pub fn create(path: impl AsRef<Path>) -> Result<std::fs::File> {
create_parent_dir_if_missing(&path)?;
wrappers::create(&path)
}
/// Read the file content and parse it using [`FromString`].
#[context("Failed to read the file: {}", path.as_ref().display())]
pub fn read_string_into<T: FromString>(path: impl AsRef<Path>) -> Result<T> {
read_to_string(&path)?.parse2()
}
/// Create a directory (and all missing parent directories),
///
/// Does not fail when a directory already exists.
#[context("Failed to create directory {}", path.as_ref().display())]
pub fn create_dir_if_missing(path: impl AsRef<Path>) -> Result {
let result = std::fs::create_dir_all(&path);
match result {
Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => Ok(()),
result => result.anyhow_err(),
}
}
/// Create a parent directory for path (and all missing parent directories),
///
/// Does not fail when a directory already exists.
#[context("Failed to create parent directory for {}", path.as_ref().display())]
pub fn create_parent_dir_if_missing(path: impl AsRef<Path>) -> Result<PathBuf> {
if let Some(parent) = path.as_ref().parent() {
create_dir_if_missing(parent)?;
Ok(parent.into())
} else {
bail!("No parent directory for path {}.", path.as_ref().display())
}
}
/// Remove a directory with all its subtree.
///
/// Does not fail if the directory is not found.
#[tracing::instrument(fields(path = %path.as_ref().display()))]
#[context("Failed to remove directory {}", path.as_ref().display())]
pub fn remove_dir_if_exists(path: impl AsRef<Path>) -> Result {
let result = std::fs::remove_dir_all(&path);
match result {
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
result => result.anyhow_err(),
}
}
/// Remove a regular file.
///
/// Does not fail if the file is not found.
#[tracing::instrument(fields(path = %path.as_ref().display()))]
#[context("Failed to remove file {}", path.as_ref().display())]
pub fn remove_file_if_exists(path: impl AsRef<Path>) -> Result<()> {
let result = std::fs::remove_file(&path);
match result {
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
result => result.anyhow_err(),
}
}
/// Remove a filesystem entry, whether it is a directory or a regular file.
///
/// Does not fail if the entry is not found.
#[context("Failed to remove entry {} (if exists)", path.as_ref().display())]
pub fn remove_if_exists(path: impl AsRef<Path>) -> Result {
    let path = path.as_ref();
    // Dispatch on the on-disk type; both helpers treat "not found" as success.
    if path.is_dir() {
        remove_dir_if_exists(path)
    } else {
        remove_file_if_exists(path)
    }
}
/// Recreate directory, so it exists and is empty.
pub fn reset_dir(path: impl AsRef<Path>) -> Result {
let path = path.as_ref();
debug!("Will reset directory {}", path.display());
remove_dir_if_exists(path)?;
create_dir_if_missing(path)?;
Ok(())
}
/// Fail if the given path does not exist.
pub fn require_exist(path: impl AsRef<Path>) -> Result {
if path.as_ref().exists() {
trace!("{} does exist.", path.as_ref().display());
Ok(())
} else {
bail!("{} does not exist.", path.as_ref().display())
}
}
/// Check if the both path are equal.
///
/// This performs canonicalization of the paths before comparing them. As such, it requires that the
/// both paths exist.
pub fn same_existing_path(source: impl AsRef<Path>, destination: impl AsRef<Path>) -> Result<bool> {
Ok(canonicalize(source)? == canonicalize(destination)?)
}
/// Fail if the given path is not an existing directory.
#[context("Failed because the path does not point to a directory: {}", path.as_ref().display())]
pub fn expect_dir(path: impl AsRef<Path>) -> Result {
let filetype = metadata(&path)?.file_type();
if filetype.is_dir() {
Ok(())
} else {
bail!("File is not directory, its type is: {filetype:?}")
}
}
/// Fail if the given path is not an existing file.
#[context("Failed because the path does not point to a regular file: {}", path.as_ref().display())]
pub fn expect_file(path: impl AsRef<Path>) -> Result {
let filetype = metadata(&path)?.file_type();
if filetype.is_file() {
Ok(())
} else {
bail!("File is not a regular file, its type is: {filetype:?}")
}
}
/// Change the file permissions, so the owner can execute it.
#[context("Failed to update permissions on `{}`", path.as_ref().display())]
pub fn allow_owner_execute(path: impl AsRef<Path>) -> Result {
#[cfg(not(target_os = "windows"))]
{
use std::os::unix::prelude::*;
debug!("Setting executable permission on {}", path.as_ref().display());
let metadata = path.as_ref().metadata()?;
let mut permissions = metadata.permissions();
let mode = permissions.mode();
let owner_can_execute = 0o0100;
permissions.set_mode(mode | owner_can_execute);
set_permissions(path.as_ref(), permissions)?;
}
Ok(())
}
/// Check if the files are of identical content.
pub fn check_if_identical(source: impl AsRef<Path>, target: impl AsRef<Path>) -> Result<bool> {
// Different length means different content, no need to read.
if metadata(&source)?.len() != metadata(&target)?.len() {
return Ok(false);
}
// TODO: Not good for large files, should process them chunk by chunk.
Ok(read(&source)? == read(&target)?)
}
/// Copy a file from source to target, unless they are already identical.
///
/// This is meant not really as an optimization for copying, but rather as a way to avoid
/// unnecessary file changes, which would trigger unnecessary rebuilds.
pub fn copy_file_if_different(source: impl AsRef<Path>, target: impl AsRef<Path>) -> Result {
    // `matches!(…, Ok(true))` treats a failed comparison (e.g. a missing target) the same as
    // "files differ", so we fall through to copying — same behavior as the previous
    // `Result::contains(&true)`, but without the unstable `option_result_contains` feature.
    if matches!(check_if_identical(&source, &target), Ok(true)) {
        trace!("Files are identical, not copying from {}.", source.as_ref().display());
    } else {
        trace!(
            "Modified, will copy {} to {}.",
            source.as_ref().display(),
            target.as_ref().display()
        );
        copy(&source, &target)?;
    }
    Ok(())
}
/// Append contents to the file.
///
/// If the file does not exist, it will be created.
pub fn append(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
std::fs::OpenOptions::new()
.append(true)
.create(true)
.open(&path)
.context(format!("Failed to open {} for writing.", path.as_ref().display()))?
.write_all(contents.as_ref())
.context(format!("Failed to write to {}.", path.as_ref().display()))
}

View File

@ -0,0 +1,87 @@
//! Wrappers over [`std::fs`] functions that provide sensible error messages, i.e. explaining what
//! operation was attempted and what was the relevant path.
//!
//! Unless there is a specific reason to use the standard library functions, you should use these.
use crate::prelude::*;
use std::fs::File;
use std::fs::Metadata;
// ==============
// === Export ===
// ==============
/// See [std::fs::metadata].
#[context("Failed to obtain metadata for file: {}", path.as_ref().display())]
pub fn metadata<P: AsRef<Path>>(path: P) -> Result<Metadata> {
std::fs::metadata(&path).anyhow_err()
}
/// See [std::fs::copy].
#[context("Failed to copy file from {} to {}", from.as_ref().display(), to.as_ref().display())]
pub fn copy(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<u64> {
std::fs::copy(&from, &to).anyhow_err()
}
/// See [std::fs::rename].
#[context("Failed to rename file from {} to {}", from.as_ref().display(), to.as_ref().display())]
pub fn rename(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result {
std::fs::rename(&from, &to).anyhow_err()
}
/// See [std::fs::read].
#[context("Failed to read the file: {}", path.as_ref().display())]
pub fn read(path: impl AsRef<Path>) -> Result<Vec<u8>> {
std::fs::read(&path).anyhow_err()
}
/// See [std::fs::read_dir].
#[context("Failed to read the directory: {}", path.as_ref().display())]
pub fn read_dir(path: impl AsRef<Path>) -> Result<std::fs::ReadDir> {
std::fs::read_dir(&path).anyhow_err()
}
/// See [std::fs::read_to_string].
#[context("Failed to read the file: {}", path.as_ref().display())]
pub fn read_to_string(path: impl AsRef<Path>) -> Result<String> {
std::fs::read_to_string(&path).anyhow_err()
}
/// See [std::fs::write].
#[context("Failed to write path: {}", path.as_ref().display())]
pub fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
std::fs::write(&path, contents).anyhow_err()
}
/// See [std::fs::File::open].
#[context("Failed to open path for reading: {}", path.as_ref().display())]
pub fn open(path: impl AsRef<Path>) -> Result<File> {
File::open(&path).anyhow_err()
}
/// See [std::fs::File::create].
#[context("Failed to open path for writing: {}", path.as_ref().display())]
pub fn create(path: impl AsRef<Path>) -> Result<File> {
File::create(&path).anyhow_err()
}
/// See [std::fs::canonicalize].
#[context("Failed to canonicalize path: {}", path.as_ref().display())]
pub fn canonicalize(path: impl AsRef<Path>) -> Result<PathBuf> {
std::fs::canonicalize(&path).anyhow_err()
}
/// See [std::fs::create_dir_all].
///
/// The error message previously read "missing directories no path"; fixed to a
/// grammatical form consistent with the other wrappers in this module.
#[context("Failed to create missing directories for path: {}", path.as_ref().display())]
pub fn create_dir_all(path: impl AsRef<Path>) -> Result {
    std::fs::create_dir_all(&path).anyhow_err()
}
/// See [std::fs::set_permissions].
///
/// The error message was missing its verb ("Failed to permissions…"); fixed to
/// "Failed to set permissions…".
#[context("Failed to set permissions on file: {}", path.as_ref().display())]
pub fn set_permissions(path: impl AsRef<Path>, perm: std::fs::Permissions) -> Result {
    std::fs::set_permissions(&path, perm).anyhow_err()
}

112
build/base/src/lib.rs Normal file
View File

@ -0,0 +1,112 @@
//! This crate is meant to provide a foundational set of utilities and reexports, that should be
//! common for the whole Enso codebase. Eventually both WASM and native code should use this crate.
//!
//! Currently it is employed by the native build scripts code.
// === Features ===
#![feature(pin_macro)]
#![feature(default_free_fn)]
#![feature(result_flattening)]
#![feature(associated_type_bounds)]
#![feature(extend_one)]
#![feature(option_result_contains)]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
#![allow(clippy::bool_to_int_with_if)]
#![allow(clippy::let_and_return)]
// === Non-Standard Linter Configuration ===
#![warn(missing_docs)]
// ==============
// === Export ===
// ==============
pub mod extensions;
pub mod fs;
pub mod prelude {
//! This module contains all the reexports of the most common traits and types used in the
//! Enso codebase.
/// anyhow-based result type.
pub type Result<T = ()> = anyhow::Result<T>;
pub use std::borrow::Borrow;
pub use std::borrow::BorrowMut;
pub use std::borrow::Cow;
pub use std::collections::BTreeMap;
pub use std::collections::BTreeSet;
pub use std::collections::HashMap;
pub use std::collections::HashSet;
pub use std::default::default;
pub use std::ffi::OsStr;
pub use std::ffi::OsString;
pub use std::fmt::Debug;
pub use std::fmt::Display;
pub use std::fmt::Formatter;
pub use std::future::ready;
pub use std::future::Future;
pub use std::hash::Hash;
pub use std::io::Read;
pub use std::io::Seek;
pub use std::iter::once;
pub use std::iter::FromIterator;
pub use std::marker::PhantomData;
pub use std::ops::Deref;
pub use std::ops::DerefMut;
pub use std::ops::Range;
pub use std::path::Path;
pub use std::path::PathBuf;
pub use std::pin::pin;
pub use std::pin::Pin;
pub use std::sync::Arc;
pub use crate::extensions::from_string::FromString;
pub use crate::extensions::future::FutureExt as _;
pub use crate::extensions::future::TryFutureExt as _;
pub use crate::extensions::iterator::IteratorExt as _;
pub use crate::extensions::iterator::TryIteratorExt as _;
pub use crate::extensions::option::OptionExt as _;
pub use crate::extensions::path::PathExt as _;
pub use crate::extensions::pathbuf::PathBufExt as _;
pub use crate::extensions::result::ResultExt as _;
pub use crate::extensions::str::StrLikeExt as _;
pub use anyhow::anyhow;
pub use anyhow::bail;
pub use anyhow::ensure;
pub use anyhow::Context as _;
pub use fn_error_context::context;
pub use futures_util::future::BoxFuture;
pub use futures_util::select;
pub use futures_util::stream::BoxStream;
pub use futures_util::try_join;
pub use futures_util::AsyncWrite;
pub use futures_util::FutureExt as _;
pub use futures_util::Stream;
pub use futures_util::StreamExt as _;
pub use futures_util::TryFuture;
pub use futures_util::TryFutureExt as _;
pub use futures_util::TryStream;
pub use futures_util::TryStreamExt as _;
pub use serde::de::DeserializeOwned;
pub use serde::Deserialize;
pub use serde::Serialize;
pub use tracing::debug;
pub use tracing::debug_span;
pub use tracing::error;
pub use tracing::error_span;
pub use tracing::info;
pub use tracing::info_span;
pub use tracing::instrument;
pub use tracing::span;
pub use tracing::trace;
pub use tracing::trace_span;
pub use tracing::warn;
pub use tracing::warn_span;
pub use tracing::Instrument;
}

View File

@ -32,14 +32,15 @@ glob = "0.3.0"
#handlebars = "4.2.1"
heck = "0.4.0"
humantime = "2.1.0"
enso-build-base = { path = "../base" }
ide-ci = { path = "../ci_utils" }
indexmap = "1.7.0"
indicatif = "0.17.1"
itertools = "0.10.1"
lazy_static = "1.4.0"
#git2 = "0.13.25"
log = "0.4.14"
mime = "0.3.16"
new_mime_guess = "4.0.1"
nix = { workspace = true }
octocrab = { git = "https://github.com/enso-org/octocrab", default-features = false, features = [
"rustls"
@ -47,6 +48,7 @@ octocrab = { git = "https://github.com/enso-org/octocrab", default-features = fa
ouroboros = "0.15.0"
paste = "1.0.7"
path-absolutize = "3.0.11"
path-slash = "0.2.1"
platforms = { version = "3.0.0", features = ["serde"] }
pin-project = "1.0.8"
port_check = "0.1.5"

View File

@ -1,7 +1,7 @@
use enso_build::prelude::*;
use enso_build::setup_octocrab;
use ide_ci::models::config::RepoContext;
use ide_ci::github::Repo;
use octocrab::models::ReleaseId;
@ -9,9 +9,10 @@ use octocrab::models::ReleaseId;
#[tokio::main]
async fn main() -> Result {
let octo = setup_octocrab().await?;
let repo = RepoContext::from_str("enso-org/enso-staging")?;
let handler = repo.repos(&octo);
let releases = handler.releases();
let repo = Repo::from_str("enso-org/enso-staging")?;
let handler = repo.handle(&octo);
let repos = handler.repos();
let releases = repos.releases();
let release = releases.get_by_id(ReleaseId(59585385)).await?;
dbg!(&release);

View File

@ -3,7 +3,7 @@ use enso_build::prelude::*;
use aws_sdk_s3::model::ObjectCannedAcl;
use aws_sdk_s3::types::ByteStream;
use aws_sdk_s3::Client;
use enso_build::aws::BucketContext;
use enso_build::aws::s3::BucketContext;
use enso_build::aws::EDITIONS_BUCKET_NAME;
@ -15,7 +15,7 @@ async fn main() -> Result {
client: Client::new(&config),
bucket: EDITIONS_BUCKET_NAME.to_string(),
upload_acl: ObjectCannedAcl::PublicRead,
key_prefix: "enso".into(),
key_prefix: Some("enso".into()),
};
// std::env::set_var("AWS_SECRET_ACCESS_KEY", std::env::var("AWS_SECRET_ACCESS_KEY")?.trim());

View File

@ -8,6 +8,7 @@
changelog.yml:
gui.yml:
nightly.yml:
release.yml:
scala-new.yml:
app/:
gui/:
@ -26,15 +27,13 @@
"project-manager-bundle-<triple>":
enso:
dist/:
bin/:
client/:
content/:
gui/:
assets/:
package.json:
preload.js:
icons/:
project-manager/:
tmp/:
ide.wasm:
index.js:
style.css:
wasm_imports.js:
# Final WASM artifacts in `dist` directory.
wasm/:
? path: ide.wasm
@ -43,13 +42,15 @@
var: wasm_main_raw
? path: ide.js
var: wasm_glue
init:
build-init:
build.json:
distribution/:
editions/:
<edition>.yaml:
engine/:
runner-native/:
src/:
test/:
resources/:
Factorial.enso:
runtime/:
target/:
bench-report.xml:
@ -68,6 +69,7 @@
simple-library-server/:
build.sbt:
run:
runner: # The runner native image (Linux only).
CHANGELOG.md:
project-manager/:

View File

@ -1,14 +1,11 @@
use crate::prelude::*;
use crate::version::BuildKind;
use crate::version::Kind;
use anyhow::Context;
use aws_sdk_s3::model::ObjectCannedAcl;
use aws_sdk_s3::output::PutObjectOutput;
use aws_sdk_s3::types::ByteStream;
use bytes::Buf;
use ide_ci::models::config::RepoContext;
use serde::de::DeserializeOwned;
use s3::BucketContext;
// ==============
@ -16,6 +13,7 @@ use serde::de::DeserializeOwned;
// ==============
pub mod ecr;
pub mod s3;
@ -49,7 +47,7 @@ impl Edition {
self.0.contains("nightly")
|| Version::find_in_text(self)
.as_ref()
.map_or(false, |version| BuildKind::Nightly.matches(version))
.map_or(false, |version| Kind::Nightly.matches(version))
}
}
@ -87,56 +85,12 @@ impl Manifest {
}
}
#[derive(Clone, Debug)]
pub struct BucketContext {
pub client: aws_sdk_s3::Client,
pub bucket: String,
pub upload_acl: ObjectCannedAcl,
pub key_prefix: String,
}
impl BucketContext {
pub async fn get(&self, path: &str) -> Result<ByteStream> {
Ok(self
.client
.get_object()
.bucket(&self.bucket)
.key(format!("{}/{}", self.key_prefix, path))
.send()
.await?
.body)
}
pub async fn put(&self, path: &str, data: ByteStream) -> Result<PutObjectOutput> {
dbg!(self
.client
.put_object()
.bucket(&self.bucket)
.acl(self.upload_acl.clone())
.key(format!("{}/{}", self.key_prefix, path))
.body(data))
.send()
.await
.anyhow_err()
}
pub async fn get_yaml<T: DeserializeOwned>(&self, path: &str) -> Result<T> {
let text = self.get(path).await?.collect().await?;
serde_yaml::from_reader(text.reader()).anyhow_err()
}
pub async fn put_yaml(&self, path: &str, data: &impl Serialize) -> Result<PutObjectOutput> {
let buf = serde_yaml::to_string(data)?;
self.put(path, ByteStream::from(buf.into_bytes())).await
}
}
pub async fn update_manifest(repo_context: &RepoContext, edition_file: &Path) -> Result {
pub async fn update_manifest(repo_context: &impl IsRepo, edition_file: &Path) -> Result {
let bucket_context = BucketContext {
client: aws_sdk_s3::Client::new(&aws_config::load_from_env().await),
bucket: EDITIONS_BUCKET_NAME.to_string(),
upload_acl: ObjectCannedAcl::PublicRead,
key_prefix: repo_context.name.clone(),
key_prefix: Some(repo_context.name().to_string()),
};
let new_edition_name = Edition(

View File

@ -12,7 +12,8 @@ pub mod runtime;
#[instrument(skip(client))]
/// Lookup the repository by name.
#[instrument(skip(client), err)]
pub async fn resolve_repository(
client: &aws_sdk_ecr::Client,
repository_name: &str,
@ -23,10 +24,11 @@ pub async fn resolve_repository(
.repositories
.context("Missing repositories information.")?
.pop()
.context(format!("Cannot find repository {repository_name} in the registry."))
.with_context(|| format!("Cannot find repository {repository_name} in the registry."))
}
#[instrument(skip(client))]
/// Generate an authentication token for the repository.
#[instrument(skip(client), err)]
pub async fn get_credentials(client: &aws_sdk_ecr::Client) -> Result<docker::Credentials> {
let token = client.get_authorization_token().send().await?;
let auth_data = token
@ -47,6 +49,7 @@ pub async fn get_credentials(client: &aws_sdk_ecr::Client) -> Result<docker::Cre
Ok(docker::Credentials::new(*username, *password, proxy))
}
/// Get a repository URI, that can be used to refer to the repository in the Docker commands.
#[instrument(skip(client), ret)]
pub async fn get_repository_uri(
client: &aws_sdk_ecr::Client,
@ -56,3 +59,9 @@ pub async fn get_repository_uri(
let repository_uri = repository.repository_uri().context("Missing repository URI.")?;
Ok(repository_uri.into())
}
/// Build an ECR client from the default AWS configuration sources
/// (environment variables, shared config files, etc., as resolved by `aws_config`).
pub async fn client_from_env() -> aws_sdk_ecr::Client {
    aws_sdk_ecr::Client::new(&aws_config::load_from_env().await)
}

View File

@ -1,3 +1,5 @@
//! This module contains data necessary to deploy Enso Runtime to the cloud.
use crate::prelude::*;
use crate::paths::generated;
@ -8,11 +10,13 @@ use ide_ci::programs::Docker;
/// Name of the repository.
/// Name of the ECR repository with Runtime images.
pub const NAME: &str = "runtime";
/// Region where the ECR repository with Runtime images is located.
pub const REGION: &str = "eu-west-1";
/// Build the Runtime Docker image from the Engine package.
#[instrument(fields(%dockerfile, %engine_package_root))]
pub async fn build_runtime_image(
dockerfile: generated::RepoRootToolsCiDocker,

198
build/build/src/aws/s3.rs Normal file
View File

@ -0,0 +1,198 @@
//! Code supporting our S3 operations.
use crate::prelude::*;
use aws_sdk_s3::model::ObjectCannedAcl;
use aws_sdk_s3::output::PutObjectOutput;
use aws_sdk_s3::types::ByteStream;
use bytes::Buf;
use enso_build_base::extensions::path::SplitFilename;
use mime::Mime;
// ==============
// === Export ===
// ==============
pub mod gui;
/// Construct an S3 client using the default configuration resolved from the environment.
pub async fn client_from_env() -> aws_sdk_s3::Client {
    let config = aws_config::load_from_env().await;
    aws_sdk_s3::Client::new(&config)
}
/// Everything we need to get/put files to S3.
#[derive(Clone, Derivative)]
#[derivative(Debug)]
pub struct BucketContext {
/// Client used to issue the S3 API calls. Excluded from `Debug` output.
#[derivative(Debug = "ignore")]
pub client: aws_sdk_s3::Client,
/// Name of the bucket that all operations are scoped to.
pub bucket: String,
/// Canned ACL applied to every object uploaded through this context.
pub upload_acl: ObjectCannedAcl,
/// Prefix that will be prepended to the object key.
pub key_prefix: Option<String>,
}
impl BucketContext {
/// Compute the full S3 object key for the given path.
///
/// The path is normalized to use forward slashes; if `key_prefix` is set, it is prepended
/// with a `/` separator.
pub fn key(&self, path: impl AsRef<Path>) -> String {
let path = path.as_ref();
// S3 keys always use `/`, regardless of the host platform's path separator.
let normalized = path_slash::PathExt::to_slash_lossy(path);
if let Some(prefix) = &self.key_prefix {
format!("{}/{}", prefix, normalized)
} else {
normalized.into()
}
}
/// Download the object stored under the given (prefix-relative) path.
///
/// Returns the response body as a stream; fails with context naming the key and bucket.
pub async fn get(&self, path: &str) -> Result<ByteStream> {
trace!("Downloading {path} at {self:?}.");
Ok(self
.client
.get_object()
.bucket(&self.bucket)
.key(self.key(path))
.send()
.await
.with_context(|| {
format!("Failed to download {} from S3 bucket {}.", self.key(path), self.bucket)
})?
.body)
}
/// Upload `data` under the given (prefix-relative) path, applying the configured ACL and
/// content headers deduced from the path's extension(s).
pub async fn put(&self, path: &str, data: ByteStream) -> Result<PutObjectOutput> {
trace!("Uploading {path} at {self:?}.");
let mut request = self
.client
.put_object()
.bucket(&self.bucket)
.acl(self.upload_acl.clone())
.key(self.key(path))
.body(data);
// Cloud requested us to set content encoding and type.
let content_headers = ContentHeaders::from_path(path);
request = content_headers.apply(request);
request.send().await.with_context(|| {
format!("Failed to upload {} to S3 bucket {}.", self.key(path), self.bucket)
})
}
/// Upload a local file; the object key is the file's name (directories are stripped).
///
/// Fails if the path has no file name component or the file cannot be read.
#[instrument(fields(path = %path.as_ref().display()))]
pub async fn put_file(&self, path: impl AsRef<Path>) -> Result<PutObjectOutput> {
let path = path.as_ref();
let stream = ByteStream::from_path(path).await?;
let path = path.file_name().with_context(|| format!("Path {:?} has no file name", path))?;
self.put(path.as_str(), stream).await
}
/// Download an object and deserialize it from YAML.
pub async fn get_yaml<T: DeserializeOwned>(&self, path: &str) -> Result<T> {
let text = self.get(path).await?.collect().await?;
serde_yaml::from_reader(text.reader()).anyhow_err()
}
/// Serialize `data` to YAML and upload it under the given path.
pub async fn put_yaml(&self, path: &str, data: &impl Serialize) -> Result<PutObjectOutput> {
let buf = serde_yaml::to_string(data)?;
self.put(path, ByteStream::from(buf.into_bytes())).await
}
}
/// Content encodings that we recognize from a file's outermost extension.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ContentEncoding {
    /// Gzip compression, denoted by the `gz` extension.
    Gzip,
}

impl ContentEncoding {
    /// Deduce the content encoding from a file extension (without the leading dot).
    ///
    /// The comparison is ASCII-case-insensitive, so both `gz` and `GZ` are recognized.
    ///
    /// # Errors
    /// Fails when the extension does not denote a recognized encoding.
    pub fn from_ext(ext: &str) -> Result<Self> {
        if ext.eq_ignore_ascii_case("gz") {
            Ok(Self::Gzip)
        } else {
            bail!("Cannot recognize content encoding from extension: {}", ext)
        }
    }
}
/// Describe additional content-related headers that we might want to set.
///
/// These are applied to S3 `PutObject` requests (see `BucketContext::put`), so consumers of the
/// uploaded objects receive correct `Content-Type`/`Content-Encoding` values.
#[derive(Clone, Debug)]
pub struct ContentHeaders {
/// Encoding of the content. Typically compression, if any.
pub content_encoding: Option<ContentEncoding>,
/// MIME type of the content.
pub content_type: Mime,
}
impl Default for ContentHeaders {
fn default() -> Self {
Self { content_encoding: None, content_type: mime::APPLICATION_OCTET_STREAM }
}
}
impl ContentHeaders {
/// Create headers for the given MIME type, with no content encoding.
pub fn new(content_type: Mime) -> Self {
Self { content_type, ..default() }
}
/// The value to pass as the `Content-Encoding` header, if any.
pub fn content_encoding(&self) -> Option<&'static str> {
self.content_encoding.as_ref().map(|enc| match enc {
ContentEncoding::Gzip => "gzip",
})
}
/// Deduce content headers from a file path.
///
/// The outermost extension may denote an encoding (e.g. `foo.js.gz` -> gzip); in that case
/// the MIME type is guessed from the next extension. Otherwise the MIME type is guessed from
/// the outermost extension, falling back to `application/octet-stream`.
pub fn from_path(path: impl AsRef<Path>) -> Self {
let Ok(SplitFilename{ extension: outermost_extension, stem}) = path.split_filename() else {
// No extension, use defaults.
return default()
};
let Ok(next_extension) = stem.try_extension() else {
// Only one extension, use primary MIME.
let content_type = new_mime_guess::from_ext(outermost_extension.as_str()).first_or_octet_stream();
return Self::new(content_type)
};
if let Ok(content_encoding) = ContentEncoding::from_ext(outermost_extension.as_str()) {
// Two extensions, use primary MIME and encoding.
let content_type =
new_mime_guess::from_ext(next_extension.as_str()).first_or_octet_stream();
Self { content_encoding: Some(content_encoding), content_type }
} else {
// No encoding, use primary MIME.
let content_type =
new_mime_guess::from_ext(outermost_extension.as_str()).first_or_octet_stream();
Self::new(content_type)
}
}
/// Apply these headers to a `PutObject` request builder.
pub fn apply(
&self,
mut request: aws_sdk_s3::client::fluent_builders::PutObject,
) -> aws_sdk_s3::client::fluent_builders::PutObject {
if let Some(content_encoding) = self.content_encoding() {
request = request.content_encoding(content_encoding);
}
request.content_type(&self.content_type.to_string())
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Verify that `ContentHeaders::from_path` deduces both the content encoding and the MIME
    /// type from representative file names.
    #[test]
    fn deduce_content_headers() -> Result {
        // (path, expected Content-Encoding, expected Content-Type)
        let cases: [(&str, Option<&str>, &str); 5] = [
            ("wasm_imports.js.gz", Some("gzip"), "application/javascript"),
            ("index.js", None, "application/javascript"),
            ("style.css", None, "text/css"),
            ("ide.wasm", None, "application/wasm"),
            ("ide.wasm.gz", Some("gzip"), "application/wasm"),
        ];
        for (path, expected_encoding, expected_type) in cases {
            let headers = ContentHeaders::from_path(path);
            assert_eq!(headers.content_encoding(), expected_encoding);
            assert_eq!(headers.content_type.to_string().as_str(), expected_type);
        }
        Ok(())
    }
}

View File

@ -0,0 +1,36 @@
use crate::prelude::*;
use crate::aws::s3::BucketContext;
use aws_config::meta::region::RegionProviderChain;
/// AWS Region of the `ensocdn` bucket.
///
/// Used as a fallback when the environment does not specify a region.
pub const BUCKET_REGION: &str = "us-west-1";
/// The bucket where the GUI releases are stored.
pub const BUCKET: &str = "ensocdn";
/// Like the default client constructor, but falling back to [`BUCKET_REGION`] for region
/// resolution.
///
/// We do know the region of the bucket, so we should not require it from the caller. Still, the
/// environment is allowed to override it.
pub async fn client_from_env() -> Result<aws_sdk_s3::Client> {
    let region_provider = RegionProviderChain::default_provider().or_else(BUCKET_REGION);
    let config = aws_config::from_env().region(region_provider).load().await;
    Ok(aws_sdk_s3::Client::new(&config))
}
/// Construct a context for handling a given GUI version release.
///
/// Objects are keyed under the `ide/<version>` prefix in the release bucket.
/// Requires AWS credentials in the environment.
pub async fn context(version: &Version) -> Result<BucketContext> {
    let client = client_from_env().await?;
    let key_prefix = Some(format!("ide/{version}"));
    let context = BucketContext {
        client,
        bucket: BUCKET.to_string(),
        upload_acl: aws_sdk_s3::model::ObjectCannedAcl::PublicRead,
        key_prefix,
    };
    Ok(context)
}

View File

@ -4,7 +4,7 @@ use crate::ci::labels::NO_CHANGELOG_CHECK;
use crate::paths::generated::RepoRoot;
use ide_ci::actions::workflow::MessageLevel;
use ide_ci::programs::Git;
use ide_ci::programs::git;
@ -49,7 +49,7 @@ pub async fn check(repo_path: RepoRoot, context: ide_ci::actions::Context) -> Re
let repository = context.payload.repository.context("Missing repository information.")?;
let default_branch =
repository.default_branch.context("Missing default branch information.")?;
let git = Git::new(&repo_path).await?;
let git = git::Context::new(&repo_path).await?;
git.fetch_branch(REMOTE_NAME, &default_branch).await?;
let remote_base = format!("{REMOTE_NAME}/{default_branch}");
let files_changed = git.diff_against(remote_base).await?;

View File

@ -51,29 +51,23 @@ pub struct ConfigRaw {
/// The configuration of the script that is being provided by the external environment.
///
/// In our case, it is usually a configuration file in the main repository.
#[derive(Clone, Debug, Default)]
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct Config {
pub wasm_size_limit: Option<Byte>,
pub required_versions: HashMap<RecognizedProgram, VersionReq>,
}
impl Config {
/// Check whether all the required programs are available and have the required versions.
pub async fn check_programs(&self) -> Result {
for (program, version_req) in &self.required_versions {
let found = program.version().await?;
if !version_req.matches(&found) {
bail!(
"Found program {} in version {} that does not fulfill requirement {}.",
program,
found,
version_req
);
} else {
info!(
"Found program {} in supported version {} (required {}).",
program, found, version_req
);
}
let check_tasks = self
.required_versions
.iter()
.map(|(program, version_req)| check_program(program, version_req));
let results = futures::future::join_all(check_tasks).await;
let errors = results.into_iter().filter_map(Result::err).collect_vec();
if !(errors.is_empty()) {
bail!("Some required programs are not available or have wrong versions: {errors:?}")
}
Ok(())
}
@ -101,10 +95,40 @@ impl TryFrom<ConfigRaw> for Config {
}
}
/// Check if the given program is installed in the system and has the required version.
pub async fn check_program(program: &RecognizedProgram, version_req: &VersionReq) -> Result {
    // Querying the version also proves the program is present and runnable.
    let found = program.version().await?;
    if version_req.matches(&found) {
        info!(
            "Found program `{}` in supported version `{}` (required `{}`).",
            program, found, version_req
        );
        Ok(())
    } else {
        bail!(
            "Found program `{}` in version `{}` that does not fulfill requirement `{}`.",
            program,
            found,
            version_req
        )
    }
}
#[cfg(test)]
mod tests {
use super::*;
use ide_ci::log::setup_logging;
use ide_ci::programs::Node;
// Check that an exact version requirement (`=16.15.0`) rejects a different Node version, and
// that the `v`-prefixed output of `node --version` is parsed correctly.
#[tokio::test]
async fn check_node_version() -> Result {
setup_logging()?;
let version = Node.parse_version("v16.13.2")?;
let requirement = VersionReq::parse("=16.15.0")?;
assert!(!requirement.matches(&version));
Ok(())
}
#[tokio::test]
#[ignore]
@ -125,4 +149,26 @@ required-versions:
Ok(())
}
// Smoke test: deserialize a build configuration snippet (mirroring the repository's
// `build-config.yaml`) into `ConfigRaw` and convert it to `Config`. Any parse/convert error
// fails the test via `?`.
#[tokio::test]
async fn deserialize_config_in_repo() -> Result {
setup_logging()?;
// let config = include_str!("../../../build-config.yaml");
let config = r#"# Options intended to be common for all developers.
wasm-size-limit: 15.25 MiB
required-versions:
cargo-watch: ^8.1.1
node: =16.15.0
wasm-pack: ^0.10.2
# TODO [mwu]: Script can install `flatc` later on (if `conda` is present), so this is not required. However it should
# be required, if `conda` is missing.
# flatc: =1.12.0
"#;
let config = serde_yaml::from_str::<ConfigRaw>(config)?;
dbg!(&config);
// `dbg!` prints the conversion result; `?` propagates a conversion failure.
dbg!(Config::try_from(config))?;
Ok(())
}
}

View File

@ -3,8 +3,8 @@ use crate::prelude::*;
use crate::paths::TargetTriple;
use derivative::Derivative;
use ide_ci::models::config::RepoContext;
use ide_ci::programs::Git;
use ide_ci::github;
use ide_ci::programs::git;
use octocrab::models::repos::Release;
use octocrab::models::ReleaseId;
@ -25,7 +25,7 @@ pub struct BuildContext {
/// Remote repository is used for release-related operations. This also includes deducing a new
/// version number.
pub remote_repo: RepoContext,
pub remote_repo: ide_ci::github::Repo,
}
impl BuildContext {
@ -34,7 +34,7 @@ impl BuildContext {
async move {
match ide_ci::actions::env::GITHUB_SHA.get() {
Ok(commit) => Ok(commit),
Err(_e) => Git::new(root).await?.head_hash().await,
Err(_e) => git::Context::new(root).await?.head_hash().await,
}
}
.boxed()
@ -45,18 +45,16 @@ impl BuildContext {
&self,
designator: String,
) -> BoxFuture<'static, Result<Release>> {
let repository = self.remote_repo.clone();
let octocrab = self.octocrab.clone();
let repository = self.remote_repo_handle();
let designator_cp = designator.clone();
async move {
let release = if let Ok(id) = designator.parse2::<ReleaseId>() {
repository.find_release_by_id(&octocrab, id).await?
repository.find_release_by_id(id).await?
} else {
match designator.as_str() {
"latest" => repository.latest_release(&octocrab).await?,
"nightly" =>
crate::version::latest_nightly_release(&octocrab, &repository).await?,
tag => repository.find_release_by_text(&octocrab, tag).await?,
"latest" => repository.latest_release().await?,
"nightly" => crate::version::latest_nightly_release(&repository).await?,
tag => repository.find_release_by_text(tag).await?,
}
};
Ok(release)
@ -66,4 +64,8 @@ impl BuildContext {
})
.boxed()
}
pub fn remote_repo_handle(&self) -> github::repo::Handle<github::Repo> {
github::repo::Handle::new(&self.octocrab, self.remote_repo.clone())
}
}

View File

@ -7,7 +7,7 @@ use crate::paths::ComponentPaths;
use crate::paths::Paths;
use ide_ci::future::AsyncPolicy;
use ide_ci::models::config::RepoContext;
use ide_ci::github::Repo;
use std::collections::BTreeSet;
@ -191,7 +191,7 @@ pub enum ReleaseCommand {
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ReleaseOperation {
pub command: ReleaseCommand,
pub repo: RepoContext,
pub repo: Repo,
}
#[derive(Clone, PartialEq, Eq, Debug)]

View File

@ -19,11 +19,10 @@ use crate::paths::cache_directory;
use crate::paths::Paths;
use crate::paths::TargetTriple;
use crate::project::ProcessWrapper;
use crate::retrieve_github_access_token;
use ide_ci::actions::workflow::is_in_env;
use ide_ci::cache;
use ide_ci::env::Variable;
use ide_ci::github::release::IsReleaseExt;
use ide_ci::platform::DEFAULT_SHELL;
use ide_ci::programs::graal;
use ide_ci::programs::sbt;
@ -88,7 +87,7 @@ impl RunContext {
Sbt.require_present().await?;
// Other programs.
ide_ci::programs::Git::new_current().await?.require_present().await?;
ide_ci::programs::Git.require_present().await?;
ide_ci::programs::Go.require_present().await?;
ide_ci::programs::Cargo.require_present().await?;
ide_ci::programs::Node.require_present().await?;
@ -97,7 +96,7 @@ impl RunContext {
let prepare_simple_library_server = {
if self.config.test_scala {
let simple_server_path = &self.paths.repo_root.tools.simple_library_server;
ide_ci::programs::Git::new(simple_server_path)
ide_ci::programs::git::Context::new(simple_server_path)
.await?
.cmd()?
.clean()
@ -122,7 +121,8 @@ impl RunContext {
// TODO: After flatc version is bumped, it should be possible to get it without `conda`.
// See: https://www.pivotaltracker.com/story/show/180303547
if let Err(e) = Flatc.require_present_at(&FLATC_VERSION).await {
debug!("Cannot find expected flatc: {}", e);
warn!("Cannot find expected flatc: {}", e);
warn!("Will try to install it using conda. In case of issues, please install flatc manually, to avoid dependency on conda.");
// GitHub-hosted runner has `conda` on PATH but not things installed by it.
// It provides `CONDA` variable pointing to the relevant location.
if let Some(conda_path) = std::env::var_os("CONDA").map(PathBuf::from) {
@ -142,13 +142,14 @@ impl RunContext {
Flatc.lookup()?;
}
let _ = self.paths.emit_env_to_actions(); // Ignore error: we might not be run on CI.
self.paths.emit_env_to_actions().await?; // Ignore error: we might not be run on CI.
debug!("Build configuration: {:#?}", self.config);
// Setup Tests on Windows
if TARGET_OS == OS::Windows {
env::CiTestTimeFactor.set(&2);
env::CiFlakyTestEnable.set(&true);
let default_time_factor: usize = 2;
env::CI_TEST_TIMEFACTOR.set(&default_time_factor)?;
env::CI_TEST_FLAKY_ENABLE.set(&true)?;
}
// TODO [mwu]
@ -409,11 +410,22 @@ impl RunContext {
}
// if build_native_runner {
// Command::new("./runner")
// .current_dir(&self.repo_root)
// .args(["--run", "./engine/runner-native/src/test/resources/Factorial.enso"])
// .run_ok()
// let factorial_input = "6";
// let factorial_expected_output = "720";
// let output = Command::new(&self.repo_root.runner)
// .args([
// "--run",
//
// self.repo_root.engine.runner_native.src.test.resources.factorial_enso.as_str(),
// factorial_input,
// ])
// .env(ENSO_DATA_DIRECTORY.name(), &self.paths.engine.dir)
// .run_stdout()
// .await?;
// ensure!(
// output.contains(factorial_expected_output),
// "Native runner output does not contain expected result."
// );
// }
// Verify License Packages in Distributions
@ -481,28 +493,27 @@ impl RunContext {
}
pub async fn execute(&self, operation: Operation) -> Result {
match &operation {
match operation {
Operation::Release(ReleaseOperation { command, repo }) => match command {
ReleaseCommand::Upload => {
let artifacts = self.build().await?;
// Make packages.
let release_id = crate::env::ReleaseId.fetch()?;
let client = ide_ci::github::create_client(retrieve_github_access_token()?)?;
let upload_asset = |asset: PathBuf| {
ide_ci::github::release::upload_asset(repo, &client, release_id, asset)
};
let release_id = crate::env::ENSO_RELEASE_ID.get()?;
let release = ide_ci::github::release::ReleaseHandle::new(
&self.inner.octocrab,
repo,
release_id,
);
for package in artifacts.packages.into_iter() {
package.pack().await?;
upload_asset(package.artifact_archive).await?;
release.upload_asset_file(package.artifact_archive).await?;
}
for bundle in artifacts.bundles.into_iter() {
bundle.pack().await?;
upload_asset(bundle.artifact_archive).await?;
release.upload_asset_file(bundle.artifact_archive).await?;
}
if TARGET_OS == OS::Linux {
upload_asset(self.paths.manifest_file()).await?;
upload_asset(self.paths.launcher_manifest_file()).await?;
release.upload_asset_file(self.paths.manifest_file()).await?;
release.upload_asset_file(self.paths.launcher_manifest_file()).await?;
}
}
},

View File

@ -2,20 +2,14 @@
//use crate::prelude::*;
use ide_ci::env::Variable;
use ide_ci::define_env_var;
#[derive(Clone, Copy, Debug)]
pub struct CiTestTimeFactor;
impl Variable for CiTestTimeFactor {
const NAME: &'static str = "CI_TEST_TIMEFACTOR";
type Value = usize;
}
#[derive(Clone, Copy, Debug)]
pub struct CiFlakyTestEnable;
impl Variable for CiFlakyTestEnable {
const NAME: &'static str = "CI_TEST_FLAKY_ENABLE";
type Value = bool;
define_env_var! {
/// Factor applied to timeouts in tests. 1.0 means no change, 2.0 means double the timeout.
CI_TEST_TIMEFACTOR, usize;
/// Whether flaky tests should be run.
CI_TEST_FLAKY_ENABLE, bool;
}

View File

@ -5,7 +5,6 @@ use crate::postgres;
use crate::postgres::EndpointConfiguration;
use crate::postgres::Postgresql;
use ide_ci::env::Variable;
use ide_ci::future::AsyncPolicy;
use ide_ci::programs::docker::ContainerId;
@ -59,6 +58,12 @@ impl BuiltEnso {
Ok(command)
}
pub fn repl(&self) -> Result<Command> {
let mut command = self.cmd()?;
command.arg("--repl");
Ok(command)
}
pub fn compile_lib(&self, target: impl AsRef<Path>) -> Result<Command> {
ide_ci::fs::require_exist(&target)?;
let mut command = self.cmd()?;
@ -82,9 +87,8 @@ impl BuiltEnso {
let _httpbin = crate::httpbin::get_and_spawn_httpbin_on_free_port().await?;
let _postgres = match TARGET_OS {
OS::Linux => {
let runner_context_string = crate::env::RunnerContainerName
.fetch()
.map(|name| name.0)
let runner_context_string = crate::env::ENSO_RUNNER_CONTAINER_NAME
.get_raw()
.or_else(|_| ide_ci::actions::env::RUNNER_NAME.get())
.unwrap_or_else(|_| Uuid::new_v4().to_string());
// GH-hosted runners are named like "GitHub Actions 10". Spaces are not allowed in

View File

@ -1,28 +1,16 @@
#[allow(unused_imports)]
use crate::prelude::*;
use ide_ci::env::Variable;
use ide_ci::define_env_var;
use ide_ci::programs::docker::ContainerId;
#[derive(Clone, Copy, Debug)]
pub struct ReleaseId;
impl Variable for ReleaseId {
const NAME: &'static str = "ENSO_RELEASE_ID";
type Value = octocrab::models::ReleaseId;
}
define_env_var! {
ENSO_RELEASE_ID, octocrab::models::ReleaseId;
#[derive(Clone, Copy, Debug)]
pub struct RunnerContainerName;
impl Variable for RunnerContainerName {
const NAME: &'static str = "ENSO_RUNNER_CONTAINER_NAME";
type Value = ContainerId;
}
/// Name of the container that is running the current build.
ENSO_RUNNER_CONTAINER_NAME, ContainerId;
#[derive(Clone, Copy, Debug)]
pub struct NightlyEditionsLimit;
impl Variable for NightlyEditionsLimit {
const NAME: &'static str = "ENSO_NIGHTLY_EDITIONS_LIMIT";
type Value = usize;
ENSO_NIGHTLY_EDITIONS_LIMIT, usize;
}

View File

@ -1,18 +1,16 @@
use crate::prelude::*;
use ide_ci::env::Variable;
use ide_ci::programs::Go;
use tokio::process::Child;
pub mod env {
/// Environment variable that stores URL under which spawned httpbin server is available.
#[derive(Clone, Copy, Debug)]
pub struct Url;
impl ide_ci::env::Variable for Url {
const NAME: &'static str = "ENSO_HTTP_TEST_HTTPBIN_URL";
type Value = url::Url;
use super::*;
ide_ci::define_env_var! {
/// Environment variable that stores URL under which spawned httpbin server is available.
ENSO_HTTP_TEST_HTTPBIN_URL, Url;
}
}
@ -40,14 +38,14 @@ pub async fn get_and_spawn_httpbin(port: u16) -> Result<Spawned> {
let url_string = format!("http://localhost:{port}");
let url = Url::parse(&url_string)?;
env::Url.set(&url);
env::ENSO_HTTP_TEST_HTTPBIN_URL.set(&url)?;
Ok(Spawned { url, process })
}
impl Drop for Spawned {
fn drop(&mut self) {
debug!("Dropping the httpbin wrapper.");
env::Url.remove();
env::ENSO_HTTP_TEST_HTTPBIN_URL.remove();
}
}

View File

@ -9,8 +9,8 @@ use crate::project::ProcessWrapper;
use anyhow::Context;
use futures_util::future::try_join;
use futures_util::future::try_join4;
use ide_ci::github::RepoRef;
use ide_ci::io::download_all;
use ide_ci::models::config::RepoContext;
use ide_ci::program::command;
use ide_ci::program::EMPTY_ARGS;
use ide_ci::programs::node::NpmCommand;
@ -35,7 +35,7 @@ pub const IDE_ASSETS_URL: &str =
pub const ARCHIVED_ASSET_FILE: &str = "ide-assets-main/content/assets/";
pub const GOOGLE_FONTS_REPOSITORY: &str = "google/fonts";
pub const GOOGLE_FONTS_REPOSITORY: RepoRef = RepoRef { owner: "google", name: "fonts" };
pub const GOOGLE_FONT_DIRECTORY: &str = "ofl";
@ -81,6 +81,13 @@ pub mod env {
/// The app-specific password (not Apple ID password). See:
/// https://support.apple.com/HT204397
APPLEIDPASS, String;
/// `true` or `false`. Defaults to `true` — on a macOS development machine valid and
/// appropriate identity from your keychain will be automatically used.
CSC_IDENTITY_AUTO_DISCOVERY, bool;
/// Path to the python2 executable, used by electron-builder on macOS to package DMG.
PYTHON_PATH, PathBuf;
}
}
@ -102,9 +109,9 @@ pub async fn download_google_font(
output_path: impl AsRef<Path>,
) -> Result<Vec<Content>> {
let destination_dir = output_path.as_ref();
let repo = RepoContext::from_str(GOOGLE_FONTS_REPOSITORY)?;
let repo = GOOGLE_FONTS_REPOSITORY.handle(octocrab);
let path = format!("{GOOGLE_FONT_DIRECTORY}/{family}");
let files = repo.repos(octocrab).get_content().path(path).send().await?;
let files = repo.repos().get_content().path(path).send().await?;
let ttf_files =
files.items.into_iter().filter(|file| file.name.ends_with(".ttf")).collect_vec();
for file in &ttf_files {
@ -293,8 +300,6 @@ impl IdeDesktop {
// When watching we expect our artifacts to be served through server, not appear in any
// specific location on the disk.
let output_path = TempDir::new()?;
// let span = tracing::
// let wasm = wasm.inspect()
let watch_environment =
ContentEnvironment::new(self, wasm, build_info, output_path).await?;
Span::current().record("wasm", watch_environment.wasm.as_str());
@ -352,7 +357,7 @@ impl IdeDesktop {
let content_build = self
.npm()?
.set_env(env::ENSO_BUILD_GUI, gui.as_ref())?
.set_env(env::ENSO_BUILD_GUI, gui.as_path())?
.set_env(env::ENSO_BUILD_PROJECT_MANAGER, project_manager.as_ref())?
.set_env(env::ENSO_BUILD_IDE, output_path.as_ref())?
.set_env_opt(env::ENSO_BUILD_IDE_BUNDLED_ENGINE_VERSION, engine_version_to_use)?
@ -367,12 +372,25 @@ impl IdeDesktop {
let (icons, _content) = try_join(icons_build, content_build).await?;
let python_path = if TARGET_OS == OS::MacOS {
// On macOS electron-builder will fail during DMG creation if there is no python2
// installed. It is looked for in `/usr/bin/python` which is not valid place on newer
// MacOS versions.
// We can work around this by setting the `PYTHON_PATH` env variable. We attempt to
// locate `python2` in PATH which is enough to work on GitHub-hosted macOS
// runners.
Some(ide_ci::program::lookup("python2")?)
} else {
None
};
self.npm()?
.try_applying(&icons)?
// .env("DEBUG", "electron-builder")
.set_env(env::ENSO_BUILD_GUI, gui.as_ref())?
.set_env(env::ENSO_BUILD_GUI, gui.as_path())?
.set_env(env::ENSO_BUILD_IDE, output_path.as_ref())?
.set_env(env::ENSO_BUILD_PROJECT_MANAGER, project_manager.as_ref())?
.set_env_opt(env::PYTHON_PATH, python_path.as_ref())?
.workspace(Workspaces::Enso)
// .args(["--loglevel", "verbose"])
.run("dist", EMPTY_ARGS)

View File

@ -28,6 +28,10 @@
#![warn(unused_import_braces)]
#![warn(unused_qualifications)]
extern crate core;
use crate::prelude::*;
use anyhow::Context;

View File

@ -16,6 +16,10 @@ ide_ci::define_env_var! {
/// Directory where JUnit-format test run results are stored.
/// These are generated as part of the standard library test suite run.
ENSO_TEST_JUNIT_DIR, PathBuf;
/// Used to overwrite the default location of data directory. See:
/// <https://enso.org/docs/developer/enso/distribution/distribution.html#installed-enso-distribution-layout>.
ENSO_DATA_DIRECTORY, PathBuf;
}
pub const EDITION_FILE_ARTIFACT_NAME: &str = "Edition File";
@ -59,7 +63,7 @@ impl ComponentPaths {
Self { name, root, dir, artifact_archive }
}
pub fn emit_to_actions(&self, prefix: &str) -> Result {
pub async fn emit_to_actions(&self, prefix: &str) -> Result {
let paths = [
("NAME", &self.name),
("ROOT", &self.root),
@ -70,7 +74,8 @@ impl ComponentPaths {
ide_ci::actions::workflow::set_env(
&iformat!("{prefix}_DIST_{what}"),
&path.to_string_lossy(),
)?;
)
.await?;
}
Ok(())
}
@ -182,7 +187,7 @@ impl Paths {
/// Sets the environment variables in the current process and in GitHub Actions Runner (if being
/// run in its environment), so future steps of the job also have access to them.
pub fn emit_env_to_actions(&self) -> Result {
pub async fn emit_env_to_actions(&self) -> Result {
let components = [
("ENGINE", &self.engine),
("LAUNCHER", &self.launcher),
@ -190,11 +195,11 @@ impl Paths {
];
for (prefix, paths) in components {
paths.emit_to_actions(prefix)?;
paths.emit_to_actions(prefix).await?;
}
ide_ci::actions::workflow::set_env("TARGET_DIR", &self.target.to_string_lossy())?;
ENSO_TEST_JUNIT_DIR.set_workflow_env(self.test_results.as_path())?;
ide_ci::actions::workflow::set_env("TARGET_DIR", &self.target.to_string_lossy()).await?;
ENSO_TEST_JUNIT_DIR.set_workflow_env(self.test_results.as_path()).await?;
Ok(())
}
@ -264,18 +269,21 @@ pub fn root_to_changelog(root: impl AsRef<Path>) -> PathBuf {
/// The default value of `ENSO_DATA_DIRECTORY`.
/// See: <https://enso.org/docs/developer/enso/distribution/distribution.html#installed-enso-distribution-layout>
///
/// We use it as a fallback when the environment variable is not set.
pub fn default_data_directory() -> PathBuf {
let project_path = match TARGET_OS {
OS::MacOS => "org.enso",
_ => "enso",
};
// We can unwrap, because all systems we target define data local directory.
// This is enforced by the unit test below.
dirs::data_local_dir().unwrap().join(project_path)
}
/// Get the `ENSO_DATA_DIRECTORY` path.
pub fn data_directory() -> PathBuf {
std::env::var_os("ENSO_DATA_DIRECTORY").map_or_else(default_data_directory, PathBuf::from)
ENSO_DATA_DIRECTORY.get().unwrap_or_else(|_| default_data_directory())
}
/// Get the place where global IR caches are stored.
@ -302,3 +310,14 @@ pub fn parent_cargo_toml(initial_path: impl AsRef<Path>) -> Result<PathBuf> {
ensure!(path.pop(), "No Cargo.toml found for {}", initial_path.as_ref().display());
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn default_data_directory_is_present() {
// We just check that the function does not panic, as it has unwrap.
default_data_directory();
}
}

View File

@ -52,9 +52,9 @@ pub enum EndpointConfiguration {
impl EndpointConfiguration {
/// Tries to deduce what endpoint should be used for a spawned Postgres service.
pub fn deduce() -> Result<Self> {
if let Ok(container_name) = std::env::var("ENSO_RUNNER_CONTAINER_NAME") {
if let Ok(container_name) = crate::env::ENSO_RUNNER_CONTAINER_NAME.get() {
debug!("Assuming that I am in the Docker container named {container_name}.");
Ok(Self::Container { owner: ContainerId(container_name) })
Ok(Self::Container { owner: container_name })
} else {
// If we are running on the bare machine (i.e. not in container), we spawn postgres
// and expose it on a free host port. Then we can directly consume.
@ -86,7 +86,7 @@ impl Configuration {
pub fn set_enso_test_env(&self) -> Result {
env::tests::ENSO_DATABASE_TEST_DB_NAME.set(&self.database_name)?;
env::tests::ENSO_DATABASE_TEST_HOST.set(match &self.endpoint {
env::tests::ENSO_DATABASE_TEST_HOST.set(&match &self.endpoint {
EndpointConfiguration::Host { port } => format!("localhost:{port}"),
EndpointConfiguration::Container { .. } =>
format!("localhost:{POSTGRES_CONTAINER_DEFAULT_PORT}"),

View File

@ -72,11 +72,6 @@ impl<T> PlainArtifact<T> {
pub fn new(path: impl Into<PathBuf>) -> Self {
Self { path: path.into(), phantom: default() }
}
fn from_existing(path: impl AsRef<Path>) -> BoxFuture<'static, Result<Self>>
where T: Send + Sync + 'static {
ready(Ok(Self::new(path.as_ref()))).boxed()
}
}
/// State available to all project-related operations.
@ -224,15 +219,18 @@ pub trait IsTarget: Clone + Debug + Sized + Send + Sync + 'static {
) -> BoxFuture<'static, Result<Self::Artifact>> {
let Context { octocrab, cache, upload_artifacts: _, repo_root: _ } = context;
let CiRunSource { run_id, artifact_name, repository } = ci_run;
let repository = repository.handle(&octocrab);
let span = info_span!("Downloading CI Artifact.", %artifact_name, %repository, target = output_path.as_str());
let this = self.clone();
async move {
let artifact =
repository.find_artifact_by_name(&octocrab, run_id, &artifact_name).await?;
let artifact = repository.find_artifact_by_name(run_id, &artifact_name).await?;
info!("Will download artifact: {:#?}", artifact);
let artifact_to_get = cache::artifact::ExtractedArtifact {
client: octocrab.clone(),
key: cache::artifact::Key { artifact_id: artifact.id, repository },
key: cache::artifact::Key {
artifact_id: artifact.id,
repository: repository.repo,
},
};
let artifact = cache.get(artifact_to_get).await?;
let inner_archive_path =
@ -244,10 +242,34 @@ pub trait IsTarget: Clone + Debug + Sized + Send + Sync + 'static {
.boxed()
}
fn find_asset(&self, _assets: Vec<Asset>) -> Result<Asset> {
fn find_asset<'a>(&self, release: &'a octocrab::models::repos::Release) -> Result<&'a Asset> {
release.assets.iter().find(|asset| self.matches_asset(asset)).with_context(|| {
let asset_names = release.assets.iter().map(|asset| &asset.name).join(", ");
format!(
"No matching asset for target {:?} in release {:?}. Available assets: {}",
self, release, asset_names
)
})
}
fn matches_asset(&self, _asset: &Asset) -> bool {
todo!("Not implemented for target {self:?}!")
}
// /// Upload the artifact as an asset to the GitHub release.
// fn upload_asset(
// &self,
// release_handle: ReleaseHandle,
// output: impl Future<Output = Result<Self::Artifact>> + Send + 'static,
// ) -> BoxFuture<'static, Result> {
// async move {
// let artifact = output.await?;
// release_handle.upload_compressed_dir(&artifact).await?;
// Ok(())
// }
// .boxed()
// }
fn download_asset(
&self,
context: Context,
@ -261,7 +283,8 @@ pub trait IsTarget: Clone + Debug + Sized + Send + Sync + 'static {
let this = self.clone();
async move {
let ReleaseSource { asset_id, repository } = &source;
let archive_source = repository.download_asset_job(&octocrab, *asset_id);
let repository = repository.handle(&octocrab);
let archive_source = repository.download_asset_job(*asset_id);
let extract_job = cache::archive::ExtractedArchive {
archive_source,
path_to_extract: path_to_extract(),

View File

@ -150,16 +150,12 @@ impl IsTarget for Backend {
.boxed()
}
fn find_asset(&self, assets: Vec<Asset>) -> Result<Asset> {
assets
.into_iter()
.find(|asset| {
let name = &asset.name;
self.matches_platform(name)
&& is_archive_name(name)
&& name.contains("project-manager")
&& (name.contains("bundle") || asset.size > 200_000_000)
})
.context("Failed to find release asset with Enso Project Manager bundle.")
fn matches_asset(&self, asset: &Asset) -> bool {
// The size condition is used to discern actual artifact from its checksum.
let name = &asset.name;
self.matches_platform(name)
&& is_archive_name(name)
&& name.contains("project-manager")
&& (name.contains("bundle") || asset.size > 200_000_000)
}
}

View File

@ -2,11 +2,11 @@ use crate::prelude::*;
use crate::ide::web::IdeDesktop;
use crate::project::Context;
use crate::project::IsArtifact;
use crate::project::IsTarget;
use crate::project::IsWatchable;
use crate::project::IsWatcher;
use crate::project::PerhapsWatched;
use crate::project::PlainArtifact;
use crate::project::Wasm;
use crate::source::BuildTargetJob;
use crate::source::GetTargetJob;
@ -21,7 +21,23 @@ use ide_ci::ok_ready_boxed;
pub type Artifact = PlainArtifact<Gui>;
#[derive(Clone, Debug, PartialEq, Eq, Hash, Shrinkwrap)]
pub struct Artifact(crate::paths::generated::RepoRootDistGui);
impl AsRef<Path> for Artifact {
fn as_ref(&self) -> &Path {
self.0.as_path()
}
}
impl IsArtifact for Artifact {}
impl Artifact {
    /// Wrap the given directory path as a GUI artifact.
    ///
    /// NOTE(review): no validation is performed yet that the path actually contains a GUI
    /// distribution — see the TODO below.
    pub fn new(gui_path: impl AsRef<Path>) -> Self {
        // TODO: sanity check
        Self(crate::paths::generated::RepoRootDistGui::new_root(gui_path.as_ref()))
    }
}
#[derive(Clone, Derivative, derive_more::Deref)]
#[derivative(Debug)]
@ -55,7 +71,7 @@ impl IsTarget for Gui {
}
fn adapt_artifact(self, path: impl AsRef<Path>) -> BoxFuture<'static, Result<Self::Artifact>> {
Artifact::from_existing(path)
ok_ready_boxed(Artifact::new(path))
}
fn build_internal(
@ -127,7 +143,7 @@ impl IsWatchable for Gui {
let wasm_artifacts = ok_ready_boxed(perhaps_watched_wasm.as_ref().clone());
let watch_process =
ide.watch_content(wasm_artifacts, &build_info.await?, watch_input.shell).await?;
let artifact = Self::Artifact::from_existing(destination).await?;
let artifact = Artifact::new(&destination);
let web_watcher = crate::project::Watcher { watch_process, artifact };
Ok(Self::Watcher { wasm: perhaps_watched_wasm, web: web_watcher })
}

View File

@ -14,7 +14,6 @@ use crate::source::WithDestination;
use derivative::Derivative;
use ide_ci::cache;
use ide_ci::env::Variable;
use ide_ci::fs::compressed_size;
use ide_ci::fs::copy_file_if_different;
use ide_ci::programs::cargo;
@ -208,7 +207,7 @@ impl IsTarget for Wasm {
command
.current_dir(&repo_root)
.kill_on_drop(true)
.env_remove(ide_ci::programs::rustup::env::Toolchain::NAME)
.env_remove(ide_ci::programs::rustup::env::RUSTUP_TOOLCHAIN.name())
.set_env(env::ENSO_ENABLE_PROC_MACRO_SPAN, &true)?
.build()
.arg(wasm_pack::Profile::from(*profile))

View File

@ -5,15 +5,30 @@ use crate::paths::generated;
use crate::paths::TargetTriple;
use crate::paths::EDITION_FILE_ARTIFACT_NAME;
use crate::project;
use crate::project::gui;
use crate::project::Gui;
use crate::project::IsTarget;
use crate::source::ExternalSource;
use crate::source::FetchTargetJob;
use ide_ci::github;
use ide_ci::github::release::ReleaseHandle;
use ide_ci::io::web::handle_error_response;
use ide_ci::programs::Docker;
use ide_ci::programs::SevenZip;
use octocrab::models::repos::Release;
use reqwest::Response;
use serde_json::json;
use tempfile::tempdir;
pub async fn create_release(context: &BuildContext) -> Result<Release> {
/// Build a handle to the release denoted by the `ENSO_RELEASE_ID` environment variable.
pub fn release_from_env(context: &BuildContext) -> Result<ReleaseHandle> {
    let id = crate::env::ENSO_RELEASE_ID.get()?;
    let repo = context.remote_repo.clone();
    Ok(ReleaseHandle::new(&context.octocrab, repo, id))
}
pub async fn draft_a_new_release(context: &BuildContext) -> Result<Release> {
let versions = &context.triple.versions;
let commit = ide_ci::actions::env::GITHUB_SHA.get()?;
@ -22,9 +37,10 @@ pub async fn create_release(context: &BuildContext) -> Result<Release> {
crate::changelog::Changelog(&changelog_contents).top_release_notes()?;
debug!("Preparing release {} for commit {}", versions.version, commit);
let release = context
.remote_repo
.repos(&context.octocrab)
.remote_repo_handle()
.repos()
.releases()
.create(&versions.tag())
.target_commitish(&commit)
@ -35,22 +51,22 @@ pub async fn create_release(context: &BuildContext) -> Result<Release> {
.send()
.await?;
crate::env::ReleaseId.emit(&release.id)?;
ide_ci::actions::workflow::set_output(&crate::env::ENSO_RELEASE_ID, &release.id).await?;
Ok(release)
}
pub async fn publish_release(context: &BuildContext) -> Result {
let BuildContext { inner: project::Context { octocrab, .. }, remote_repo, triple, .. } =
context;
let remote_repo = context.remote_repo_handle();
let BuildContext { inner: project::Context { .. }, triple, .. } = context;
let release_id = crate::env::ReleaseId.fetch()?;
let release_id = crate::env::ENSO_RELEASE_ID.get()?;
debug!("Looking for release with id {release_id} on github.");
let release = remote_repo.repos(octocrab).releases().get_by_id(release_id).await?;
let release = remote_repo.repos().releases().get_by_id(release_id).await?;
ensure!(release.draft, "Release has been already published!");
debug!("Found the target release, will publish it.");
remote_repo.repos(octocrab).releases().update(release.id.0).draft(false).send().await?;
remote_repo.repos().releases().update(release.id.0).draft(false).send().await?;
debug!("Done. Release URL: {}", release.url);
let temp = tempdir()?;
@ -68,68 +84,172 @@ pub async fn publish_release(context: &BuildContext) -> Result {
.await?;
debug!("Updating edition in the AWS S3.");
crate::aws::update_manifest(remote_repo, &edition_file_path).await?;
crate::aws::update_manifest(&remote_repo, &edition_file_path).await?;
Ok(())
}
pub async fn deploy_to_ecr(context: &BuildContext, repository: String) -> Result {
let octocrab = &context.octocrab;
let release_id = crate::env::ReleaseId.fetch()?;
/// Download the Enso Engine distribution from the GitHub release.
pub async fn get_engine_package<R: IsRepo>(
repo: &github::repo::Handle<R>,
output: impl AsRef<Path>,
triple: &TargetTriple,
) -> Result<generated::EnginePackage> {
let release_id = crate::env::ENSO_RELEASE_ID.get()?;
let package_name = generated::RepoRootBuiltDistribution::new_root(".", triple.to_string())
.enso_engine_triple
.file_name()
.context("Failed to get Engine Package name.")?
.as_str()
.to_string();
let linux_triple = TargetTriple { os: OS::Linux, ..context.triple.clone() };
let package_name =
generated::RepoRootBuiltDistribution::new_root(".", linux_triple.to_string())
.enso_engine_triple
.file_name()
.context("Failed to get Engine Package name.")?
.as_str()
.to_string();
let release = context.remote_repo.find_release_by_id(octocrab, release_id).await?;
let release = repo.find_release_by_id(release_id).await?;
let asset = github::find_asset_by_text(&release, &package_name)?;
let temp_for_archive = tempdir()?;
let downloaded_asset = context
.remote_repo
.download_asset_to(octocrab, asset, temp_for_archive.path().to_owned())
.await?;
let downloaded_asset =
repo.download_asset_to(asset, temp_for_archive.path().to_owned()).await?;
ide_ci::archive::extract_to(&downloaded_asset, output.as_ref()).await?;
let engine_package =
generated::EnginePackage::new_under(output.as_ref(), triple.versions.version.to_string());
Ok(engine_package)
}
/// Download the Enso Engine distribution from the GitHub release and build Runtime Docker image
/// from it.
pub async fn generate_runtime_image(
context: &BuildContext,
tag: impl Into<String>,
) -> Result<ide_ci::programs::docker::ImageId> {
// Our runtime images always target Linux.
let linux_triple = TargetTriple { os: OS::Linux, ..context.triple.clone() };
let temp_for_extraction = tempdir()?;
ide_ci::archive::extract_to(&downloaded_asset, &temp_for_extraction).await?;
let engine_package = generated::EnginePackage::new_under(
&temp_for_extraction,
context.triple.versions.version.to_string(),
);
let config = &aws_config::load_from_env().await;
let client = aws_sdk_ecr::Client::new(config);
let repository_uri = crate::aws::ecr::get_repository_uri(&client, &repository).await?;
let tag = format!("{}:{}", repository_uri, context.triple.versions.version);
let _image = crate::aws::ecr::runtime::build_runtime_image(
context.repo_root.tools.ci.docker.clone(),
engine_package,
tag.clone(),
let engine_package = get_engine_package(
&context.remote_repo_handle(),
temp_for_extraction.path(),
&linux_triple,
)
.await?;
crate::aws::ecr::runtime::build_runtime_image(
context.repo_root.tools.ci.docker.clone(),
engine_package,
tag.into(),
)
.await
}
/// Deploy the backend to the ECR.
///
/// Downloads the Engine package from the release, builds the runtime image from it and pushes
/// the image to our ECR.
pub async fn deploy_to_ecr(context: &BuildContext, repository: String) -> Result {
    let client = crate::aws::ecr::client_from_env().await;
    let repository_uri = crate::aws::ecr::get_repository_uri(&client, &repository).await?;
    let tag = format!("{}:{}", repository_uri, context.triple.versions.version);
    // The returned image ID is irrelevant — the pushed image is referred to by its tag.
    let _image_id = generate_runtime_image(context, &tag).await?;
    let credentials = crate::aws::ecr::get_credentials(&client).await?;
    Docker.while_logged_in(credentials, || async move { Docker.push(&tag).await }).await?;
    Ok(())
}
pub async fn dispatch_cloud_image_build_action(octocrab: &Octocrab, version: &Version) -> Result {
let input = serde_json::json!({
"version": version.to_string(),
});
octocrab
.actions()
.create_workflow_dispatch("enso-org", "cloud-v2", "build-image.yaml", "main")
.inputs(input)
.send()
.await
.context("Failed to dispatch the cloud image build action.")
/// Fetch the GUI build artifacts uploaded by the CI run that is currently executing.
pub async fn get_gui_from_current_ci_run(
    context: &BuildContext,
    out_dir: impl Into<PathBuf>,
) -> Result<gui::Artifact> {
    let gui = Gui;
    let job = FetchTargetJob {
        destination: out_dir.into(),
        inner: ExternalSource::new_ongoing_ci_run(gui.artifact_name()),
    };
    gui.get_external(context.inner.clone(), job).await
}
/// Deliver the GUI to the cloud: fetch it from the current CI run, upload its assets to S3 and
/// let the cloud know a new version is available.
pub async fn upload_gui_to_cloud_good(context: &BuildContext) -> Result {
    let version = &context.triple.versions.version;
    let work_dir = tempdir()?;
    let gui = get_gui_from_current_ci_run(context, work_dir.path()).await?;
    upload_gui_to_cloud(&gui.assets, version).await?;
    notify_cloud_about_gui(version).await?;
    Ok(())
}
/// Upload the GUI assets to the cloud's AWS S3 bucket.
pub async fn upload_gui_to_cloud(
    assets: &crate::paths::generated::RepoRootDistGuiAssets,
    version: &Version,
) -> Result {
    let bucket = crate::aws::s3::gui::context(version).await?;
    // Some assets go up verbatim, others gzipped. The split seems somewhat arbitrary now.
    let plain = [assets.ide_wasm.as_path(), assets.style_css.as_path()];
    let gzipped = [assets.index_js.as_path(), assets.wasm_imports_js.as_path()];
    for file in &plain {
        bucket.put_file(file).await?;
    }
    put_files_gzipping(&bucket, &gzipped).await?;
    Ok(())
}
/// Pack each of the given files with `gzip` and upload it to the S3 bucket.
///
/// Each file keeps its original name, with an extra `.gz` extension appended.
pub async fn put_files_gzipping(
    bucket: &crate::aws::s3::BucketContext,
    files: impl IntoIterator<Item = impl AsRef<Path>>,
) -> Result {
    let scratch = tempdir()?;
    for file in files {
        let packed = file.with_parent(&scratch).with_appended_extension("gz");
        SevenZip.pack(&packed, [file]).await?;
        bucket.put_file(&packed).await?;
    }
    Ok(())
}
/// Tell the cloud backend that a new GUI version has been uploaded.
#[context("Failed to notify the cloud about GUI upload in version {}.", version)]
pub async fn notify_cloud_about_gui(version: &Version) -> Result<Response> {
    let payload = json!({
        "versionNumber": version.to_string(),
        "versionType": "Ide"
    });
    let client = reqwest::Client::new();
    let response = client
        .post("https://nngmxi3zr4.execute-api.eu-west-1.amazonaws.com/versions")
        .header("x-enso-organization-id", "org-2BqGX0q2yCdONdmx3Om1MVZzmv3")
        .header("Content-Type", "application/json")
        .json(&payload)
        .send()
        .await?;
    debug!("Response code from the cloud: {}.", response.status());
    handle_error_response(response).await
}
#[cfg(test)]
mod tests {
    use super::*;

    // Manual smoke test: uploads a locally built GUI to the real S3 bucket and notifies the
    // cloud. Hardcodes a developer-machine path, hence `#[ignore]` — run by hand only.
    #[tokio::test]
    #[ignore]
    async fn upload_gui() -> Result {
        setup_logging()?;
        let assets = crate::paths::generated::RepoRootDistGuiAssets::new_root(
            r"H:\NBO\enso4\dist\gui\assets",
        );
        let version = "2022.1.1-dev.provisional.test.2".parse2()?;
        upload_gui_to_cloud(&assets, &version).await?;
        notify_cloud_about_gui(&version).await?;
        Ok(())
    }

    // Manual smoke test: hits the real cloud endpoint, hence `#[ignore]`d in CI.
    #[tokio::test]
    #[ignore]
    async fn notify_cloud() -> Result {
        setup_logging()?;
        let version = Version::from_str("2022.1.1-rc.2")?;
        notify_cloud_about_gui(&version).await?;
        Ok(())
    }
}

View File

@ -1,6 +1,13 @@
use crate::prelude::*;
// ==============
// === Export ===
// ==============
pub mod cloud;
/// Heuristic that checks if given path can be plausibly considered to be the root of the Enso
/// repository.

View File

@ -0,0 +1,60 @@
use crate::prelude::*;
use ide_ci::github;
use ide_ci::github::RepoRef;
/// Coordinates of the repository hosting the cloud backend; target of our workflow dispatches.
pub const CLOUD_REPO: RepoRef = RepoRef { owner: "enso-org", name: "cloud-v2" };

/// Name of the workflow file (in [`CLOUD_REPO`]) that builds the backend image.
pub const BUILD_IMAGE_WORKFLOW: &str = "build-image.yaml";
/// Input of the "Build Image" workflow. The shape follows the schema defined by
/// https://github.com/enso-org/cloud-v2/blob/main/.github/workflows/build-image.yaml#L4
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
pub struct BuildImageInput<T> {
    runtime_version: T,
}

impl<T> BuildImageInput<T> {
    /// Wrap the given runtime version as the workflow input.
    pub fn new(runtime_version: T) -> Self {
        BuildImageInput { runtime_version }
    }
}
/// Ask the cloud to build the Enso Backend image, based on the image that is in ECR.
///
/// In general this is meant to be invoked after each ECR push.
#[instrument(fields(%version), skip(octocrab))]
pub async fn build_image_workflow_dispatch_input(octocrab: &Octocrab, version: &Version) -> Result {
    let repo = CLOUD_REPO.handle(octocrab);
    // The workflow must be dispatched on the repository's default branch.
    let repo_info = repo.get().await?;
    let default_branch = repo_info.default_branch.with_context(|| {
        format!(
            "Failed to get the default branch of the {} repository. Missing field: `default_branch`.",
            CLOUD_REPO
        )
    })?;
    debug!("Will invoke on ref: '{}'", default_branch);
    let input = BuildImageInput::new(version);
    info!("Dispatching the cloud workflow to build the image.");
    github::workflow::dispatch(&repo, BUILD_IMAGE_WORKFLOW, default_branch, &input).await
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::setup_octocrab;

    // Manual smoke test: dispatches a real workflow in the cloud repository, hence
    // `#[ignore]`d — run by hand with valid GitHub credentials.
    #[tokio::test]
    #[ignore]
    async fn manual_call() -> Result {
        setup_logging()?;
        let octo = setup_octocrab().await?;
        build_image_workflow_dispatch_input(&octo, &Version::parse("2022.1.1-nightly.2022-10-18")?)
            .await?;
        Ok(())
    }
}

View File

@ -4,7 +4,7 @@ use crate::project::IsTarget;
use crate::project::IsWatchable;
use derivative::Derivative;
use ide_ci::models::config::RepoContext;
use ide_ci::github::Repo;
use octocrab::models::AssetId;
use octocrab::models::RunId;
@ -23,6 +23,12 @@ pub enum ExternalSource {
Release(ReleaseSource),
}
impl ExternalSource {
    /// Describe an artifact produced by the CI run that is currently executing.
    pub fn new_ongoing_ci_run(artifact_name: impl Into<String>) -> Self {
        let artifact_name = artifact_name.into();
        Self::OngoingCiRun(OngoingCiRunSource { artifact_name })
    }
}
#[derive(Derivative)]
#[derivative(Debug)]
pub enum Source<Target: IsTarget> {
@ -41,7 +47,7 @@ pub struct OngoingCiRunSource {
#[derivative(Debug)]
pub struct CiRunSource {
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub repository: RepoContext,
pub repository: Repo,
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub run_id: RunId,
pub artifact_name: String,
@ -51,7 +57,7 @@ pub struct CiRunSource {
#[derivative(Debug)]
pub struct ReleaseSource {
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub repository: RepoContext,
pub repository: Repo,
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub asset_id: AssetId,
}

View File

@ -1,3 +1,5 @@
//! Code that deals with the version of Enso.
use crate::prelude::*;
use anyhow::Context;
@ -5,7 +7,7 @@ use chrono::Datelike;
use derivative::Derivative;
use ide_ci::define_env_var;
use ide_ci::env::new::TypedVariable;
use ide_ci::models::config::RepoContext;
use ide_ci::github;
use octocrab::models::repos::Release;
use semver::Prerelease;
use std::collections::BTreeSet;
@ -18,64 +20,88 @@ use tracing::instrument;
// Variable that stores Enso Engine version.
define_env_var! {
/// The version of Enso (shared by GUI and Engine).
ENSO_VERSION, Version;
/// Edition name for the build.
///
/// By convention, this is the same as the version.
ENSO_EDITION, String;
/// Whether the development-specific Engine features should be disabled.
ENSO_RELEASE_MODE, bool;
}
/// Prerelease identifier used by local (developer) builds.
pub const LOCAL_BUILD_PREFIX: &str = "dev";
/// Prerelease identifier used by nightly builds.
pub const NIGHTLY_BUILD_PREFIX: &str = "nightly";
/// Prerelease identifier used by release-candidate builds.
pub const RC_BUILD_PREFIX: &str = "rc";
pub fn default_dev_version() -> Version {
let mut ret = Version::new(0, 0, 0);
ret.pre = Prerelease::new(LOCAL_BUILD_PREFIX).unwrap();
ret
}
pub fn is_nightly_release(release: &Release) -> bool {
!release.draft && release.tag_name.contains(NIGHTLY_BUILD_PREFIX)
}
pub async fn nightly_releases(
octocrab: &Octocrab,
repo: &RepoContext,
) -> Result<impl Iterator<Item = Release>> {
Ok(repo.all_releases(octocrab).await?.into_iter().filter(is_nightly_release))
}
pub async fn latest_nightly_release(octocrab: &Octocrab, repo: &RepoContext) -> Result<Release> {
// TODO: this assumes that releases are returned in date order, to be confirmed
// (but having to download all the pages to see which is latest wouldn't be nice)
nightly_releases(octocrab, repo).await?.next().context("Failed to find any nightly releases.")
}
#[derive(Clone, Derivative, Serialize, Deserialize, Shrinkwrap, PartialEq, Eq)]
#[derivative(Debug)]
pub struct Versions {
#[shrinkwrap(main_field)]
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub version: Version,
pub release_mode: bool,
}
impl Default for Versions {
fn default() -> Self {
Versions { version: default_dev_version(), release_mode: false }
/// Check whether the given GitHub release is of the provided kind.
pub fn is_release_of_kind(release: &Release, kind: Kind) -> bool {
    let tag = &release.tag_name;
    match kind {
        Kind::Dev => tag.contains(LOCAL_BUILD_PREFIX),
        Kind::Nightly => tag.contains(NIGHTLY_BUILD_PREFIX),
        Kind::Rc => tag.contains(RC_BUILD_PREFIX),
        Kind::Stable => !release.prerelease,
    }
}
/// Iterate over all releases in the GitHub repository that are of the given kind.
pub async fn releases_of_kind(
    repo: &github::repo::Handle<impl IsRepo>,
    kind: Kind,
) -> Result<impl Iterator<Item = Release>> {
    let all = repo.all_releases().await?;
    let matching = all.into_iter().filter(move |release| is_release_of_kind(release, kind));
    Ok(matching)
}
/// Get the latest nightly release in the GitHub repository.
pub async fn latest_nightly_release(repo: &github::repo::Handle<impl IsRepo>) -> Result<Release> {
    // TODO: this assumes that releases are returned in date order, to be confirmed
    // (but having to download all the pages to see which is latest wouldn't be nice)
    let mut nightlies = releases_of_kind(repo, Kind::Nightly).await?;
    nightlies.next().context("Failed to find any nightly releases.")
}
/// Keeps the version of Enso, edition name and whether this version should be treated as a release.
///
/// Basically this is everything that is needed to define the version of the build.
#[derive(Clone, Derivative, Serialize, Deserialize, Shrinkwrap, PartialEq, Eq)]
#[derivative(Debug)]
pub struct Versions {
    /// The version of Enso.
    ///
    /// Currently it also doubles as the edition name. In future we might want to separate them.
    #[shrinkwrap(main_field)]
    #[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
    pub version: Version,
    /// Whether this version should be treated as a release.
    ///
    /// This is later propagated to [`ENSO_RELEASE_MODE`] environment variable, which controls
    /// whether development-specific Engine features are disabled.
    pub release_mode: bool,
}
impl Versions {
/// Create a new version from a single SemVer [`Version`] value.
///
/// Edition name will be deduced, to be the same as the version.
/// Whether this version should be treated as a release is deduced from the version's
/// [pre-release](https://semver.org/#spec-item-9) part.
pub fn new(version: Version) -> Self {
let release_mode = !version.pre.as_str().contains(LOCAL_BUILD_PREFIX)
&& !version.pre.as_str().contains("SNAPSHOT");
Versions { version, release_mode }
}
/// Get the edition name.
///
/// By convention, this is the same as the version.
pub fn edition_name(&self) -> String {
self.version.to_string()
}
/// Pretty print the product name and version, e.g. "Enso 2022.1.0".
pub fn pretty_name(&self) -> String {
format!("Enso {}", self.version)
}
@ -84,7 +110,9 @@ impl Versions {
Prerelease::new(LOCAL_BUILD_PREFIX).anyhow_err()
}
pub async fn nightly_prerelease(octocrab: &Octocrab, repo: &RepoContext) -> Result<Prerelease> {
pub async fn nightly_prerelease(
repo: &github::repo::Handle<impl IsRepo>,
) -> Result<Prerelease> {
let date = chrono::Utc::now();
let date = date.format("%F").to_string();
@ -98,7 +126,7 @@ impl Versions {
Ok(pre)
};
let relevant_nightly_versions = nightly_releases(octocrab, repo)
let relevant_nightly_versions = releases_of_kind(repo, Kind::Nightly)
.await?
.filter_map(|release| {
if release.tag_name.contains(&todays_pre_text) {
@ -121,15 +149,61 @@ impl Versions {
unreachable!("After infinite loop.")
}
/// Generate prerelease string for the "release candidate" release.
///
/// We list all the RC releases in the repository, and increment the number of the latest one.
pub async fn rc_prerelease(
version: &Version,
repo: &github::repo::Handle<impl IsRepo>,
) -> Result<Prerelease> {
let relevant_rc_versions = releases_of_kind(repo, Kind::Rc)
.await?
.filter_map(|release| {
let release_version = Version::parse(&release.tag_name).ok()?;
let version_matches = release_version.major == version.major
&& release_version.minor == version.minor
&& release_version.patch == version.patch;
version_matches.then_some(release_version.pre)
})
.collect::<BTreeSet<_>>();
// Generate subsequent RC sub-releases, until a free one is found.
// Should happen rarely.
for index in 0.. {
let pre = Prerelease::from_str(&format!("{}.{}", RC_BUILD_PREFIX, index))?;
if !relevant_rc_versions.contains(&pre) {
return Ok(pre);
}
}
unreachable!("After infinite loop.")
}
    /// Get a git tag that should be applied to a commit released as this version.
    ///
    /// By convention the tag is simply the version string.
    pub fn tag(&self) -> String {
        self.version.to_string()
    }
pub fn publish(&self) -> Result {
    /// Export the version-defining variables (version, edition, release mode) to the current
    /// process environment, the enclosing GitHub Actions workflow, and the step outputs.
    pub async fn publish(&self) -> Result {
        let edition = self.edition_name();
        // NOTE(review): `emit_to_workflow` presumably persists these variables for subsequent
        // workflow steps (GITHUB_ENV) — confirm against its definition.
        ENSO_VERSION.emit_to_workflow(&self.version)?;
        ENSO_EDITION.emit_to_workflow(edition.as_str())?;
        ENSO_RELEASE_MODE.emit_to_workflow(&self.release_mode)?;
        // Some components (like SBT) consume version information through these environment
        // variables.
        ENSO_VERSION.set(&self.version)?;
        ENSO_EDITION.set(&edition)?;
        ENSO_RELEASE_MODE.set(&self.release_mode)?;
        // This is actually used only in some workflows (primarily the release one, where release
        // creation and the asset compilation happen in separate jobs). Still, no harm in doing this
        // always.
        //
        // Note that our output names are the same as the environment variable names.
        ide_ci::actions::workflow::set_output(ENSO_VERSION.name, &ENSO_VERSION.get_raw()?).await?;
        ide_ci::actions::workflow::set_output(ENSO_EDITION.name, &ENSO_EDITION.get_raw()?).await?;
        ide_ci::actions::workflow::set_output(
            ENSO_RELEASE_MODE.name,
            &ENSO_RELEASE_MODE.get_raw()?,
        )
        .await?;
        Ok(())
    }
}
@ -175,7 +249,7 @@ pub fn suggest_next_version(previous: &Version) -> Version {
}
#[instrument(ret)]
pub fn versions_from_env(expected_build_kind: Option<BuildKind>) -> Result<Option<Versions>> {
pub fn versions_from_env(expected_build_kind: Option<Kind>) -> Result<Option<Versions>> {
if let Ok(version) = ENSO_VERSION.get() {
// The currently adopted version scheme uses same string for version and edition name,
// so we enforce it here. There are no fundamental reasons for this requirement.
@ -188,7 +262,7 @@ pub fn versions_from_env(expected_build_kind: Option<BuildKind>) -> Result<Optio
);
}
if let Some(expected_build_kind) = expected_build_kind {
let found_build_kind = BuildKind::deduce(&version)?;
let found_build_kind = Kind::deduce(&version)?;
ensure!(
found_build_kind == expected_build_kind,
"Build kind mismatch. Found: {}, expected: {}.",
@ -204,23 +278,25 @@ pub fn versions_from_env(expected_build_kind: Option<BuildKind>) -> Result<Optio
}
#[instrument(skip_all, ret)]
pub async fn deduce_versions(
octocrab: &Octocrab,
build_kind: BuildKind,
target_repo: Result<&RepoContext>,
pub async fn deduce_or_generate(
repo: Result<&github::repo::Handle<impl IsRepo>>,
kind: Kind,
root_path: impl AsRef<Path>,
) -> Result<Versions> {
debug!("Deciding on version to target.");
if let Some(versions) = versions_from_env(Some(build_kind))? {
if let Some(versions) = versions_from_env(Some(kind))? {
Ok(versions)
} else {
let changelog_path = crate::paths::root_to_changelog(&root_path);
let base_version = base_version(&changelog_path)?;
let version = Version {
pre: match build_kind {
BuildKind::Dev => Versions::local_prerelease()?,
BuildKind::Nightly => Versions::nightly_prerelease(octocrab, target_repo?).await?,
pre: match kind {
Kind::Dev => Versions::local_prerelease()?,
Kind::Nightly => Versions::nightly_prerelease(repo?).await?,
Kind::Rc => Versions::rc_prerelease(&base_version, repo?).await?,
Kind::Stable => todo!(), //Versions::stable(repo?).await?,
},
..base_version(&changelog_path)?
..base_version
};
Ok(Versions::new(version))
}
@ -232,14 +308,14 @@ mod tests {
#[test]
fn is_nightly_test() {
let is_nightly = |text: &str| BuildKind::Nightly.matches(&Version::parse(text).unwrap());
let is_nightly = |text: &str| Kind::Nightly.matches(&Version::parse(text).unwrap());
assert!(is_nightly("2022.1.1-nightly.2022.1.1"));
assert!(is_nightly("2022.1.1-nightly"));
assert!(is_nightly("2022.1.1-nightly.2022.1.1"));
assert!(is_nightly("2022.1.1-nightly.2022.1.1"));
let version = Version::parse("2022.1.1-nightly.2022-06-06.3").unwrap();
assert!(BuildKind::deduce(&version).contains(&BuildKind::Nightly));
assert!(Kind::deduce(&version).contains(&Kind::Nightly));
}
#[test]
@ -250,18 +326,33 @@ mod tests {
}
}
#[derive(clap::ArgEnum, Clone, Copy, PartialEq, Eq, Debug, EnumString, EnumIter, strum::Display)]
#[derive(
clap::ArgEnum,
Clone,
Copy,
PartialEq,
Eq,
Debug,
EnumString,
EnumIter,
strum::Display,
strum::AsRefStr
)]
#[strum(serialize_all = "kebab-case")]
pub enum BuildKind {
pub enum Kind {
Dev,
Nightly,
Rc,
Stable,
}
impl BuildKind {
impl Kind {
pub fn prerelease_prefix(self) -> &'static str {
match self {
BuildKind::Dev => LOCAL_BUILD_PREFIX,
BuildKind::Nightly => NIGHTLY_BUILD_PREFIX,
Kind::Dev => LOCAL_BUILD_PREFIX,
Kind::Nightly => NIGHTLY_BUILD_PREFIX,
Kind::Rc => RC_BUILD_PREFIX,
Kind::Stable => "",
}
}
@ -270,7 +361,7 @@ impl BuildKind {
}
pub fn deduce(version: &Version) -> Result<Self> {
BuildKind::iter()
Kind::iter()
.find(|kind| kind.matches(version))
.context(format!("Failed to deduce build kind for version {version}"))
}

View File

@ -21,10 +21,10 @@ data-encoding = "2.3.2"
derivative = "2.2.0"
derive_more = "0.99.17"
dirs = "4.0.0"
enso-build-base = { path = "../base" }
filetime = "0.2.15"
flate2 = "1.0.22"
flume = "0.10.10"
fn-error-context = "0.2.0"
fs_extra = "1.2.0"
futures = "0.3.17"
futures-util = "0.3.17"
@ -60,7 +60,6 @@ regex = "1.5.4"
reqwest = { version = "0.11.5", default-features = false, features = [
"stream"
] }
snafu = "0.7.0"
semver = { version = "1.0.4", features = ["serde"] }
serde = { version = "1.0.130", features = ["derive"] }
serde_json = "1.0.68"

View File

@ -1,3 +1,6 @@
//! General utilities for working within the GitHub Actions environment.
// ==============
// === Export ===
// ==============
@ -5,6 +8,7 @@
pub mod artifacts;
pub mod context;
pub mod env;
pub mod env_file;
pub mod workflow;
pub use context::Context;

View File

@ -151,7 +151,7 @@ pub async fn upload_compressed_directory(
let archive_path = tempdir.path().join(format!("{artifact_name}.tar.gz"));
info!("Packing {} to {}", path_to_upload.as_ref().display(), archive_path.display());
crate::archive::pack_directory_contents(&archive_path, path_to_upload).await?;
crate::archive::compress_directory(&archive_path, path_to_upload).await?;
info!("Starting upload of {artifact_name}.");
upload_single_file(&archive_path, artifact_name).await?;

View File

@ -1,4 +1,3 @@
#[allow(unused_imports)]
use crate::prelude::*;
use octocrab::models;
@ -20,6 +19,7 @@ pub struct WebhookPayload {
/// Corresponds to https://github.com/actions/toolkit/blob/main/packages/github/src/context.ts
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Context {
/// Webhook payload object that triggered the workflow.
pub payload: WebhookPayload,
pub event_name: String,
pub sha: String,
@ -37,6 +37,7 @@ pub struct Context {
impl Context {
/// Creates a new context from the environment.
#[context("Failed to create a new GitHub context from the environment.")]
pub fn from_env() -> Result<Self> {
let payload: WebhookPayload =
if let Ok(event_path) = crate::actions::env::GITHUB_EVENT_PATH.get() {

View File

@ -3,7 +3,9 @@
use crate::prelude::*;
use crate::define_env_var;
use crate::models::config::RepoContext;
use crate::env::new::RawVariable;
use crate::env::new::TypedVariable;
use crate::github::Repo;
@ -23,7 +25,7 @@ define_env_var! {
/// For a step executing an action, this is the owner and repository name of the action.
/// For example, `actions/checkout`.
GITHUB_ACTION_REPOSITORY, RepoContext;
GITHUB_ACTION_REPOSITORY, Repo;
/// Always set to true when GitHub Actions is running the workflow. You can use this variable
/// to differentiate when tests are being run locally or by GitHub Actions.
@ -45,6 +47,10 @@ define_env_var! {
/// `/home/runner/work/_temp/_runner_file_commands/set_env_87406d6e-4979-4d42-98e1-3dab1f48b13a`.
GITHUB_ENV, PathBuf;
/// Path to the environment file with step's output parameters. This file is unique to the
/// current step and changes for each step in a job.
GITHUB_OUTPUT, PathBuf;
/// The name of the event that triggered the workflow. For example, `workflow_dispatch`.
GITHUB_EVENT_NAME, String;
@ -89,7 +95,7 @@ define_env_var! {
GITHUB_REF_TYPE, String;
/// The owner and repository name. For example, octocat/Hello-World.
GITHUB_REPOSITORY, RepoContext;
GITHUB_REPOSITORY, Repo;
/// The repository owner's name. For example, octocat.
GITHUB_REPOSITORY_OWNER, String;
@ -123,7 +129,7 @@ define_env_var! {
/// This file is unique to the current step and changes for each step in a job. For example,
/// `/home/rob/runner/_layout/_work/_temp/_runner_file_commands/step_summary_1cb22d7f-5663-41a8-9ffc-13472605c76c`.
/// For more information, see "Workflow commands for GitHub Actions."
GITHUB_STEP_SUMMARY, String;
GITHUB_STEP_SUMMARY, PathBuf;
/// The name of the workflow. For example, `My test workflow`. If the workflow file doesn't
/// specify a name, the value of this variable is the full path of the workflow file in the
@ -166,3 +172,9 @@ pub fn is_self_hosted() -> Result<bool> {
let name = RUNNER_NAME.get_raw()?;
Ok(!name.starts_with("GitHub Actions"))
}
/// Set a typed environment variable in the current process and emit it to the GitHub
/// Actions environment file, so it is also visible to subsequent steps of the job.
pub async fn set_and_emit<V>(var: &V, value: &V::Borrowed) -> Result
where V: TypedVariable {
    // Serialize the typed value into its raw string form first, so an encoding
    // failure is reported before any environment mutation happens.
    let value_raw = var.generate(value)?;
    crate::actions::workflow::set_env(var.name(), &value_raw).await
}

View File

@ -0,0 +1,83 @@
//! During the execution of a workflow, the runner generates temporary files that can be used to
//! perform certain actions. The path to these files are exposed via environment variables.
//!
//! See <https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files> for more information.
use crate::prelude::*;
use crate::actions::env;
use crate::env::new::PathBufVariable;
// ============================
// === GitHub-defined files ===
// ============================
/// Environment file that can be used to set environment variables for the subsequent steps of the
/// current job. See: <https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-environment-variable>
pub static GITHUB_ENV: EnvironmentFile = EnvironmentFile::new(env::GITHUB_ENV);
/// Environment file used to set current step's output parameters. See: <https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-output-parameter>
pub static GITHUB_OUTPUT: EnvironmentFile = EnvironmentFile::new(env::GITHUB_OUTPUT);
/// Environment file used to store job's summary. See: <https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary>
pub static GITHUB_STEP_SUMMARY: EnvironmentFile = EnvironmentFile::new(env::GITHUB_STEP_SUMMARY);
/// File with environment variables that will be set for subsequent steps of the current job.
pub static GITHUB_PATH: EnvironmentFile = EnvironmentFile::new(env::GITHUB_PATH);
// =======================
// === EnvironmentFile ===
// =======================
/// Structure that handles access to the environment file.
///
/// Contains mutex for synchronization, so the different threads can access the file safely.
#[derive(Debug)]
pub struct EnvironmentFile {
    /// Environment variable that contains path to the file.
    pub env_var: PathBufVariable,
    /// Mutex for synchronization.
    // NOTE(review): this only serializes accesses made through this struct within the current
    // process; it cannot guard against other processes writing to the same file.
    mutex: tokio::sync::Mutex<()>,
}
impl EnvironmentFile {
    /// Create a new environment file accessor.
    pub const fn new(env_var: PathBufVariable) -> Self {
        Self { env_var, mutex: tokio::sync::Mutex::const_new(()) }
    }
    /// Read the file contents.
    pub async fn read(&self) -> Result<String> {
        // Hold the lock so a concurrent append cannot interleave with the read.
        let _guard = self.mutex.lock().await;
        let path = self.env_var.get()?;
        crate::fs::tokio::read_to_string(path).await
    }
    /// Appends line to the file.
    pub async fn append_line(&self, line: impl AsRef<str>) -> Result {
        let _guard = self.mutex.lock().await;
        let path = self.env_var.get()?;
        let mut line = line.as_ref().to_string();
        // Ensure the appended entry is newline-terminated, so consecutive appends
        // do not run together on a single line.
        if !line.ends_with('\n') {
            line.push('\n');
        };
        crate::fs::tokio::append(path, line).await
    }
    /// Append key-value pair to the file.
    ///
    /// Automatically generates a unique delimiter, so the value is allowed to contain `=` or
    /// newline characters.
    pub async fn append_key_value(&self, key: impl AsRef<str>, value: impl AsRef<str>) -> Result {
        let key = key.as_ref();
        let value = value.as_ref();
        // Random UUID makes a collision with user data practically impossible;
        // the `ensure!`s below reject the pathological case outright.
        let delimiter = format!("ghadelimiter_{}", Uuid::new_v4());
        ensure!(!key.contains(&delimiter), "Key cannot contain delimiter {}.", delimiter);
        ensure!(!value.contains(&delimiter), "Value cannot contain delimiter {}.", delimiter);
        // `key<<DELIM … DELIM` is the GitHub Actions "multiline" environment-file syntax.
        let line = format!("{key}<<{delimiter}\n{value}\n{delimiter}");
        self.append_line(line).await
    }
}

View File

@ -1,8 +1,7 @@
use crate::prelude::*;
use crate::actions::env;
use std::io::Write;
use crate::actions::env_file;
// ==============
@ -22,10 +21,13 @@ pub fn is_in_env() -> bool {
/// Sets an action's output parameter.
///
/// See: <https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#setting-an-output-parameter>
pub fn set_output(name: &str, value: &impl ToString) {
let value = value.to_string();
debug!("Setting GitHub Actions step output {name} to {value}");
println!("::set-output name={name}::{value}");
pub async fn set_output(name: &str, value: &(impl ToString + ?Sized)) -> Result {
    // Outside of the GitHub Actions environment this is deliberately a no-op,
    // so local runs of the build script do not fail on a missing GITHUB_OUTPUT file.
    if is_in_env() {
        let value = value.to_string();
        debug!("Setting GitHub Actions step output {name} to {value}.");
        // Replaces the deprecated `::set-output` workflow command with the
        // GITHUB_OUTPUT environment-file mechanism.
        env_file::GITHUB_OUTPUT.append_key_value(name, &value).await?;
    }
    Ok(())
}
/// Prints a debug message to the log.
@ -44,16 +46,18 @@ pub fn debug(message: &str) {
/// variables are case-sensitive and you can include punctuation.
///
/// Just logs and sets variable locally if used under non-GH CI.
pub fn set_env(name: &str, value: &impl ToString) -> Result {
pub fn set_env(name: impl AsRef<str>, value: &impl ToString) -> BoxFuture<'static, Result> {
let name = name.as_ref().to_string();
let value_string = value.to_string();
debug!("Will try writing Github Actions environment variable: {name}={value_string}");
std::env::set_var(name, value.to_string());
if is_in_env() {
let env_file = env::GITHUB_ENV.get()?;
let mut file = std::fs::OpenOptions::new().create_new(false).append(true).open(env_file)?;
writeln!(file, "{name}={value_string}")?;
async move {
std::env::set_var(&name, &value_string);
if is_in_env() {
debug!("Setting GitHub Actions environment variable {name} to {value_string}");
env_file::GITHUB_ENV.append_key_value(name, value_string).await?;
}
Ok(())
}
Ok(())
.boxed()
}
pub fn mask_text(text: impl AsRef<str>) {

View File

@ -3,11 +3,14 @@ use crate::prelude::*;
use crate::env::new::RawVariable;
use heck::ToKebabCase;
use std::collections::btree_map::Entry;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
pub const DEFAULT_TIMEOUT_IN_MINUTES: u32 = 360;
/// Wrap a GitHub Actions expression into the `${{ … }}` interpolation syntax.
pub fn wrap_expression(expression: impl AsRef<str>) -> String {
    let body = expression.as_ref();
    ["${{ ", body, " }}"].concat()
}
@ -86,7 +89,12 @@ pub fn shell_os(os: OS, command_line: impl Into<String>) -> Step {
}
pub fn shell(command_line: impl Into<String>) -> Step {
Step { run: Some(command_line.into()), env: once(github_token_env()).collect(), ..default() }
Step {
run: Some(command_line.into()),
env: once(github_token_env()).collect(),
timeout_minutes: Some(DEFAULT_TIMEOUT_IN_MINUTES),
..default()
}
}
/// Invoke our entry point to the build scripts, i.e. the `./run` script.
@ -428,22 +436,28 @@ impl Event {
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Job {
pub name: String,
pub name: String,
#[serde(skip_serializing_if = "BTreeSet::is_empty")]
pub needs: BTreeSet<String>,
pub runs_on: Vec<RunnerLabel>,
pub steps: Vec<Step>,
#[serde(skip_serializing_if = "BTreeMap::is_empty")]
pub outputs: BTreeMap<String, String>,
pub needs: BTreeSet<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub strategy: Option<Strategy>,
pub r#if: Option<String>,
pub runs_on: Vec<RunnerLabel>,
pub steps: Vec<Step>,
#[serde(skip_serializing_if = "Option::is_none")]
pub concurrency: Option<Concurrency>,
#[serde(skip_serializing_if = "BTreeMap::is_empty")]
pub env: BTreeMap<String, String>,
pub outputs: BTreeMap<String, String>,
#[serde(skip_serializing_if = "BTreeMap::is_empty")]
pub env: BTreeMap<String, String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub strategy: Option<Strategy>,
#[serde(skip_serializing_if = "Option::is_none")]
pub timeout_minutes: Option<u32>,
}
impl Job {
pub fn new(name: impl Into<String>) -> Self {
Self { name: name.into(), ..default() }
Self { name: name.into(), timeout_minutes: Some(DEFAULT_TIMEOUT_IN_MINUTES), ..default() }
}
pub fn expose_output(&mut self, step_id: impl AsRef<str>, output_name: impl Into<String>) {
@ -525,21 +539,25 @@ impl Strategy {
#[serde(rename_all = "kebab-case")]
pub struct Step {
#[serde(skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
pub id: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
pub r#if: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub uses: Option<String>,
pub name: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub run: Option<String>,
pub uses: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub r#if: Option<String>,
pub run: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub with: Option<step::Argument>,
pub shell: Option<Shell>,
#[serde(skip_serializing_if = "Option::is_none")]
pub with: Option<step::Argument>,
#[serde(skip_serializing_if = "BTreeMap::is_empty")]
pub env: BTreeMap<String, String>,
pub env: BTreeMap<String, String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub shell: Option<Shell>,
pub continue_on_error: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub timeout_minutes: Option<u32>,
}
impl Step {
@ -553,7 +571,20 @@ impl Step {
}
pub fn with_env(mut self, name: impl Into<String>, value: impl Into<String>) -> Self {
self.env.insert(name.into(), value.into());
let name = name.into();
let value = value.into();
let entry = self.env.entry(name);
if let Entry::Occupied(mut entry) = entry {
warn!(
"Overriding environment variable `{}` with value `{}` (old value was `{}`)",
entry.key(),
value,
entry.get(),
);
*entry.get_mut() = value;
} else {
entry.or_insert(value);
}
self
}

View File

@ -1,46 +0,0 @@
use crate::prelude::*;
use anyhow::Error;
/// Extension methods for `std::result::Result`.
pub trait ResultExt<T, E> {
    /// Convert the error into [`anyhow::Error`], yielding the crate-wide `Result` alias.
    fn anyhow_err(self) -> Result<T>;
    /// Treat the `Ok` value as a future and flatten the nesting: an `Ok(fut)` resolves
    /// to the future's own result, an `Err` resolves immediately with the converted error.
    #[allow(clippy::type_complexity)]
    fn flatten_fut(
        self,
    ) -> futures::future::Either<
        std::future::Ready<std::result::Result<T::Ok, T::Error>>,
        futures::future::IntoFuture<T>,
    >
    where T: TryFuture<Error: From<E>>;
    // fn flatten_fut(self) -> impl Future<Output = std::result::Result<T::Ok, T::Error>>
    // where T: TryFuture<Error: From<E>> {
    //     async move { fut?.into_future().await }
    // }
    // fn flatten_fut(self)
    // where T: TryFuture;
}
impl<T, E> ResultExt<T, E> for std::result::Result<T, E>
where E: Into<Error>
{
    fn anyhow_err(self) -> Result<T> {
        self.map_err(E::into)
    }
    fn flatten_fut(
        self,
    ) -> futures::future::Either<
        std::future::Ready<std::result::Result<T::Ok, T::Error>>,
        futures::future::IntoFuture<T>,
    >
    where T: TryFuture<Error: From<E>> {
        match self {
            // `Ok`: defer to the inner future.
            Ok(fut) => fut.into_future().right_future(),
            // `Err`: resolve at once with the error converted into the future's error type.
            Err(e) => ready(Err(T::Error::from(e))).left_future(),
        }
    }
}

View File

@ -115,7 +115,7 @@ pub fn is_archive_name(path: impl AsRef<Path>) -> bool {
skip_all,
fields(src=%root_directory.as_ref().display(), dest=%output_archive.as_ref().display()),
err)]
pub async fn pack_directory_contents(
pub async fn compress_directory(
output_archive: impl AsRef<Path>,
root_directory: impl AsRef<Path>,
) -> Result {

View File

@ -2,7 +2,7 @@ use crate::prelude::*;
use crate::cache::Cache;
use crate::cache::Storable;
use crate::models::config::RepoContext;
use crate::github::Repo;
use octocrab::models::ArtifactId;
@ -10,7 +10,7 @@ use octocrab::models::ArtifactId;
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Key {
pub repository: RepoContext,
pub repository: Repo,
pub artifact_id: ArtifactId,
}
@ -34,7 +34,8 @@ impl Storable for ExtractedArtifact {
async move {
let ExtractedArtifact { client, key } = this;
let Key { artifact_id, repository } = key;
repository.download_and_unpack_artifact(&client, artifact_id, &store).await?;
let repository = repository.handle(&client);
repository.download_and_unpack_artifact(artifact_id, &store).await?;
Ok(())
}
.boxed()

View File

@ -10,6 +10,7 @@ use crate::io::web::stream_response_to_file;
use derivative::Derivative;
use headers::HeaderMap;
use reqwest::Client;
use reqwest::ClientBuilder;
use reqwest::IntoUrl;
use reqwest::Response;
@ -37,7 +38,7 @@ impl DownloadFile {
pub fn new(url: impl IntoUrl) -> Result<Self> {
Ok(Self {
key: Key { url: url.into_url()?, additional_headers: default() },
client: default(),
client: ClientBuilder::new().user_agent("enso-build").build()?,
})
}
@ -93,3 +94,8 @@ impl Storable for DownloadFile {
self.key.clone()
}
}
pub async fn download(cache: Cache, url: impl IntoUrl) -> Result<PathBuf> {
let download = DownloadFile::new(url)?;
cache.get(download).await
}

View File

@ -1,7 +1,7 @@
use crate::prelude::*;
use crate::cache::goodie::Goodie;
use crate::models::config::RepoContext;
use crate::github::Repo;
use crate::programs::java;
use crate::programs::java::JAVA_HOME;
use crate::programs::Java;
@ -36,8 +36,8 @@ pub async fn find_graal_version() -> Result<Version> {
}
/// The repository that contains the GraalVM CE releases for download.
pub fn ce_build_repository() -> RepoContext {
RepoContext { owner: GITHUB_ORGANIZATION.into(), name: CE_BUILDS_REPOSITORY.into() }
pub fn ce_build_repository() -> Repo {
Repo { owner: GITHUB_ORGANIZATION.into(), name: CE_BUILDS_REPOSITORY.into() }
}
/// Description necessary to download and install GraalVM.
@ -58,7 +58,8 @@ impl Goodie for GraalVM {
let client = self.client.clone();
let repo = ce_build_repository();
async move {
let release = repo.find_release_by_text(&client, &graal_version.to_string()).await?;
let repo = repo.handle(&client);
let release = repo.find_release_by_text(&graal_version.to_string()).await?;
crate::github::find_asset_url_by_text(&release, &platform_string).cloned()
}
.boxed()

View File

@ -5,6 +5,13 @@ use std::collections::BTreeSet;
use unicase::UniCase;
// ==============
// === Export ===
// ==============
pub mod known;
pub fn current_dir() -> Result<PathBuf> {
std::env::current_dir().context("Failed to get current directory.")
@ -16,6 +23,20 @@ pub fn set_current_dir(path: impl AsRef<Path>) -> Result {
std::env::set_current_dir(&path).anyhow_err()
}
/// Define typed accessors for environment variables. Supported types inclide `String`, `PathBuf`,
/// and other types that implement `FromStr`.
///
/// Example:
/// ```
/// # use std::path::PathBuf;
/// # use ide_ci::define_env_var;
/// # use ide_ci::env::new::TypedVariable;
/// define_env_var! {
/// /// Documentation.
/// ENV_VAR_NAME, PathBuf;
/// };
/// let path = ENV_VAR_NAME.get().unwrap_or_else(|_error| PathBuf::from("default"));
/// ```
#[macro_export]
macro_rules! define_env_var {
() => {};
@ -42,10 +63,6 @@ macro_rules! define_env_var {
};
}
pub mod known;
pub mod new {
use super::*;
use crate::program::command::FallibleManipulator;
@ -95,22 +112,21 @@ pub mod new {
self.parse(self.get_raw()?.as_str())
}
fn set(&self, value: impl AsRef<Self::Borrowed>) -> Result {
let value = self.generate(value.as_ref())?;
fn set(&self, value: &Self::Borrowed) -> Result {
let value = self.generate(value)?;
self.set_raw(value);
Ok(())
}
fn set_workflow_output(&self, value: impl Borrow<Self::Borrowed>) -> Result {
crate::actions::workflow::set_output(self.name(), &self.generate(value.borrow())?);
Ok(())
}
fn set_workflow_env(&self, value: impl Borrow<Self::Borrowed>) -> Result {
crate::actions::workflow::set_env(self.name(), &self.generate(value.borrow())?)
}
fn emit_to_workflow(&self, value: impl Borrow<Self::Borrowed>) -> Result {
self.set_workflow_output(value.borrow())?;
self.set_workflow_env(value.borrow())
fn set_workflow_env(
&self,
value: impl Borrow<Self::Borrowed>,
) -> BoxFuture<'static, Result> {
let name = self.name().to_string();
let value = self.generate(value.borrow());
value
.and_then_async(move |value| crate::actions::workflow::set_env(name, &value))
.boxed()
}
}
@ -150,9 +166,10 @@ pub mod new {
}
}
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, derive_more::Deref)]
pub struct SimpleVariable<Value, Borrowed: ?Sized = Value> {
pub name: Cow<'static, str>,
#[deref]
pub name: &'static str,
pub phantom_data: PhantomData<Value>,
pub phantom_data2: PhantomData<Borrowed>,
}
@ -163,9 +180,9 @@ pub mod new {
}
}
impl<Value, Borrowed: ?Sized> AsRef<str> for SimpleVariable<Value, Borrowed> {
impl<Value, Borrowed: ?Sized> const AsRef<str> for SimpleVariable<Value, Borrowed> {
fn as_ref(&self) -> &str {
&self.name
self.name
}
}
@ -183,17 +200,13 @@ pub mod new {
impl<Value, Borrowed: ?Sized> SimpleVariable<Value, Borrowed> {
pub const fn new(name: &'static str) -> Self {
Self {
name: Cow::Borrowed(name),
phantom_data: PhantomData,
phantom_data2: PhantomData,
}
Self { name, phantom_data: PhantomData, phantom_data2: PhantomData }
}
}
impl<Value, Borrowed: ?Sized> RawVariable for SimpleVariable<Value, Borrowed> {
impl<Value, Borrowed: ?Sized> const RawVariable for SimpleVariable<Value, Borrowed> {
fn name(&self) -> &str {
&self.name
self.name
}
}
@ -266,110 +279,18 @@ pub mod new {
}
}
//
//
// impl<'a, T> SpecFromIter<T> for std::slice::Iter<'a, T> {
// fn f(&self) {}
// }
#[derive(Clone, Copy, Debug, Display, Ord, PartialOrd, Eq, PartialEq)]
pub struct StrLikeVariable {
pub name: &'static str,
}
impl StrLikeVariable {
pub const fn new(name: &'static str) -> Self {
Self { name }
}
}
impl Variable for StrLikeVariable {
const NAME: &'static str = "";
fn name(&self) -> &str {
self.name
}
}
pub trait Variable {
const NAME: &'static str;
type Value: FromString = String;
fn format(&self, value: &Self::Value) -> String
where Self::Value: ToString {
value.to_string()
}
fn name(&self) -> &str {
Self::NAME
}
fn fetch(&self) -> Result<Self::Value> {
self.fetch_as()
}
fn fetch_as<T: FromString>(&self) -> Result<T> {
self.fetch_string()?.parse2()
}
fn fetch_string(&self) -> Result<String> {
expect_var(self.name())
}
fn fetch_os_string(&self) -> Result<OsString> {
expect_var_os(self.name())
}
fn set(&self, value: &Self::Value)
where Self::Value: ToString {
debug!("Setting env {}={}", self.name(), self.format(value));
std::env::set_var(self.name(), self.format(value))
}
fn set_os(&self, value: &Self::Value)
where Self::Value: AsRef<OsStr> {
std::env::set_var(self.name(), value)
}
fn set_path<P>(&self, value: &P)
where
Self::Value: AsRef<Path>,
P: AsRef<Path>, {
std::env::set_var(self.name(), value.as_ref())
}
fn emit_env(&self, value: &Self::Value) -> Result
where Self::Value: ToString {
crate::actions::workflow::set_env(self.name(), value)
}
fn emit(&self, value: &Self::Value) -> Result
where Self::Value: ToString {
self.emit_env(value)?;
crate::actions::workflow::set_output(self.name(), value);
Ok(())
}
fn is_set(&self) -> bool {
self.fetch_os_string().is_ok()
}
fn remove(&self) {
std::env::remove_var(self.name())
}
}
const PATH_ENVIRONMENT_NAME: &str = "PATH";
pub fn expect_var(name: impl AsRef<str>) -> Result<String> {
let name = name.as_ref();
std::env::var(name).context(anyhow!("Missing environment variable {}.", name))
std::env::var(name).with_context(|| anyhow!("Missing environment variable {name}."))
}
pub fn expect_var_os(name: impl AsRef<OsStr>) -> Result<OsString> {
let name = name.as_ref();
std::env::var_os(name)
.ok_or_else(|| anyhow!("Missing environment variable {}.", name.to_string_lossy()))
.with_context(|| anyhow!("Missing environment variable {}.", name.to_string_lossy()))
}
pub fn prepend_to_path(path: impl Into<PathBuf>) -> Result {
@ -383,17 +304,17 @@ pub fn prepend_to_path(path: impl Into<PathBuf>) -> Result {
Ok(())
}
pub async fn fix_duplicated_env_var(var_name: impl AsRef<OsStr>) -> Result {
let var_name = var_name.as_ref();
let mut paths = indexmap::IndexSet::new();
while let Ok(path) = std::env::var(var_name) {
paths.extend(std::env::split_paths(&path));
std::env::remove_var(var_name);
}
std::env::set_var(var_name, std::env::join_paths(paths)?);
Ok(())
}
// pub async fn fix_duplicated_env_var(var_name: impl AsRef<OsStr>) -> Result {
// let var_name = var_name.as_ref();
//
// let mut paths = indexmap::IndexSet::new();
// while let Ok(path) = std::env::var(var_name) {
// paths.extend(std::env::split_paths(&path));
// std::env::remove_var(var_name);
// }
// std::env::set_var(var_name, std::env::join_paths(paths)?);
// Ok(())
// }
#[derive(Clone, Debug)]
pub enum Action {

View File

@ -5,15 +5,8 @@
pub mod child;
pub mod clap;
pub mod command;
pub mod from_string;
pub mod future;
pub mod iterator;
pub mod maps;
pub mod octocrab;
pub mod os;
pub mod output;
pub mod path;
pub mod reqwest;
pub mod result;
pub mod str;
pub mod version;

View File

@ -1,100 +0,0 @@
use crate::prelude::*;
use serde::de::DeserializeOwned;
/// Convenience extension methods for path-like types.
pub trait PathExt: AsRef<Path> {
    /// Join multiple path segments onto `self`, returning a new `PathBuf`.
    fn join_iter<P: AsRef<Path>>(&self, segments: impl IntoIterator<Item = P>) -> PathBuf {
        let mut ret = self.as_ref().to_path_buf();
        ret.extend(segments);
        ret
    }
    /// Strips the leading `\\?\` prefix from Windows paths if present.
    fn without_verbatim_prefix(&self) -> &Path {
        self.as_str().strip_prefix(r"\\?\").map_or(self.as_ref(), Path::new)
    }
    /// Appends a new extension to the file.
    ///
    /// Does not try to replace previous extension, unlike `set_extension`.
    /// Does nothing when given extension string is empty.
    ///
    /// ```
    /// use ide_ci::extensions::path::PathExt;
    /// use std::path::PathBuf;
    ///
    /// let path = PathBuf::from("foo.tar").with_appended_extension("gz");
    /// assert_eq!(path, PathBuf::from("foo.tar.gz"));
    ///
    /// let path = PathBuf::from("foo").with_appended_extension("zip");
    /// assert_eq!(path, PathBuf::from("foo.zip"));
    /// ```
    fn with_appended_extension(&self, extension: impl AsRef<OsStr>) -> PathBuf {
        if extension.as_ref().is_empty() {
            return self.as_ref().into();
        } else {
            let mut ret = self.as_ref().to_path_buf().into_os_string();
            ret.push(".");
            ret.push(extension.as_ref());
            ret.into()
        }
    }
    /// Read the file and deserialize its contents from JSON into `T`.
    #[context("Failed to deserialize file `{}` as type `{}`.", self.as_ref().display(), std::any::type_name::<T>())]
    fn read_to_json<T: DeserializeOwned>(&self) -> Result<T> {
        let content = crate::fs::read_to_string(self)?;
        serde_json::from_str(&content).anyhow_err()
    }
    /// Serialize `value` as JSON and write it to this path, creating/overwriting the file.
    fn write_as_json<T: Serialize>(&self, value: &T) -> Result {
        trace!("Writing JSON to {}.", self.as_ref().display());
        let file = crate::fs::create(self)?;
        serde_json::to_writer(file, value).anyhow_err()
    }
    /// Serialize `value` as YAML and write it to this path, creating/overwriting the file.
    fn write_as_yaml<T: Serialize>(&self, value: &T) -> Result {
        trace!("Writing YAML to {}.", self.as_ref().display());
        let file = crate::fs::create(self)?;
        serde_yaml::to_writer(file, value).anyhow_err()
    }
    /// View the path as `&str`.
    ///
    /// Panics when the path is not valid UTF-8 (`to_str` returns `None`).
    fn as_str(&self) -> &str {
        self.as_ref().to_str().unwrap()
    }
    /// Split path to components and collect them into a new PathBuf.
    ///
    /// This is useful for `/` -> native separator conversion.
    fn normalize(&self) -> PathBuf {
        self.as_ref().components().collect()
    }
    /// Like `parent` but provides a sensible error message if the path has no parent.
    fn try_parent(&self) -> Result<&Path> {
        self.as_ref()
            .parent()
            .with_context(|| format!("Failed to get parent of path `{}`.", self.as_ref().display()))
    }
}
// Blanket implementation: every path-like type gets the extension methods.
impl<T: AsRef<Path>> PathExt for T {}
/// Format a path for human-readable output, delegating to [`Path::display`].
///
/// Useful e.g. as a `derivative`-style custom formatter for path fields.
pub fn display_fmt(path: &Path, f: &mut Formatter) -> std::fmt::Result {
    write!(f, "{}", path.display())
}
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;
    /// `without_verbatim_prefix` must strip the Windows verbatim (`\\?\`) prefix
    /// and leave already-plain paths untouched.
    #[test]
    fn stripping_unc_prefix() {
        let path_with_unc = Path::new(r"\\?\H:\NBO\ci-build\target\debug\enso-build2.exe");
        let path_without_unc = Path::new(r"H:\NBO\ci-build\target\debug\enso-build2.exe");
        assert_eq!(path_with_unc.without_verbatim_prefix(), path_without_unc);
        assert_eq!(path_without_unc.without_verbatim_prefix(), path_without_unc);
    }
}

View File

@ -1,28 +0,0 @@
use crate::prelude::*;
use anyhow::Context;
use std::any::type_name;
/// Parsing helpers for string-like types.
pub trait StrLikeExt {
    // FIXME: this needs better name!
    /// Parse the string into `T` using the crate's `FromString` machinery.
    fn parse2<T: FromString>(&self) -> Result<T>;
    /// Parse into an intermediate type `T`, then convert it into `R` via `TryInto`,
    /// attaching a descriptive context on conversion failure.
    fn parse_through<T, R>(&self) -> Result<R>
    where
        T: FromString + TryInto<R>,
        <T as TryInto<R>>::Error: Into<anyhow::Error>, {
        self.parse2::<T>()?.try_into().anyhow_err().context(format!(
            "Failed to convert {} => {}.",
            type_name::<Self>(),
            type_name::<R>(),
        ))
    }
}
// Blanket implementation for anything that can be viewed as `&str`.
impl<T: AsRef<str>> StrLikeExt for T {
    fn parse2<U: FromString>(&self) -> Result<U> {
        U::from_str(self.as_ref())
    }
}

View File

@ -3,7 +3,6 @@ use crate::prelude::*;
use async_compression::tokio::bufread::GzipEncoder;
use async_compression::Level;
use fs_extra::dir::CopyOptions;
use std::fs::File;
// ==============
@ -13,134 +12,14 @@ use std::fs::File;
pub mod tokio;
pub mod wrappers;
pub use wrappers::*;
pub use enso_build_base::fs::*;
/////////////////////////////
/// Like the standard version but will create any missing parent directories from the path.
#[context("Failed to write path: {}", path.as_ref().display())]
pub fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
create_parent_dir_if_missing(&path)?;
wrappers::write(&path, &contents)
}
/// Serialize the data to JSON text and write it to the file.
///
/// See [`write`].
#[context("Failed to write path: {}", path.as_ref().display())]
pub fn write_json(path: impl AsRef<Path>, contents: &impl Serialize) -> Result {
let contents = serde_json::to_string(contents)?;
write(&path, &contents)
}
/// Like the standard version but will create any missing parent directories from the path.
#[context("Failed to open path for writing: {}", path.as_ref().display())]
pub fn create(path: impl AsRef<Path>) -> Result<File> {
create_parent_dir_if_missing(&path)?;
wrappers::create(&path)
}
///////////////////////////
#[context("Failed to read the file: {}", path.as_ref().display())]
pub fn read_string_into<T: FromString>(path: impl AsRef<Path>) -> Result<T> {
read_to_string(&path)?.parse2()
}
/// Create a directory (and all missing parent directories),
///
/// Does not fail when a directory already exists.
#[context("Failed to create directory {}", path.as_ref().display())]
pub fn create_dir_if_missing(path: impl AsRef<Path>) -> Result {
    let result = std::fs::create_dir_all(&path);
    match result {
        // An already-existing directory is the desired end state, so a race with
        // another creator is not an error.
        Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => Ok(()),
        result => result.anyhow_err(),
    }
}
/// Create a parent directory for path (and all missing parent directories),
///
/// Does not fail when a directory already exists.
#[context("Failed to create parent directory for {}", path.as_ref().display())]
pub fn create_parent_dir_if_missing(path: impl AsRef<Path>) -> Result<PathBuf> {
if let Some(parent) = path.as_ref().parent() {
create_dir_if_missing(parent)?;
Ok(parent.into())
} else {
bail!("No parent directory for path {}.", path.as_ref().display())
}
}
/// Remove a directory with all its subtree.
///
/// Does not fail if the directory is not found.
#[tracing::instrument(fields(path = %path.as_ref().display()))]
#[context("Failed to remove directory {}", path.as_ref().display())]
pub fn remove_dir_if_exists(path: impl AsRef<Path>) -> Result {
let result = std::fs::remove_dir_all(&path);
match result {
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
result => result.anyhow_err(),
}
}
/// Remove a regular file.
///
/// Does not fail if the file is not found.
#[tracing::instrument(fields(path = %path.as_ref().display()))]
#[context("Failed to remove file {}", path.as_ref().display())]
pub fn remove_file_if_exists(path: impl AsRef<Path>) -> Result<()> {
let result = std::fs::remove_file(&path);
match result {
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
result => result.anyhow_err(),
}
}
/// Remove an entry, be it a directory (with its subtree) or a regular file.
///
/// Does not fail if the file is not found.
#[context("Failed to remove entry {} (if exists)", path.as_ref().display())]
pub fn remove_if_exists(path: impl AsRef<Path>) -> Result {
    let path = path.as_ref();
    // Dispatch on the entry kind; a nonexistent path falls into the file branch,
    // which tolerates `NotFound`.
    if path.is_dir() {
        remove_dir_if_exists(path)
    } else {
        remove_file_if_exists(path)
    }
}
#[context("Failed to create symlink {} => {}", src.as_ref().display(), dst.as_ref().display())]
pub fn symlink_auto(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result {
create_parent_dir_if_missing(&dst)?;
symlink::symlink_auto(&src, &dst).anyhow_err()
}
/// Recreate directory, so it exists and is empty.
pub fn reset_dir(path: impl AsRef<Path>) -> Result {
let path = path.as_ref();
debug!("Will reset directory {}", path.display());
remove_dir_if_exists(path)?;
create_dir_if_missing(path)?;
Ok(())
}
/// Succeed only when the given path exists; otherwise fail with a descriptive error.
pub fn require_exist(path: impl AsRef<Path>) -> Result {
    let path = path.as_ref();
    // Guard clause: bail out early on the missing-path case.
    if !path.exists() {
        bail!("{} does not exist.", path.display())
    }
    trace!("{} does exist.", path.display());
    Ok(())
}
#[tracing::instrument(skip_all, fields(
src = %source_file.as_ref().display(),
dest = %dest_dir.as_ref().display()),
err)]
src = %source_file.as_ref().display(),
dest = %dest_dir.as_ref().display()),
err)]
pub fn copy_to(source_file: impl AsRef<Path>, dest_dir: impl AsRef<Path>) -> Result {
require_exist(&source_file)?;
create_dir_if_missing(dest_dir.as_ref())?;
@ -153,9 +32,9 @@ pub fn copy_to(source_file: impl AsRef<Path>, dest_dir: impl AsRef<Path>) -> Res
#[tracing::instrument(skip_all, fields(
src = %source_file.as_ref().display(),
dest = %destination_file.as_ref().display()),
err)]
src = %source_file.as_ref().display(),
dest = %destination_file.as_ref().display()),
err)]
pub fn copy(source_file: impl AsRef<Path>, destination_file: impl AsRef<Path>) -> Result {
let source_file = source_file.as_ref();
let destination_file = destination_file.as_ref();
@ -168,7 +47,7 @@ pub fn copy(source_file: impl AsRef<Path>, destination_file: impl AsRef<Path>) -
options.content_only = true;
fs_extra::dir::copy(source_file, destination_file, &options)?;
} else {
wrappers::copy(source_file, destination_file)?;
enso_build_base::fs::wrappers::copy(source_file, destination_file)?;
}
} else {
bail!("Cannot copy to the root path: {}", destination_file.display());
@ -176,9 +55,6 @@ pub fn copy(source_file: impl AsRef<Path>, destination_file: impl AsRef<Path>) -
Ok(())
}
/// Check whether two paths refer to the same existing filesystem entry,
/// by comparing their canonicalized forms.
///
/// NOTE(review): canonicalization fails for nonexistent paths, so this returns `Err`
/// (not `Ok(false)`) when either path does not exist — confirm callers expect that.
pub fn same_existing_path(source: impl AsRef<Path>, destination: impl AsRef<Path>) -> Result<bool> {
    Ok(canonicalize(source)? == canonicalize(destination)?)
}
pub async fn mirror_directory(source: impl AsRef<Path>, destination: impl AsRef<Path>) -> Result {
create_dir_if_missing(destination.as_ref())?;
@ -195,47 +71,6 @@ pub async fn mirror_directory(source: impl AsRef<Path>, destination: impl AsRef<
}
}
#[context("Failed because the path does not point to a directory: {}", path.as_ref().display())]
pub fn expect_dir(path: impl AsRef<Path>) -> Result {
let filetype = metadata(&path)?.file_type();
if filetype.is_dir() {
Ok(())
} else {
bail!("File is not directory, its type is: {filetype:?}")
}
}
/// Fail unless the path points to an existing regular file.
#[context("Failed because the path does not point to a regular file: {}", path.as_ref().display())]
pub fn expect_file(path: impl AsRef<Path>) -> Result {
    let filetype = metadata(&path)?.file_type();
    // Guard clause: symlinks, directories and special files are all rejected.
    if !filetype.is_file() {
        bail!("File is not a regular file, its type is: {filetype:?}")
    }
    Ok(())
}
/// Add the owner-execute permission bit to the file, keeping all other mode bits intact.
#[cfg(not(target_os = "windows"))]
#[context("Failed to update permissions on `{}`", path.as_ref().display())]
pub fn allow_owner_execute(path: impl AsRef<Path>) -> Result {
    use crate::anyhow::ResultExt;
    use std::os::unix::prelude::*;
    debug!("Setting executable permission on {}", path.as_ref().display());
    // The `S_IXUSR` (owner may execute) bit.
    const OWNER_CAN_EXECUTE: u32 = 0o0100;
    let path = path.as_ref();
    let mut permissions = path.metadata()?.permissions();
    permissions.set_mode(permissions.mode() | OWNER_CAN_EXECUTE);
    std::fs::set_permissions(path, permissions).anyhow_err()
}
/// Windows counterpart of the Unix `allow_owner_execute`.
///
/// Windows has no per-file execute permission bit, so there is nothing to set and this
/// always succeeds.
#[cfg(target_os = "windows")]
#[context("Failed to update permissions on `{}`", path.as_ref().display())]
pub fn allow_owner_execute(path: impl AsRef<Path>) -> Result {
    // No-op on Windows.
    Ok(())
}
/// Get the size of a file after gzip compression.
pub async fn compressed_size(path: impl AsRef<Path>) -> Result<byte_unit::Byte> {
@ -244,39 +79,10 @@ pub async fn compressed_size(path: impl AsRef<Path>) -> Result<byte_unit::Byte>
crate::io::read_length(encoded_stream).await.map(into)
}
/// Check whether the two files have identical contents.
///
/// Returns `false` when either file cannot be read (e.g. it does not exist) — callers use
/// this as a "needs copying" signal, so treating errors as "not identical" is deliberate.
pub fn check_if_identical(source: impl AsRef<Path>, target: impl AsRef<Path>) -> bool {
    (|| -> Result<bool> {
        // Different sizes guarantee different contents — skip reading the files.
        // (The original code inverted this check and declared any two same-sized files
        // identical without comparing their bytes.)
        if metadata(&source)?.len() != metadata(&target)?.len() {
            return Ok(false);
        }
        // TODO: Not good for large files, should process them chunk by chunk.
        Ok(read(&source)? == read(&target)?)
    })()
    .unwrap_or(false)
}
/// Copy the source file over the target, but only when their contents differ.
pub fn copy_file_if_different(source: impl AsRef<Path>, target: impl AsRef<Path>) -> Result {
    // Early return when the files already match — nothing to do.
    if check_if_identical(&source, &target) {
        trace!("No changes, skipping {}.", source.as_ref().display());
        return Ok(());
    }
    trace!(
        "Modified, will copy {} to {}.",
        source.as_ref().display(),
        target.as_ref().display()
    );
    copy(&source, &target)?;
    Ok(())
}
#[tracing::instrument(skip_all, fields(
src = %source.as_ref().display(),
dest = %target.as_ref().display()),
err)]
src = %source.as_ref().display(),
dest = %target.as_ref().display()),
err)]
pub async fn copy_if_different(source: impl AsRef<Path>, target: impl AsRef<Path>) -> Result {
if tokio::metadata(&source).await?.is_file() {
return copy_file_if_different(source, target);
@ -293,17 +99,8 @@ pub async fn copy_if_different(source: impl AsRef<Path>, target: impl AsRef<Path
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use crate::log::setup_logging;
use ::tokio;
#[tokio::test]
#[ignore]
async fn copy_if_different_test() -> Result {
setup_logging()?;
copy_if_different("../../..", r"C:\temp\out").await?;
Ok(())
}
#[context("Failed to create symlink {} => {}", src.as_ref().display(), dst.as_ref().display())]
pub fn symlink_auto(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result {
create_parent_dir_if_missing(&dst)?;
symlink::symlink_auto(&src, &dst).anyhow_err()
}

View File

@ -50,6 +50,12 @@ pub async fn create_parent_dir_if_missing(path: impl AsRef<Path>) -> Result<Path
}
}
/// Asynchronously write the contents to the file, creating missing parent directories first.
#[context("Failed to write file: {}", path.as_ref().display())]
pub async fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
    create_parent_dir_if_missing(&path).await?;
    crate::fs::wrappers::tokio::write(&path, &contents).await.anyhow_err()
}
pub async fn copy_to_file(
mut content: impl AsyncRead + Unpin,
output_path: impl AsRef<Path>,
@ -71,6 +77,15 @@ pub async fn remove_dir_if_exists(path: impl AsRef<Path>) -> Result {
}
}
/// Remove the directory if it exists — unless this is a dry run.
///
/// In dry-run mode nothing is deleted; the would-be removal is only logged.
pub async fn perhaps_remove_dir_if_exists(dry_run: bool, path: impl AsRef<Path>) -> Result {
    if !dry_run {
        return remove_dir_if_exists(path).await;
    }
    info!("Would remove directory {}.", path.as_ref().display());
    Ok(())
}
/// Recreate directory, so it exists and is empty.
pub async fn reset_dir(path: impl AsRef<Path>) -> Result {
let path = path.as_ref();
@ -94,3 +109,18 @@ pub async fn write_iter(
})?;
Ok(())
}
/// Append contents to the file.
///
/// If the file does not exist, it will be created.
pub async fn append(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
    tokio::fs::OpenOptions::new()
        .append(true)
        // Create the file when missing rather than failing.
        .create(true)
        .open(&path)
        .await
        .with_context(|| format!("Failed to open file {} for appending.", path.as_ref().display()))?
        .write_all(contents.as_ref())
        .await
        .with_context(|| format!("Failed to write to file {}.", path.as_ref().display()))
}

View File

@ -1,82 +1,5 @@
//! Wrappers over [`std::fs`] functions that provide sensible error messages, i.e. explaining what
//! operation was attempted and what was the relevant path.
use crate::prelude::*;
use std::fs::File;
use std::fs::Metadata;
use std::io::Write;
// ==============
// === Export ===
// ==============
pub mod tokio;
/// Like [`std::fs::metadata`], but the error message names the affected path.
#[context("Failed to obtain metadata for file: {}", path.as_ref().display())]
pub fn metadata<P: AsRef<Path>>(path: P) -> Result<Metadata> {
    std::fs::metadata(&path).anyhow_err()
}
/// Like [`std::fs::copy`], but the error message names both paths.
#[context("Failed to copy file from {} to {}", from.as_ref().display(), to.as_ref().display())]
pub fn copy(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<u64> {
    std::fs::copy(&from, &to).anyhow_err()
}
/// Like [`std::fs::rename`], but the error message names both paths.
#[context("Failed to rename file from {} to {}", from.as_ref().display(), to.as_ref().display())]
pub fn rename(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result {
    std::fs::rename(&from, &to).anyhow_err()
}
/// Like [`std::fs::read`], but the error message names the affected path.
#[context("Failed to read the file: {}", path.as_ref().display())]
pub fn read(path: impl AsRef<Path>) -> Result<Vec<u8>> {
    std::fs::read(&path).anyhow_err()
}
/// Like [`std::fs::read_dir`], but the error message names the affected path.
#[context("Failed to read the directory: {}", path.as_ref().display())]
pub fn read_dir(path: impl AsRef<Path>) -> Result<std::fs::ReadDir> {
    std::fs::read_dir(&path).anyhow_err()
}
/// Like [`std::fs::read_to_string`], but the error message names the affected path.
#[context("Failed to read the file: {}", path.as_ref().display())]
pub fn read_to_string(path: impl AsRef<Path>) -> Result<String> {
    std::fs::read_to_string(&path).anyhow_err()
}
/// Like [`std::fs::write`], but the error message names the affected path.
#[context("Failed to write path: {}", path.as_ref().display())]
pub fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
    std::fs::write(&path, contents).anyhow_err()
}
pub fn append(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
std::fs::OpenOptions::new()
.append(true)
.create(true)
.open(&path)
.context(format!("Failed to open {} for writing.", path.as_ref().display()))?
.write_all(contents.as_ref())
.context(format!("Failed to write to {}.", path.as_ref().display()))
}
/// Like [`std::fs::File::open`], but the error message names the affected path.
#[context("Failed to open path for reading: {}", path.as_ref().display())]
pub fn open(path: impl AsRef<Path>) -> Result<File> {
    File::open(&path).anyhow_err()
}
/// Like [`std::fs::File::create`], but the error message names the affected path.
#[context("Failed to open path for writing: {}", path.as_ref().display())]
pub fn create(path: impl AsRef<Path>) -> Result<File> {
    File::create(&path).anyhow_err()
}
/// Like [`std::fs::canonicalize`], but the error message names the affected path.
#[context("Failed to canonicalize path: {}", path.as_ref().display())]
pub fn canonicalize(path: impl AsRef<Path>) -> Result<PathBuf> {
    std::fs::canonicalize(&path).anyhow_err()
}
/// Like [`std::fs::create_dir_all`], but the error message names the affected path.
// Fixed the "missing directories no path" typo in the error message ("no" -> "on").
#[context("Failed to create missing directories on path: {}", path.as_ref().display())]
pub fn create_dir_all(path: impl AsRef<Path>) -> Result {
    std::fs::create_dir_all(&path).anyhow_err()
}

View File

@ -2,12 +2,13 @@ use crate::prelude::*;
use tokio::fs::File;
use tokio::io::AsyncReadExt;
use tokio_util::io::ReaderStream;
#[context("Failed to obtain metadata for file: {}", path.as_ref().display())]
pub async fn metadata<P: AsRef<Path>>(path: P) -> Result<std::fs::Metadata> {
tokio::fs::metadata(&path).await.anyhow_err()
pub fn metadata<P: AsRef<Path>>(path: P) -> BoxFuture<'static, Result<std::fs::Metadata>> {
let path = path.as_ref().to_owned();
tokio::fs::metadata(path).anyhow_err().boxed()
}
#[context("Failed to open path for reading: {}", path.as_ref().display())]
@ -15,6 +16,17 @@ pub async fn open(path: impl AsRef<Path>) -> Result<File> {
File::open(&path).await.anyhow_err()
}
/// Open the file for reading and wrap it into an asynchronous stream over its contents.
///
/// The path is cloned up front and the future is boxed so the returned value does not
/// borrow from the argument (hence the `'static` lifetime).
/// Note: `#[context]` cannot be used here because the function is not `async`; the inner
/// `open` already attaches a path-aware error message.
pub fn open_stream(path: impl AsRef<Path>) -> BoxFuture<'static, Result<ReaderStream<File>>> {
    let owned_path = path.as_ref().to_owned();
    let pending_file = open(owned_path);
    async move { pending_file.await.map(ReaderStream::new) }.boxed()
}
#[context("Failed to open path for writing: {}", path.as_ref().display())]
pub async fn create(path: impl AsRef<Path>) -> Result<File> {
File::create(&path).await.anyhow_err()

View File

@ -4,6 +4,13 @@ use futures_util::future::OptionFuture;
/// Adapt a [`tokio::sync::mpsc::Receiver`] into a [`Stream`] of its items.
pub fn receiver_to_stream<T>(
    mut receiver: tokio::sync::mpsc::Receiver<T>,
) -> impl Stream<Item = T> {
    // `poll_fn` forwards each poll to `poll_recv`; the stream terminates once the channel
    // is closed and drained.
    futures::stream::poll_fn(move |ctx| receiver.poll_recv(ctx))
}
#[derive(Copy, Clone, Debug)]
pub enum AsyncPolicy {
Sequential,

View File

@ -1,19 +1,7 @@
use crate::prelude::*;
use crate::cache::download::DownloadFile;
use anyhow::Context;
use headers::HeaderMap;
use headers::HeaderValue;
use octocrab::models::repos::Asset;
use octocrab::models::repos::Release;
use octocrab::models::workflows::WorkflowListArtifact;
use octocrab::models::ArtifactId;
use octocrab::models::AssetId;
use octocrab::models::ReleaseId;
use octocrab::models::RunId;
use octocrab::params::actions::ArchiveFormat;
use reqwest::Response;
@ -21,6 +9,12 @@ const MAX_PER_PAGE: u8 = 100;
pub mod model;
pub mod release;
pub mod repo;
pub mod workflow;
pub use repo::Repo;
pub use repo::RepoRef;
/// Goes over all the pages and returns result.
///
@ -35,170 +29,8 @@ pub async fn get_all<T: DeserializeOwned>(
client.all_pages(first_page).await
}
/// Entity that uniquely identifies a GitHub-hosted repository.
#[async_trait]
pub trait RepoPointer: Display {
fn owner(&self) -> &str;
fn name(&self) -> &str;
/// Generate a token that can be used to register a new runner for this repository.
async fn generate_runner_registration_token(
&self,
octocrab: &Octocrab,
) -> Result<model::RegistrationToken> {
let path =
iformat!("/repos/{self.owner()}/{self.name()}/actions/runners/registration-token");
let url = octocrab.absolute_url(path)?;
octocrab.post(url, EMPTY_REQUEST_BODY).await.context(format!(
"Failed to generate a runner registration token for the {self} repository."
))
}
/// The repository's URL.
fn url(&self) -> Result<Url> {
let url_text = iformat!("https://github.com/{self.owner()}/{self.name()}");
Url::parse(&url_text)
.context(format!("Failed to generate an URL for the {self} repository."))
}
fn repos<'a>(&'a self, client: &'a Octocrab) -> octocrab::repos::RepoHandler<'a> {
client.repos(self.owner(), self.name())
}
async fn all_releases(&self, client: &Octocrab) -> Result<Vec<Release>> {
get_all(client, self.repos(client).releases().list().per_page(MAX_PER_PAGE).send())
.await
.context(format!("Failed to list all releases in the {self} repository."))
}
async fn latest_release(&self, client: &Octocrab) -> Result<Release> {
self.repos(client)
.releases()
.get_latest()
.await
.context(format!("Failed to get the latest release in the {self} repository."))
}
async fn find_release_by_id(
&self,
client: &Octocrab,
release_id: ReleaseId,
) -> Result<Release> {
let repo_handler = self.repos(client);
let releases_handler = repo_handler.releases();
releases_handler
.get_by_id(release_id)
.await
.context(format!("Failed to find release by id `{release_id}` in `{self}`."))
}
#[tracing::instrument(skip(client), fields(%self, %text), err)]
async fn find_release_by_text(&self, client: &Octocrab, text: &str) -> anyhow::Result<Release> {
self.all_releases(client)
.await?
.into_iter()
.find(|release| release.tag_name.contains(text))
.inspect(|release| info!("Found release at: {} (id={}).", release.html_url, release.id))
.context(format!("No release with tag matching `{text}` in {self}."))
}
#[tracing::instrument(skip(client), fields(%self, %run_id, %name), err, ret)]
async fn find_artifact_by_name(
&self,
client: &Octocrab,
run_id: RunId,
name: &str,
) -> Result<WorkflowListArtifact> {
let artifacts = client
.actions()
.list_workflow_run_artifacts(self.owner(), self.name(), run_id)
.per_page(100)
.send()
.await
.context(format!("Failed to list artifacts of run {run_id} in {self}."))?
.value
.context("Failed to find any artifacts.")?;
artifacts
.into_iter()
.find(|artifact| artifact.name == name)
.context(format!("Failed to find artifact by name '{name}'."))
}
async fn download_artifact(&self, client: &Octocrab, artifact_id: ArtifactId) -> Result<Bytes> {
client
.actions()
.download_artifact(self.owner(), self.name(), artifact_id, ArchiveFormat::Zip)
.await
.context(format!("Failed to download artifact with ID={artifact_id}."))
}
async fn download_and_unpack_artifact(
&self,
client: &Octocrab,
artifact_id: ArtifactId,
output_dir: &Path,
) -> Result {
let bytes = self.download_artifact(client, artifact_id).await?;
crate::archive::zip::extract_bytes(bytes, output_dir)?;
Ok(())
}
#[tracing::instrument(name="Get the asset information.", skip(client), fields(self=%self), err)]
async fn asset(&self, client: &Octocrab, asset_id: AssetId) -> Result<Asset> {
self.repos(client).releases().get_asset(asset_id).await.anyhow_err()
}
fn download_asset_job(&self, octocrab: &Octocrab, asset_id: AssetId) -> DownloadFile {
let path = iformat!("/repos/{self.owner()}/{self.name()}/releases/assets/{asset_id}");
// Unwrap will work, because we are appending relative URL constant.
let url = octocrab.absolute_url(path).unwrap();
DownloadFile {
client: octocrab.client.clone(),
key: crate::cache::download::Key {
url,
additional_headers: HeaderMap::from_iter([(
reqwest::header::ACCEPT,
HeaderValue::from_static(mime::APPLICATION_OCTET_STREAM.as_ref()),
)]),
},
}
}
#[tracing::instrument(name="Download the asset.", skip(client), fields(self=%self), err)]
async fn download_asset(&self, client: &Octocrab, asset_id: AssetId) -> Result<Response> {
self.download_asset_job(client, asset_id).send_request().await
}
#[tracing::instrument(name="Download the asset to a file.", skip(client, output_path), fields(self=%self, dest=%output_path.as_ref().display()), err)]
async fn download_asset_as(
&self,
client: &Octocrab,
asset_id: AssetId,
output_path: impl AsRef<Path> + Send + Sync + 'static,
) -> Result {
let response = self.download_asset(client, asset_id).await?;
crate::io::web::stream_response_to_file(response, &output_path).await
}
#[tracing::instrument(name="Download the asset to a directory.",
skip(client, output_dir, asset),
fields(self=%self, dest=%output_dir.as_ref().display(), id = %asset.id),
err)]
async fn download_asset_to(
&self,
client: &Octocrab,
asset: &Asset,
output_dir: impl AsRef<Path> + Send + Sync + 'static,
) -> Result<PathBuf> {
let output_path = output_dir.as_ref().join(&asset.name);
self.download_asset_as(client, asset.id, output_path.clone()).await?;
Ok(output_path)
}
}
#[async_trait]
pub trait OrganizationPointer {
pub trait IsOrganization {
/// Organization name.
fn name(&self) -> &str;
@ -221,7 +53,7 @@ pub trait OrganizationPointer {
/// Get the biggest asset containing given text.
#[instrument(skip(release), fields(id = %release.id, url = %release.url), err)]
pub fn find_asset_by_text<'a>(release: &'a Release, text: &str) -> anyhow::Result<&'a Asset> {
pub fn find_asset_by_text<'a>(release: &'a Release, text: &str) -> Result<&'a Asset> {
release
.assets
.iter()
@ -235,7 +67,7 @@ pub fn find_asset_by_text<'a>(release: &'a Release, text: &str) -> anyhow::Resul
/// Get the biggest asset containing given text.
#[instrument(skip(release), fields(id = %release.id, url = %release.url), ret(Display), err)]
pub fn find_asset_url_by_text<'a>(release: &'a Release, text: &str) -> anyhow::Result<&'a Url> {
pub fn find_asset_url_by_text<'a>(release: &'a Release, text: &str) -> Result<&'a Url> {
let matching_asset = find_asset_by_text(release, text)?;
Ok(&matching_asset.browser_download_url)
}
@ -244,7 +76,7 @@ pub fn find_asset_url_by_text<'a>(release: &'a Release, text: &str) -> anyhow::R
///
/// Octocrab client does not need to bo authorized with a PAT for this. However, being authorized
/// will help with GitHub API query rate limits.
pub async fn latest_runner_url(octocrab: &Octocrab, os: OS) -> anyhow::Result<Url> {
pub async fn latest_runner_url(octocrab: &Octocrab, os: OS) -> Result<Url> {
let latest_release = octocrab.repos("actions", "runner").releases().get_latest().await?;
let os_name = match os {

View File

@ -1,46 +1,134 @@
use crate::prelude::*;
use crate::github::Repo;
use mime::Mime;
use octocrab::models::repos::Asset;
use octocrab::models::repos::Release;
use octocrab::models::ReleaseId;
use reqwest::Body;
use tracing::instrument;
#[context("Failed to upload the asset {}", asset.as_ref().display())]
#[instrument(skip_all, fields(source = %asset.as_ref().display(), %repo, %release))]
pub async fn upload_asset(
repo: &(impl RepoPointer + Send + Sync + 'static),
client: &reqwest::Client,
release: ReleaseId,
asset: impl AsRef<Path> + Send + Sync,
) -> Result {
let upload_url = format!(
"https://uploads.github.com/repos/{}/{}/releases/{}/assets",
repo.owner(),
repo.name(),
release
);
let asset_path = asset.as_ref();
let mime = new_mime_guess::from_path(asset_path).first_or_octet_stream();
let file = tokio::fs::File::open(asset_path).await?;
let file_size = file.metadata().await?.len();
let file_contents_stream = tokio_util::io::ReaderStream::new(file);
let body = Body::wrap_stream(file_contents_stream);
let asset_name = asset_path.file_name().unwrap().to_string_lossy();
let request = client
.post(upload_url)
.query(&[("name", asset_name.as_ref())])
.header(reqwest::header::ACCEPT, "application/vnd.github.v3+json")
.header(reqwest::header::CONTENT_TYPE, mime.to_string())
.header(reqwest::header::CONTENT_LENGTH, file_size)
.body(body)
.build()?;
/// Types that uniquely identify a release and can be used to fetch it from GitHub.
pub trait IsRelease: Debug {
/// The release ID.
fn id(&self) -> ReleaseId;
dbg!(&request);
let response = client.execute(request).await?;
dbg!(&response);
response.error_for_status()?;
Ok(())
/// The repository where the release is located.
fn repo(&self) -> Repo;
/// Client used to perform GitHub API operations.
fn octocrab(&self) -> &Octocrab;
}
/// Extension methods for types that identify a GitHub release (see [`IsRelease`]).
#[async_trait]
pub trait IsReleaseExt: IsRelease + Sync {
    /// Upload a new asset to the release.
    fn upload_asset(
        &self,
        asset_name: impl AsRef<str>,
        content_type: Mime,
        content_length: u64,
        body: impl Into<Body>,
    ) -> BoxFuture<'static, Result<Asset>> {
        // The uploads endpoint lives on a separate host, so the raw client is used
        // rather than octocrab's typed API.
        let upload_url = format!(
            "https://uploads.github.com/repos/{repo}/releases/{release_id}/assets",
            repo = self.repo(),
            release_id = self.id(),
        );
        let body = body.into();
        let request = self
            .octocrab()
            .client
            .post(upload_url)
            .query(&[("name", asset_name.as_ref())])
            .header(reqwest::header::ACCEPT, "application/vnd.github.v3+json")
            .header(reqwest::header::CONTENT_TYPE, content_type.to_string())
            .header(reqwest::header::CONTENT_LENGTH, content_length)
            .body(body);
        // The request is built eagerly (borrowing `self`); only the send is deferred,
        // which lets the returned future be `'static`.
        async move {
            crate::io::web::execute(request).await?.json().await.context("Failed to upload asset.")
        }
        .boxed()
    }

    /// Upload a new asset to the release from a given file.
    ///
    /// The filename will be used to name the asset and deduce MIME content type.
    // #[context("Failed to upload an asset from the file under {}", path.as_ref().display())]
    #[instrument(skip_all, fields(source = %path.as_ref().display()))]
    async fn upload_asset_file(&self, path: impl AsRef<Path> + Send) -> Result<Asset> {
        // The message is prepared up front because `path` is moved into the async block.
        let error_msg =
            format!("Failed to upload an asset from the file under {}", path.as_ref().display());
        async move {
            let path = path.as_ref().to_path_buf();
            let asset_name = path.file_name().with_context(|| {
                format!("The given path {} does not contain a filename.", path.display())
            })?;
            let content_type = new_mime_guess::from_path(&path).first_or_octet_stream();
            let file_size = crate::fs::tokio::metadata(&path).await?.len();
            // Stream the file instead of buffering it — assets can be large.
            let file = crate::fs::tokio::open_stream(&path).await?;
            let body = Body::wrap_stream(file);
            self.upload_asset(asset_name.as_str(), content_type, file_size, body).await
        }
        .await
        .context(error_msg)
    }

    /// Compress the directory into a `.tar.gz` archive (in a temporary location) and
    /// upload the archive as a release asset.
    async fn upload_compressed_dir(&self, path: impl AsRef<Path> + Send) -> Result<Asset> {
        let dir_to_upload = path.as_ref();
        let temp_dir = tempfile::tempdir()?;
        let archive_path =
            dir_to_upload.with_parent(temp_dir.path()).with_appended_extension("tar.gz");
        crate::archive::compress_directory(&archive_path, &dir_to_upload).await?;
        self.upload_asset_file(archive_path).await
    }

    /// Fetch the full release information from GitHub.
    async fn get(&self) -> Result<Release> {
        self.octocrab()
            .repos(self.repo().owner(), self.repo().name())
            .releases()
            .get_by_id(self.id())
            .await
            .anyhow_err()
    }
}

// Blanket implementation: every release identifier gets the extension methods for free.
impl<T> IsReleaseExt for T where T: IsRelease + Sync {}
/// A release on GitHub.
#[derive(Clone, Derivative)]
#[derivative(Debug)]
pub struct ReleaseHandle {
    /// Repository that contains the release.
    #[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
    pub repo: Repo,
    /// GitHub-wide unique identifier of the release.
    pub id: ReleaseId,
    /// Client used to perform GitHub API operations; excluded from `Debug` output.
    #[derivative(Debug = "ignore")]
    pub octocrab: Octocrab,
}
// `ReleaseHandle` trivially satisfies `IsRelease` by exposing its own fields.
impl IsRelease for ReleaseHandle {
    fn id(&self) -> ReleaseId {
        self.id
    }

    fn repo(&self) -> Repo {
        self.repo.clone()
    }

    fn octocrab(&self) -> &Octocrab {
        &self.octocrab
    }
}
impl ReleaseHandle {
    /// Pair the given release coordinates with an API client.
    pub fn new(octocrab: &Octocrab, repo: impl Into<Repo>, id: ReleaseId) -> Self {
        Self { repo: repo.into(), id, octocrab: octocrab.clone() }
    }
}
#[cfg(test)]

View File

@ -0,0 +1,305 @@
use crate::prelude::*;
use crate::cache::download::DownloadFile;
use crate::github;
use crate::github::model;
use crate::github::MAX_PER_PAGE;
use headers::HeaderMap;
use headers::HeaderValue;
use octocrab::models::repos::Asset;
use octocrab::models::repos::Release;
use octocrab::models::workflows::WorkflowListArtifact;
use octocrab::models::ArtifactId;
use octocrab::models::AssetId;
use octocrab::models::ReleaseId;
use octocrab::models::RunId;
use octocrab::params::actions::ArchiveFormat;
use reqwest::Response;
/// Owned data denoting a specific GitHub repository.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize, derive_more::Display)]
#[display(fmt = "{}/{}", owner, name)]
pub struct Repo {
    /// Owner - an organization's or user's name.
    pub owner: String,
    /// Repository name (the part after the owner in "owner/name").
    pub name: String,
}
// The accessors simply borrow the owned fields.
impl IsRepo for Repo {
    fn owner(&self) -> &str {
        &self.owner
    }

    fn name(&self) -> &str {
        &self.name
    }
}
/// Parse from strings in format "owner/name". Opposite of `Display`.
impl std::str::FromStr for Repo {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        // Reuse the borrowing parser and promote the result to owned data.
        let parsed = RepoRef::try_from(s)?;
        Ok(parsed.into())
    }
}
impl<'a> From<RepoRef<'a>> for Repo {
fn from(repo: RepoRef<'a>) -> Self {
Repo { owner: repo.owner.to_owned(), name: repo.name.to_owned() }
}
}
impl Repo {
    /// Construct a repository identifier from its owner and name.
    pub fn new(owner: impl Into<String>, name: impl Into<String>) -> Self {
        Self { owner: owner.into(), name: name.into() }
    }
}
/// Non-owning equivalent of `Repo`.
///
/// Particularly useful for defining `const` repositories.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize, Serialize, derive_more::Display)]
#[display(fmt = "{}/{}", owner, name)]
pub struct RepoRef<'a> {
    /// Owner - an organization's or user's name.
    pub owner: &'a str,
    /// Repository name (the part after the owner in "owner/name").
    pub name: &'a str,
}
// The accessors return the borrowed slices directly.
impl<'a> IsRepo for RepoRef<'a> {
    fn owner(&self) -> &str {
        self.owner
    }

    fn name(&self) -> &str {
        self.name
    }
}
impl<'a> RepoRef<'a> {
    /// Construct the reference from string-like values.
    ///
    /// The `~const AsRef<str>` bounds allow calling this in `const` contexts (e.g. to
    /// define a `const` repository), which plain `AsRef` would not permit.
    pub const fn new<T1, T2>(owner: &'a T1, name: &'a T2) -> Self
    where
        T1: ~const AsRef<str> + ?Sized,
        T2: ~const AsRef<str> + ?Sized, {
        Self { owner: owner.as_ref(), name: name.as_ref() }
    }
}
/// Note that we chose to implement `TryFrom` rather than `FromStr` for `RepoRef` because
/// `FromStr` requires the parsed value to be owned (or at least lifetime-independent from input),
/// which is not the case for `RepoRef`.
impl<'a> TryFrom<&'a str> for RepoRef<'a> {
    type Error = anyhow::Error;

    fn try_from(value: &'a str) -> std::result::Result<Self, Self::Error> {
        // Accept exactly one '/' separator; anything else is ambiguous.
        match value.split('/').collect_vec().as_slice() {
            [owner, name] => Ok(Self { owner, name }),
            slice => bail!("Failed to parse string '{}': Splitting by '/' should yield exactly 2 pieces, found: {}", value, slice.len()),
        }
    }
}
/// Any entity that uniquely identifies a GitHub-hosted repository.
#[async_trait]
pub trait IsRepo: Display {
    /// Owner - an organization's or user's name.
    fn owner(&self) -> &str;

    /// Repository name.
    fn name(&self) -> &str;

    /// The repository's URL.
    fn url(&self) -> Result<Url> {
        let url_text = iformat!("https://github.com/{self.owner()}/{self.name()}");
        Url::parse(&url_text)
            .context(format!("Failed to generate an URL for the {self} repository."))
    }

    /// Pair this repository identifier with an API client, producing a handle that can
    /// perform remote operations.
    fn handle(&self, octocrab: &Octocrab) -> Handle<Self>
    where Self: Clone + Sized {
        Handle { repo: self.clone(), octocrab: octocrab.clone() }
    }
}
/// A handle to a specific GitHub repository.
///
/// It includes a client (so also an authentication token) and a repository.
#[derive(Debug, Clone)]
pub struct Handle<Repo> {
    /// Client used to perform GitHub API operations.
    pub octocrab: Octocrab,
    /// The repository this handle operates on.
    pub repo: Repo,
}
/// A handle displays exactly like the repository it wraps.
impl<R: Display> Display for Handle<R> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        Display::fmt(&self.repo, f)
    }
}
// A handle identifies the same repository as the identifier it wraps.
impl<R: IsRepo> IsRepo for Handle<R> {
    fn owner(&self) -> &str {
        self.repo.owner()
    }

    fn name(&self) -> &str {
        self.repo.name()
    }
}
impl<R: IsRepo> Handle<R> {
    /// Create a new handle.
    pub fn new(octocrab: &Octocrab, repo: R) -> Self {
        Self { octocrab: octocrab.clone(), repo }
    }

    /// Generate a token that can be used to register a new runner for this repository.
    pub async fn generate_runner_registration_token(&self) -> Result<model::RegistrationToken> {
        let path =
            iformat!("/repos/{self.owner()}/{self.name()}/actions/runners/registration-token");
        let url = self.octocrab.absolute_url(path)?;
        self.octocrab.post(url, EMPTY_REQUEST_BODY).await.context(format!(
            "Failed to generate a runner registration token for the {self} repository."
        ))
    }

    /// Get octocrab's handler for this repository's endpoints.
    pub fn repos(&self) -> octocrab::repos::RepoHandler {
        self.octocrab.repos(self.owner(), self.name())
    }

    /// List all releases, following pagination until exhausted.
    pub async fn all_releases(&self) -> Result<Vec<Release>> {
        github::get_all(
            &self.octocrab,
            self.repos().releases().list().per_page(MAX_PER_PAGE).send(),
        )
        .await
        .context(format!("Failed to list all releases in the {self} repository."))
    }

    /// Get the release marked by GitHub as the latest one.
    pub async fn latest_release(&self) -> Result<Release> {
        self.repos()
            .releases()
            .get_latest()
            .await
            .context(format!("Failed to get the latest release in the {self} repository."))
    }

    /// Look up a release by its identifier.
    pub async fn find_release_by_id(&self, release_id: ReleaseId) -> Result<Release> {
        let repo_handler = self.repos();
        let releases_handler = repo_handler.releases();
        releases_handler
            .get_by_id(release_id)
            .await
            .context(format!("Failed to find release by id `{release_id}` in `{self}`."))
    }

    /// Find the first release whose tag contains the given text.
    #[tracing::instrument(fields(%self, %text), err)]
    pub async fn find_release_by_text(&self, text: &str) -> anyhow::Result<Release> {
        self.all_releases()
            .await?
            .into_iter()
            .find(|release| release.tag_name.contains(text))
            .inspect(|release| info!("Found release at: {} (id={}).", release.html_url, release.id))
            .context(format!("No release with tag matching `{text}` in {self}."))
    }

    /// Find an artifact of the given workflow run by its exact name.
    #[tracing::instrument(fields(%self, %run_id, %name), err, ret)]
    pub async fn find_artifact_by_name(
        &self,
        run_id: RunId,
        name: &str,
    ) -> Result<WorkflowListArtifact> {
        // NOTE(review): only the first page (up to 100 artifacts) is inspected here —
        // confirm whether runs can produce more artifacts than that.
        let artifacts = self
            .octocrab
            .actions()
            .list_workflow_run_artifacts(self.owner(), self.name(), run_id)
            .per_page(100)
            .send()
            .await
            .context(format!("Failed to list artifacts of run {run_id} in {self}."))?
            .value
            .context("Failed to find any artifacts.")?;
        artifacts
            .into_iter()
            .find(|artifact| artifact.name == name)
            .context(format!("Failed to find artifact by name '{name}'."))
    }

    /// Download a workflow artifact as an in-memory ZIP archive.
    pub async fn download_artifact(&self, artifact_id: ArtifactId) -> Result<Bytes> {
        self.octocrab
            .actions()
            .download_artifact(self.owner(), self.name(), artifact_id, ArchiveFormat::Zip)
            .await
            .context(format!("Failed to download artifact with ID={artifact_id}."))
    }

    /// Download a workflow artifact and extract its contents into the given directory.
    pub async fn download_and_unpack_artifact(
        &self,
        artifact_id: ArtifactId,
        output_dir: &Path,
    ) -> Result {
        let bytes = self.download_artifact(artifact_id).await?;
        crate::archive::zip::extract_bytes(bytes, output_dir)?;
        Ok(())
    }

    #[tracing::instrument(name="Get the asset information.", fields(self=%self), err)]
    pub async fn asset(&self, asset_id: AssetId) -> Result<Asset> {
        self.repos().releases().get_asset(asset_id).await.anyhow_err()
    }

    /// Build a cacheable download job for the given release asset.
    pub fn download_asset_job(&self, asset_id: AssetId) -> DownloadFile {
        let path = iformat!("/repos/{self.owner()}/{self.name()}/releases/assets/{asset_id}");
        // Unwrap will work, because we are appending relative URL constant.
        let url = self.octocrab.absolute_url(path).unwrap();
        DownloadFile {
            client: self.octocrab.client.clone(),
            key: crate::cache::download::Key {
                url,
                // Requesting the octet stream yields the raw asset rather than its JSON
                // metadata.
                additional_headers: HeaderMap::from_iter([(
                    reqwest::header::ACCEPT,
                    HeaderValue::from_static(mime::APPLICATION_OCTET_STREAM.as_ref()),
                )]),
            },
        }
    }

    #[tracing::instrument(name="Download the asset.", fields(self=%self), err)]
    pub async fn download_asset(&self, asset_id: AssetId) -> Result<Response> {
        self.download_asset_job(asset_id).send_request().await
    }

    #[tracing::instrument(name="Download the asset to a file.", skip(output_path), fields(self=%self, dest=%output_path.as_ref().display()), err)]
    pub async fn download_asset_as(
        &self,
        asset_id: AssetId,
        output_path: impl AsRef<Path> + Send + Sync + 'static,
    ) -> Result {
        let response = self.download_asset(asset_id).await?;
        crate::io::web::stream_response_to_file(response, &output_path).await
    }

    #[tracing::instrument(name="Download the asset to a directory.",
        skip(output_dir, asset),
        fields(self=%self, dest=%output_dir.as_ref().display(), id = %asset.id),
        err)]
    pub async fn download_asset_to(
        &self,
        asset: &Asset,
        output_dir: impl AsRef<Path> + Send + Sync + 'static,
    ) -> Result<PathBuf> {
        // The asset keeps its original name within the output directory.
        let output_path = output_dir.as_ref().join(&asset.name);
        self.download_asset_as(asset.id, output_path.clone()).await?;
        Ok(output_path)
    }

    /// Get the repository information.
    pub async fn get(&self) -> Result<octocrab::models::Repository> {
        self.repos()
            .get()
            .await
            // NOTE(review): "infomation" typo in the user-facing message — fix separately.
            .with_context(|| format!("Failed to get the infomation for the {self} repository."))
    }
}

View File

@ -0,0 +1,38 @@
use crate::prelude::*;
use crate::github;
/// HTTP body payload for the workflow dispatch.
///
/// Serialized to JSON as `{"ref": ..., "inputs": ...}`.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
pub struct RequestBody<S, T> {
    /// Reference to the commit or branch to build. Should be string-like.
    pub r#ref: S,
    /// Inputs to the workflow.
    pub inputs: T,
}
/// Invoke the GitHub REST API workflow dispatch endpoint.
///
/// Triggers `workflow_id` in the given repository on the given `ref` with the supplied
/// workflow `inputs`. A successful dispatch returns no payload, hence the unit result.
pub async fn dispatch<R: IsRepo>(
    repo: &github::repo::Handle<R>,
    workflow_id: impl AsRef<str> + Send + Sync + 'static,
    r#ref: impl AsRef<str> + Send + Sync + 'static,
    inputs: &impl Serialize,
) -> Result {
    // Don't use octocrab for this, it has broken error handling!
    // (treating error 404 as Ok)
    let workflow_id = workflow_id.as_ref();
    let name = repo.name();
    let owner = repo.owner();
    let url = repo.octocrab.absolute_url(format!(
        "/repos/{owner}/{name}/actions/workflows/{workflow_id}/dispatches"
    ))?;
    let r#ref = r#ref.as_ref();
    let body = RequestBody { r#ref, inputs };
    let response = repo.octocrab._post(url, Some(&body)).await?;
    let _response = crate::io::web::handle_error_response(response).await?;
    // Nothing interesting in OK response, so we just return empty struct.
    Ok(())
}

View File

@ -16,6 +16,7 @@
#![feature(io_error_other)]
#![feature(string_remove_matches)]
#![feature(once_cell)]
#![feature(const_deref)]
#![feature(duration_constants)]
#![feature(const_trait_impl)]
#![feature(is_some_with)]
@ -40,7 +41,6 @@
// ==============
pub mod actions;
pub mod anyhow;
pub mod archive;
pub mod buffer;
pub mod cache;
@ -70,29 +70,12 @@ pub mod serde;
pub mod prelude {
pub use enso_build_base::prelude::*;
pub type Result<T = ()> = anyhow::Result<T>;
pub use anyhow::anyhow;
pub use anyhow::bail;
pub use anyhow::ensure;
pub use anyhow::Context as _;
pub use async_trait::async_trait;
pub use bytes::Bytes;
pub use derivative::Derivative;
pub use derive_more::Display;
pub use fn_error_context::context;
pub use futures_util::future::BoxFuture;
pub use futures_util::select;
pub use futures_util::stream::BoxStream;
pub use futures_util::try_join;
pub use futures_util::AsyncWrite;
pub use futures_util::FutureExt as _;
pub use futures_util::Stream;
pub use futures_util::StreamExt as _;
pub use futures_util::TryFuture;
pub use futures_util::TryFutureExt as _;
pub use futures_util::TryStream;
pub use futures_util::TryStreamExt as _;
pub use ifmt::iformat;
pub use itertools::Itertools;
pub use lazy_static::lazy_static;
@ -101,63 +84,16 @@ pub mod prelude {
pub use platforms::target::Arch;
pub use platforms::target::OS;
pub use semver::Version;
pub use serde::de::DeserializeOwned;
pub use serde::Deserialize;
pub use serde::Serialize;
pub use shrinkwraprs::Shrinkwrap;
pub use snafu::Snafu;
pub use std::borrow::Borrow;
pub use std::borrow::BorrowMut;
pub use std::borrow::Cow;
pub use std::collections::BTreeMap;
pub use std::collections::BTreeSet;
pub use std::collections::HashMap;
pub use std::collections::HashSet;
pub use std::default::default;
pub use std::ffi::OsStr;
pub use std::ffi::OsString;
pub use std::fmt::Debug;
pub use std::fmt::Display;
pub use std::fmt::Formatter;
pub use std::future::ready;
pub use std::future::Future;
pub use std::hash::Hash;
pub use std::io::Read;
pub use std::io::Seek;
pub use std::iter::once;
pub use std::iter::FromIterator;
pub use std::marker::PhantomData;
pub use std::ops::Deref;
pub use std::ops::DerefMut;
pub use std::ops::Range;
pub use std::path::Path;
pub use std::path::PathBuf;
pub use std::pin::pin;
pub use std::pin::Pin;
pub use std::sync::Arc;
pub use tokio::io::AsyncWriteExt as _;
pub use tracing::debug;
pub use tracing::debug_span;
pub use tracing::error;
pub use tracing::error_span;
pub use tracing::info;
pub use tracing::info_span;
pub use tracing::instrument;
pub use tracing::span;
pub use tracing::trace;
pub use tracing::trace_span;
pub use tracing::warn;
pub use tracing::warn_span;
pub use tracing::Instrument;
pub use url::Url;
pub use uuid::Uuid;
pub use crate::EMPTY_REQUEST_BODY;
pub use crate::anyhow::ResultExt;
pub use crate::env::Variable as EnvironmentVariable;
pub use crate::extensions::str::StrLikeExt;
pub use crate::github::RepoPointer;
pub use crate::extensions::output::OutputExt as _;
pub use crate::github::release::IsRelease;
pub use crate::github::repo::IsRepo;
pub use crate::goodie::Goodie;
pub use crate::log::setup_logging;
pub use crate::os::target::TARGET_ARCH;
@ -169,19 +105,13 @@ pub mod prelude {
pub use crate::program::Program;
pub use crate::program::Shell;
pub use crate::cache::goodie::GoodieExt as _;
pub use crate::env::new::RawVariable as _;
pub use crate::env::new::TypedVariable as _;
pub use crate::extensions::clap::ArgExt as _;
pub use crate::extensions::command::CommandExt as _;
pub use crate::extensions::from_string::FromString;
pub use crate::extensions::future::FutureExt as _;
pub use crate::extensions::future::TryFutureExt as _;
pub use crate::extensions::iterator::IteratorExt;
pub use crate::extensions::iterator::TryIteratorExt;
pub use crate::extensions::output::OutputExt as _;
pub use crate::extensions::path::PathExt as _;
pub use crate::extensions::result::ResultExt as _;
pub use crate::github::release::IsReleaseExt as _;
pub use crate::program::command::provider::CommandProviderExt as _;
pub use crate::program::version::IsVersion as _;
pub use crate::program::ProgramExt as _;

View File

@ -2,8 +2,9 @@
use crate::prelude::*;
use crate::github::OrganizationPointer;
use crate::github::RepoPointer;
use crate::github::repo::IsRepo;
use crate::github::IsOrganization;
use crate::github::Repo;
use crate::serde::regex_vec;
use crate::serde::single_or_sequence;
@ -42,7 +43,7 @@ impl RepoConfig {
#[serde(rename_all = "snake_case")]
pub enum RunnerLocation {
Organization(OrganizationContext),
Repository(RepoContext),
Repository(Repo),
}
impl RunnerLocation {
@ -55,7 +56,7 @@ impl RunnerLocation {
RunnerLocation::Organization(org) =>
org.generate_runner_registration_token(octocrab).await,
RunnerLocation::Repository(repo) =>
repo.generate_runner_registration_token(octocrab).await,
repo.handle(octocrab).generate_runner_registration_token().await,
}
}
@ -74,54 +75,12 @@ pub struct OrganizationContext {
pub name: String,
}
impl OrganizationPointer for OrganizationContext {
impl IsOrganization for OrganizationContext {
fn name(&self) -> &str {
&self.name
}
}
/// Data denoting a specific GitHub repository.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
pub struct RepoContext {
    /// Owner - an organization's or user's name.
    pub owner: String,
    /// The repository name (without the owner prefix).
    pub name:  String,
}
impl RepoPointer for RepoContext {
    /// The owning organization or user.
    fn owner(&self) -> &str {
        self.owner.as_str()
    }

    /// The bare repository name.
    fn name(&self) -> &str {
        self.name.as_str()
    }
}
/// Renders as `owner/name`, the common GitHub shorthand.
impl Display for RepoContext {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let Self { owner, name } = self;
        write!(f, "{}/{}", owner, name)
    }
}
/// Parse from strings in format "owner/name". Opposite of `Display`.
impl std::str::FromStr for RepoContext {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        // Require exactly one separator; more or fewer pieces is an error.
        let pieces: Vec<&str> = s.split('/').collect();
        if let [owner, name] = pieces[..] {
            Ok(Self { owner: owner.to_string(), name: name.to_string() })
        } else {
            bail!(
                "Failed to parse string '{}': Splitting by '/' should yield exactly 2 pieces, found: {}",
                s,
                pieces.len()
            )
        }
    }
}
impl RepoContext {
    /// Construct a repository descriptor from its owner and name.
    pub fn new(owner: impl Into<String>, name: impl Into<String>) -> Self {
        let owner = owner.into();
        let name = name.into();
        Self { owner, name }
    }
}
/// Description of the runners deployment for a specific GitHub repository.
#[allow(clippy::large_enum_variant)] // We don't mind.
#[derive(Clone, Debug, Deserialize, Serialize)]

View File

@ -2,13 +2,18 @@ use crate::prelude::*;
/// A trie data structure, where each node represents a single fs path component.
///
/// As such, a trie defines a set of fs paths (each being defined by a path within the trie).
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct Trie<'a> {
    /// Child nodes, keyed by the path component that leads to them.
    pub children: HashMap<std::path::Component<'a>, Trie<'a>>,
    /// Number of paths that end in this node.
    pub count:    usize,
}
impl<'a> Trie<'a> {
/// Insert a path into the trie.
pub fn insert(&mut self, path: &'a Path) {
let mut current = self;
for component in path.components() {
@ -17,6 +22,7 @@ impl<'a> Trie<'a> {
current.count += 1;
}
/// Is this node a leaf?
///
/// A node is a leaf when it has no children, i.e. no inserted path continues below it.
pub fn is_leaf(&self) -> bool {
    self.children.is_empty()
}

View File

@ -232,6 +232,15 @@ pub trait IsCommandWrapper {
// let fut = self.borrow_mut_command().output();
// async move { fut.await.anyhow_err() }.boxed()
// }
fn with_current_dir(self, dir: impl AsRef<Path>) -> Self
where Self: Sized {
let mut this = self;
this.current_dir(dir);
this
}
}
impl<T: BorrowMut<tokio::process::Command>> IsCommandWrapper for T {
@ -434,12 +443,6 @@ impl Command {
this.stderr(stderr);
this
}
pub fn with_current_dir(self, dir: impl AsRef<Path>) -> Self {
let mut this = self;
this.current_dir(dir);
this
}
}
pub fn spawn_log_processor(

View File

@ -13,37 +13,79 @@ pub use clean::Clean;
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Git {
/// The path to the repository root above the `working_dir`.
///
/// Many paths that git returns are relative to the repository root.
repo_path: PathBuf,
/// Directory in which commands will be invoked.
/// It might not be the repository root and it makes difference for many commands.
working_dir: PathBuf,
}
/// The `git` program.
#[derive(Clone, Copy, Debug)]
pub struct Git;
impl Program for Git {
type Command = GitCommand;
fn executable_name(&self) -> &'static str {
"git"
}
fn current_directory(&self) -> Option<PathBuf> {
Some(self.working_dir.clone())
}
}
impl Git {
pub async fn new(repo_path: impl Into<PathBuf>) -> Result<Self> {
let repo_path = repo_path.into();
let temp_git = Git { working_dir: repo_path.clone(), repo_path };
let repo_path = temp_git.repository_root().await?;
Ok(Git { repo_path, working_dir: temp_git.working_dir })
/// Create a new, empty git repository in the given directory.
///
/// Returns the prepared command; nothing happens until the caller runs it.
pub fn init(&self, path: impl AsRef<Path>) -> Result<GitCommand> {
    let mut command = self.cmd()?;
    command.arg(Command::Init);
    command.current_dir(path);
    Ok(command)
}
}
/// The wrapper over `Git` program invocation context.
///
/// It is stateful (knowing both repository root and current directory locations), as they both are
/// needed to properly handle relative paths.
/// The wrapper over `Git` program invocation context.
///
/// It is stateful (knowing both repository root and current directory locations), as they both are
/// needed to properly handle relative paths.
///
/// Invariant: `working_dir` is expected to lie within `repository_root`. `Context::new` derives
/// the root by asking git; `Context::new_unchecked` leaves this to the caller.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Context {
    /// The path to the repository root above the `working_dir`.
    ///
    /// Many paths that git returns are relative to the repository root.
    repository_root: PathBuf,
    /// Directory in which commands will be invoked.
    /// It might not be the repository root and it makes difference for many commands.
    working_dir:     PathBuf,
}
impl Context {
/// Initialize a new command invoking git.
///
/// The command is set up to run in this context's working directory.
pub fn cmd(&self) -> Result<GitCommand> {
    let command = Git.cmd()?;
    Ok(command.with_current_dir(&self.working_dir))
}
/// Create a wrapper with explicitly set repository root and working directory.
///
/// The caller is responsible for ensuring that the `working_dir` is a subdirectory of the
/// `repository_root`.
// NOTE(review): this is `async` despite awaiting nothing — presumably to mirror `Context::new`'s
// signature. Callers `.await` it, so dropping `async` would be a breaking change; confirm intent
// before simplifying.
pub async fn new_unchecked(
    repository_root: impl AsRef<Path>,
    working_dir: impl AsRef<Path>,
) -> Self {
    Self {
        repository_root: repository_root.as_ref().to_path_buf(),
        working_dir:     working_dir.as_ref().to_path_buf(),
    }
}
/// Create a `Git` invocation context within a given directory.
///
/// The `working_dir` is the directory in which git commands will be invoked. It is expected to
/// be a part of some git repository.
pub async fn new(working_directory: impl Into<PathBuf>) -> Result<Self> {
    let working_dir = working_directory.into();
    // Bootstrap a context rooted at the working directory itself, solely to be able to ask
    // git where the actual repository root is.
    // TODO: should be nicer, likely instance should be separate from program.
    let bootstrap = Context { repository_root: working_dir.clone(), working_dir };
    let repository_root = bootstrap.repository_root().await?;
    Ok(Context { repository_root, working_dir: bootstrap.working_dir })
}
pub async fn new_current() -> Result<Self> {
Git::new(crate::env::current_dir()?).await
Context::new(crate::env::current_dir()?).await
}
pub async fn head_hash(&self) -> Result<String> {
@ -59,7 +101,7 @@ impl Git {
/// List of files that are different than the compared commit.
#[context("Failed to list files that are different than {}.", compare_against.as_ref())]
pub async fn diff_against(&self, compare_against: impl AsRef<str>) -> Result<Vec<PathBuf>> {
let root = self.repo_path.as_path();
let root = self.repository_root.as_path();
Ok(self
.cmd()?
.args(["diff", "--name-only", compare_against.as_ref()])
@ -100,13 +142,20 @@ impl GitCommand {
/// Git subcommands recognized by this wrapper.
#[derive(Clone, Copy, Debug)]
pub enum Command {
    /// Remove untracked files from the working tree.
    Clean,
    /// Show changes between commits, commit and working tree, etc.
    Diff,
    /// Create an empty Git repository or reinitialize an existing one.
    Init,
}

impl AsRef<OsStr> for Command {
    fn as_ref(&self) -> &OsStr {
        // Map to the subcommand name as spelled on the git command line.
        let name = match self {
            Self::Clean => "clean",
            Self::Diff => "diff",
            Self::Init => "init",
        };
        OsStr::new(name)
    }
}
@ -118,7 +167,7 @@ mod tests {
#[tokio::test]
#[ignore]
async fn repo_root() -> Result {
let git = Git::new(".").await?;
let git = Context::new(".").await?;
let diff = git.repository_root().await?;
println!("{:?}", diff);
Ok(())
@ -127,7 +176,7 @@ mod tests {
#[tokio::test]
#[ignore]
async fn call_diff() -> Result {
let git = Git::new(".").await?;
let git = Context::new(".").await?;
let diff = git.diff_against("origin/develop").await?;
println!("{:?}", diff);
Ok(())

View File

@ -2,7 +2,7 @@ use crate::prelude::*;
use crate::path::trie::Trie;
use crate::program::command::Manipulator;
use crate::programs::Git;
use crate::programs::git;
use std::path::Component;
@ -14,7 +14,7 @@ pub struct DirectoryToClear<'a> {
pub trie: &'a Trie<'a>,
}
/// Run ``git clean -xfd`` but preserve the given paths.
/// Run `git clean -xfd` but preserve the given paths.
///
/// This may involve multiple git clean calls on different subtrees.
/// Given paths can be either absolute or relative. If relative, they are relative to the
@ -22,6 +22,7 @@ pub struct DirectoryToClear<'a> {
pub async fn clean_except_for(
repo_root: impl AsRef<Path>,
paths: impl IntoIterator<Item: AsRef<Path>>,
dry_run: bool,
) -> Result {
let root = repo_root.as_ref().canonicalize()?;
@ -40,26 +41,16 @@ pub async fn clean_except_for(
})
.collect_vec();
let trie = Trie::from_iter(relative_exclusions.iter());
let exclusions = relative_exclusions.into_iter().map(Clean::exclude).collect_vec();
let mut directories_to_clear = vec![DirectoryToClear { prefix: vec![], trie: &trie }];
while let Some(DirectoryToClear { prefix, trie }) = directories_to_clear.pop() {
let current_dir = root.join_iter(&prefix);
let exclusions_in_current_dir =
trie.children.keys().map(|c| Clean::Exclude(c.as_os_str().to_string_lossy().into()));
let git = Git::new(&current_dir).await?;
git.cmd()?.clean().apply_iter(exclusions_in_current_dir).run_ok().await?;
for (child_name, child_trie) in trie.children.iter() {
if !child_trie.is_leaf() {
let mut prefix = prefix.clone();
prefix.push(*child_name);
directories_to_clear.push(DirectoryToClear { prefix, trie: child_trie });
}
}
}
Ok(())
git::Context::new(root)
.await?
.cmd()?
.nice_clean()
.apply_iter(exclusions)
.apply_opt(dry_run.then_some(&Clean::DryRun))
.run_ok()
.await
}
#[derive(Clone, Debug)]
@ -97,6 +88,17 @@ pub enum Clean {
OnlyIgnored,
}
impl Clean {
    /// Build an `Exclude` variant for the given path.
    ///
    /// Each path component is rendered with a leading `/` separator, so e.g. `target/foo`
    /// becomes the pattern `/target/foo`.
    pub fn exclude(path: impl AsRef<Path>) -> Self {
        let pattern: String = path
            .as_ref()
            .components()
            .map(|component| format!("/{}", component.as_os_str().to_string_lossy()))
            .collect();
        Clean::Exclude(pattern)
    }
}
impl Manipulator for Clean {
fn apply<C: IsCommandWrapper + ?Sized>(&self, command: &mut C) {
// fn apply<'a, C: IsCommandWrapper + ?Sized>(&self, c: &'a mut C) -> &'a mut C {
@ -112,3 +114,31 @@ impl Manipulator for Clean {
command.args(args);
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::programs::Git;

    /// Exercise `clean_except_for` on a scratch repository.
    ///
    /// Creates a fresh git repository containing a top-level `target` directory and a nested
    /// `foo/target` file, then cleans everything except `target/foo`.
    #[tokio::test]
    #[ignore] // Touches the real filesystem and invokes git; run manually, like the other git tests.
    async fn test_cleaning() -> Result {
        setup_logging()?;
        // Use the OS temporary directory instead of a hard-coded `C:\temp` path, so the test
        // is portable beyond Windows.
        let dir = std::env::temp_dir().join("test_cleaning");
        crate::fs::tokio::reset_dir(&dir).await?;
        Git.init(&dir)?.run_ok().await?;

        let foo = dir.join("foo");
        let foo_target = foo.join("target");
        crate::fs::tokio::write(&foo_target, "target in foo").await?;

        let target = dir.join("target");
        let target_foo = target.join("foo");
        crate::fs::tokio::write(&target_foo, "foo in target").await?;

        clean_except_for(&dir, vec!["target/foo"], false).await?;
        Ok(())
    }
}

View File

@ -3,17 +3,14 @@ use crate::prelude::*;
pub mod env {
crate::define_env_var! {
/// The Rust toolchain version which was selected by Rustup.
///
/// If set, any cargo invocation will follow this version. Otherwise, Rustup will deduce
/// toolchain to be used and set up this variable for the spawned process.
///
/// Example value: `"nightly-2022-01-20-x86_64-pc-windows-msvc"`.
#[derive(Clone, Copy, Debug)]
pub struct Toolchain;
impl crate::env::Variable for Toolchain {
const NAME: &'static str = "RUSTUP_TOOLCHAIN";
RUSTUP_TOOLCHAIN, String;
}
}

View File

@ -1,7 +1,5 @@
use crate::prelude::*;
use snafu::Snafu;
#[derive(Clone, Copy, Debug)]
@ -26,48 +24,29 @@ impl Program for SevenZip {
vec![]
}
fn handle_exit_status(status: std::process::ExitStatus) -> anyhow::Result<()> {
fn handle_exit_status(status: std::process::ExitStatus) -> Result {
if status.success() {
Ok(())
} else if let Some(code) = status.code() {
Err(ExecutionError::from_exit_code(code).into())
error_from_exit_code(code)
} else {
Err(ExecutionError::Unknown.into())
bail!("Unknown execution error.")
}
}
}
// Cf https://7zip.bugaco.com/7zip/MANUAL/cmdline/exit_codes.htm
#[derive(Snafu, Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
pub enum ExecutionError {
#[snafu(display(
"Warning (Non fatal error(s)). For example, one or more files were locked by some \
other application, so they were not compressed."
))]
Warning,
#[snafu(display("Fatal error"))]
Fatal,
#[snafu(display("Command line error"))]
CommandLine,
#[snafu(display("Not enough memory for operation"))]
NotEnoughMemory,
#[snafu(display("User stopped the process"))]
UserStopped,
#[snafu(display("Unrecognized error code"))]
Unknown,
}
impl ExecutionError {
fn from_exit_code(code: i32) -> Self {
match code {
1 => Self::Warning,
2 => Self::Fatal,
7 => Self::CommandLine,
8 => Self::NotEnoughMemory,
255 => Self::UserStopped,
_ => Self::Unknown,
}
}
/// Translate a 7-Zip process exit code into a descriptive error.
///
/// Always returns `Err`; messages follow the exit codes listed in the 7-Zip manual
/// (cf. https://7zip.bugaco.com/7zip/MANUAL/cmdline/exit_codes.htm).
pub fn error_from_exit_code(code: i32) -> anyhow::Result<()> {
    bail!(match code {
        1 =>
            "Warning (Non fatal error(s)). For example, one or more files were locked by some \
             other application, so they were not compressed.",
        2 => "Fatal error.",
        7 => "Command line error.",
        8 => "Not enough memory for operation.",
        255 => "User stopped the process.",
        _ => "Unrecognized error code.",
    })
}
impl SevenZip {

View File

@ -266,8 +266,8 @@ impl Tar {
#[cfg(test)]
pub mod tests {
use super::*;
use crate::archive::compress_directory;
use crate::archive::extract_to;
use crate::archive::pack_directory_contents;
use crate::log::setup_logging;
#[test]
@ -296,7 +296,7 @@ pub mod tests {
let linked_temp = archive_temp.path().join("linked");
symlink::symlink_dir(temp.path(), &linked_temp)?;
pack_directory_contents(&archive_path, &linked_temp).await?;
compress_directory(&archive_path, &linked_temp).await?;
assert!(archive_path.exists());
assert!(archive_path.metadata()?.len() > 0);

View File

@ -39,7 +39,9 @@ impl VsWhere {
let stdout = command.run_stdout().await?;
let instances = serde_json::from_str::<Vec<InstanceInfo>>(&stdout)?;
Ok(instances.into_iter().next().ok_or(NoMsvcInstallation)?)
instances.into_iter().next().with_context(|| {
format!("No Visual Studio installation found with component {}.", component)
})
}
/// Looks up installation of Visual Studio that has installed
@ -54,10 +56,6 @@ impl VsWhere {
}
}
#[derive(Clone, Copy, Debug, Snafu)]
#[snafu(display("failed to find a MSVC installation"))]
pub struct NoMsvcInstallation;
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct InstanceInfo {
@ -139,7 +137,7 @@ impl From<&Format> for OsString {
}
// cf. https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-community?view=vs-2019&preserve-view=true
#[derive(Clone, Copy, Debug)]
#[derive(Clone, Copy, Debug, Display)]
pub enum Component {
/// MSVC v142 - VS 2019 C++ x64/x86 build tools
CppBuildTools,

View File

@ -10,6 +10,7 @@ byte-unit = { version = "4.0.14", features = ["serde"] }
clap = { version = "3.1.5", features = ["derive", "env", "wrap_help"] }
chrono = "0.4.19"
derivative = "2.2.0"
enso-build-base = { path = "../base" }
enso-build = { path = "../build" }
enso-formatter = { path = "../enso-formatter" }
futures = "0.3.17"

View File

@ -6,9 +6,9 @@ use clap::Args;
use clap::Parser;
use clap::Subcommand;
use derivative::Derivative;
use enso_build_base::extensions::path::display_fmt;
use ide_ci::cache;
use ide_ci::extensions::path::display_fmt;
use ide_ci::models::config::RepoContext;
use ide_ci::github::Repo;
use octocrab::models::RunId;
@ -38,10 +38,10 @@ pub fn default_repo_path() -> Option<PathBuf> {
enso_build::repo::deduce_repository_path().ok()
}
pub fn default_repo_remote() -> RepoContext {
pub fn default_repo_remote() -> Repo {
ide_ci::actions::env::GITHUB_REPOSITORY
.get()
.unwrap_or_else(|_| RepoContext::from_str(DEFAULT_REMOTE_REPOSITORY_FALLBACK).unwrap())
.unwrap_or_else(|_| Repo::from_str(DEFAULT_REMOTE_REPOSITORY_FALLBACK).unwrap())
}
pub fn default_cache_path() -> Option<PathBuf> {
@ -75,6 +75,7 @@ pub trait IsTargetSource {
const SOURCE_NAME: &'static str;
const PATH_NAME: &'static str;
const OUTPUT_PATH_NAME: &'static str;
// const UPLOAD_ASSET_NAME: &'static str;
const RUN_ID_NAME: &'static str;
const RELEASE_DESIGNATOR_NAME: &'static str;
const ARTIFACT_NAME_NAME: &'static str;
@ -94,6 +95,7 @@ macro_rules! source_args_hlp {
const SOURCE_NAME: &'static str = concat!($prefix, "-", "source");
const PATH_NAME: &'static str = concat!($prefix, "-", "path");
const OUTPUT_PATH_NAME: &'static str = concat!($prefix, "-", "output-path");
// const UPLOAD_ASSET_NAME: &'static str = concat!($prefix, "-", "upload-asset");
const RUN_ID_NAME: &'static str = concat!($prefix, "-", "run-id");
const RELEASE_DESIGNATOR_NAME: &'static str = concat!($prefix, "-", "release");
const ARTIFACT_NAME_NAME: &'static str = concat!($prefix, "-", "artifact-name");
@ -156,11 +158,11 @@ pub struct Cli {
/// released versions to generate a new one, or uploading release assets).
/// The argument should follow the format `owner/repo_name`.
#[clap(long, global = true, default_value_t = default_repo_remote(), enso_env())]
pub repo_remote: RepoContext,
pub repo_remote: Repo,
/// The build kind. Affects the default version generation.
#[clap(long, global = true, arg_enum, default_value_t = enso_build::version::BuildKind::Dev, env = crate::BuildKind::NAME)]
pub build_kind: enso_build::version::BuildKind,
#[clap(long, global = true, arg_enum, default_value_t = enso_build::version::Kind::Dev, env = *crate::ENSO_BUILD_KIND)]
pub build_kind: enso_build::version::Kind,
/// Platform to target. Currently cross-compilation is enabled only for GUI/IDE (without
/// Project Manager) on platforms where Electron Builder supports this.
@ -222,6 +224,9 @@ pub struct Source<Target: IsTargetSource> {
#[clap(flatten)]
pub output_path: OutputPath<Target>,
//
// #[clap(name = Target::UPLOAD_ASSET_NAME, long)]
// pub upload_asset: bool,
}
/// Discriminator denoting how some target artifact should be obtained.

View File

@ -4,6 +4,9 @@ use crate::prelude::*;
#[derive(Clone, Copy, Debug, Default, clap::Args)]
pub struct Options {
/// Do not perform the action, just print what would be deleted.
#[clap(long)]
pub dry_run: bool,
/// Clean also the build script's cache (located in the user's local application data subtree).
#[clap(long)]
pub cache: bool,

View File

@ -37,7 +37,7 @@ pub enum Command {
Upload {
#[clap(flatten)]
params: BuildInput,
#[clap(long, env = enso_build::env::ReleaseId::NAME)]
#[clap(long, env = *enso_build::env::ENSO_RELEASE_ID)]
release_id: ReleaseId,
},
/// Like `Build` but automatically starts the IDE.

View File

@ -6,16 +6,21 @@ use clap::Subcommand;
#[derive(Args, Clone, Debug)]
pub struct DeployToEcr {
pub struct DeployRuntime {
#[clap(long, default_value = enso_build::aws::ecr::runtime::NAME, enso_env())]
pub ecr_repository: String,
}
/// Arguments for the GUI deployment action.
///
/// Currently the action takes no parameters.
#[derive(Args, Clone, Copy, Debug)]
pub struct DeployGui {}
#[derive(Subcommand, Clone, Debug)]
pub enum Action {
CreateDraft,
/// Build the runtime image and push it to ECR.
DeployToEcr(DeployToEcr),
DeployRuntime(DeployRuntime),
/// Upload the GUI to the S3 Bucket and notify.
DeployGui(DeployGui),
Publish,
}

View File

@ -7,19 +7,19 @@
use enso_build_cli::prelude::*;
use enso_build::setup_octocrab;
use ide_ci::github::Repo;
use ide_ci::io::web::handle_error_response;
use ide_ci::log::setup_logging;
use ide_ci::models::config::RepoContext;
#[tokio::main]
async fn main() -> Result {
setup_logging()?;
let repo = RepoContext::from_str("enso-org/enso")?;
let octo = setup_octocrab().await?;
let repo = Repo::from_str("enso-org/enso")?.handle(&octo);
let releases = repo.all_releases(&octo).await?;
let releases = repo.all_releases().await?;
let draft_releases = releases.into_iter().filter(|r| r.draft);
for release in draft_releases {
let id = release.id;

View File

@ -5,6 +5,7 @@ use crate::ci_gen::job::plain_job;
use crate::ci_gen::job::plain_job_customized;
use crate::ci_gen::job::RunsOn;
use enso_build::version;
use ide_ci::actions::workflow::definition::checkout_repo_step;
use ide_ci::actions::workflow::definition::is_non_windows_runner;
use ide_ci::actions::workflow::definition::is_windows_runner;
@ -51,6 +52,8 @@ pub const TARGETED_SYSTEMS: [OS; 3] = [OS::Windows, OS::Linux, OS::MacOS];
pub const DEFAULT_BRANCH_NAME: &str = "develop";
pub const RELEASE_CONCURRENCY_GROUP: &str = "release";
/// Secrets set up in our organization.
///
/// To manage, see: https://github.com/organizations/enso-org/settings/secrets/actions
@ -71,7 +74,6 @@ pub mod secret {
pub const APPLE_NOTARIZATION_USERNAME: &str = "APPLE_NOTARIZATION_USERNAME";
pub const APPLE_NOTARIZATION_PASSWORD: &str = "APPLE_NOTARIZATION_PASSWORD";
// === Windows Code Signing ===
/// Name of the GitHub Actions secret that stores path to the Windows code signing certificate
/// within the runner.
@ -80,8 +82,15 @@ pub mod secret {
/// Name of the GitHub Actions secret that stores password to the Windows code signing
/// certificate.
pub const WINDOWS_CERT_PASSWORD: &str = "MICROSOFT_CODE_SIGNING_CERT_PASSWORD";
// === Github Token ===
/// A token created for the `enso-ci` user.
pub const CI_PRIVATE_TOKEN: &str = "CI_PRIVATE_TOKEN";
}
/// Concurrency settings shared by the release workflows.
///
/// All release workflows use the same group ([`RELEASE_CONCURRENCY_GROUP`]) so that they
/// serialize with each other.
pub fn release_concurrency() -> Concurrency {
    Concurrency::new(RELEASE_CONCURRENCY_GROUP)
}
impl RunsOn for DeluxeRunner {
fn runs_on(&self) -> Vec<RunnerLabel> {
@ -247,23 +256,24 @@ pub fn nightly() -> Result<Workflow> {
..default()
};
let linux_only = OS::Linux;
let concurrency_group = "release";
let mut workflow = Workflow {
on,
name: "Nightly Release".into(),
concurrency: Some(Concurrency::new(concurrency_group)),
concurrency: Some(release_concurrency()),
..default()
};
let prepare_job_id = workflow.add::<DraftRelease>(linux_only);
let build_wasm_job_id = workflow.add::<job::BuildWasm>(linux_only);
add_release_steps(&mut workflow, version::Kind::Nightly)?;
Ok(workflow)
}
fn add_release_steps(workflow: &mut Workflow, kind: version::Kind) -> Result {
let prepare_job_id = workflow.add::<DraftRelease>(PRIMARY_OS);
let build_wasm_job_id = workflow.add::<job::BuildWasm>(PRIMARY_OS);
let mut packaging_job_ids = vec![];
// Assumed, because Linux is necessary to deploy ECR runtime image.
assert!(TARGETED_SYSTEMS.contains(&OS::Linux));
for os in TARGETED_SYSTEMS {
let backend_job_id = workflow.add_dependent::<job::UploadBackend>(os, [&prepare_job_id]);
let build_ide_job_id = workflow.add_dependent::<UploadIde>(os, [
@ -271,12 +281,20 @@ pub fn nightly() -> Result<Workflow> {
&backend_job_id,
&build_wasm_job_id,
]);
packaging_job_ids.push(build_ide_job_id);
packaging_job_ids.push(build_ide_job_id.clone());
// Deploying our release to cloud needs to be done only once.
// We could do this on any platform, but we choose Linux, because it's most easily
// available and performant.
if os == OS::Linux {
let upload_runtime_job_id = workflow
.add_dependent::<job::UploadRuntimeToEcr>(os, [&prepare_job_id, &backend_job_id]);
let runtime_requirements = [&prepare_job_id, &backend_job_id];
let upload_runtime_job_id =
workflow.add_dependent::<job::DeployRuntime>(os, runtime_requirements);
packaging_job_ids.push(upload_runtime_job_id);
let gui_requirements = [build_ide_job_id];
let deploy_gui_job_id = workflow.add_dependent::<job::DeployGui>(os, gui_requirements);
packaging_job_ids.push(deploy_gui_job_id);
}
}
@ -285,11 +303,28 @@ pub fn nightly() -> Result<Workflow> {
packaging_job_ids
};
let _publish_job_id = workflow.add_dependent::<PublishRelease>(linux_only, publish_deps);
let global_env = [("ENSO_BUILD_KIND", "nightly"), ("RUST_BACKTRACE", "full")];
let _publish_job_id = workflow.add_dependent::<PublishRelease>(PRIMARY_OS, publish_deps);
let global_env = [(*crate::ENSO_BUILD_KIND, kind.as_ref()), ("RUST_BACKTRACE", "full")];
for (var_name, value) in global_env {
workflow.env(var_name, value);
}
Ok(())
}
/// Generate the manually-dispatched "Release Candidate" workflow.
///
/// The workflow shares the release concurrency group and reuses the common release steps
/// with the RC version kind.
pub fn release_candidate() -> Result<Workflow> {
    let mut workflow = Workflow {
        on: Event { workflow_dispatch: Some(default()), ..default() },
        name: "Release Candidate".into(),
        concurrency: Some(release_concurrency()),
        ..default()
    };
    add_release_steps(&mut workflow, version::Kind::Rc)?;
    Ok(workflow)
}
@ -378,5 +413,6 @@ pub fn generate(repo_root: &enso_build::paths::generated::RepoRootGithubWorkflow
repo_root.scala_new_yml.write_as_yaml(&backend()?)?;
repo_root.gui_yml.write_as_yaml(&gui()?)?;
repo_root.benchmark_yml.write_as_yaml(&benchmark()?)?;
repo_root.release_yml.write_as_yaml(&release_candidate()?)?;
Ok(())
}

View File

@ -149,11 +149,12 @@ impl JobArchetype for UploadBackend {
}
#[derive(Clone, Copy, Debug)]
pub struct UploadRuntimeToEcr;
impl JobArchetype for UploadRuntimeToEcr {
pub struct DeployRuntime;
impl JobArchetype for DeployRuntime {
fn job(os: OS) -> Job {
plain_job_customized(&os, "Upload Runtime to ECR", "release deploy-to-ecr", |step| {
plain_job_customized(&os, "Upload Runtime to ECR", "release deploy-runtime", |step| {
let step = step
.with_secret_exposed_as("CI_PRIVATE_TOKEN", "GITHUB_TOKEN")
.with_env("ENSO_BUILD_ECR_REPOSITORY", enso_build::aws::ecr::runtime::NAME)
.with_secret_exposed_as(secret::ECR_PUSH_RUNTIME_ACCESS_KEY_ID, "AWS_ACCESS_KEY_ID")
.with_secret_exposed_as(
@ -166,6 +167,23 @@ impl JobArchetype for UploadRuntimeToEcr {
}
}
/// Job archetype that uploads the built GUI to the S3 bucket.
#[derive(Clone, Copy, Debug)]
pub struct DeployGui;
impl JobArchetype for DeployGui {
    fn job(os: OS) -> Job {
        plain_job_customized(&os, "Upload GUI to S3", "release deploy-gui", |step| {
            let step = step
                // Use the named secret constant rather than a duplicated string literal,
                // consistently with the other `secret::` references below.
                .with_secret_exposed_as(secret::CI_PRIVATE_TOKEN, "GITHUB_TOKEN")
                .with_secret_exposed_as(secret::ARTEFACT_S3_ACCESS_KEY_ID, "AWS_ACCESS_KEY_ID")
                .with_secret_exposed_as(
                    secret::ARTEFACT_S3_SECRET_ACCESS_KEY,
                    "AWS_SECRET_ACCESS_KEY",
                );
            vec![step]
        })
    }
}
pub fn expose_os_specific_signing_secret(os: OS, step: Step) -> Step {
match os {
OS::Windows => step
@ -178,11 +196,23 @@ pub fn expose_os_specific_signing_secret(os: OS, step: Step) -> Step {
&enso_build::ide::web::env::WIN_CSC_KEY_PASSWORD,
),
OS::MacOS => step
.with_secret_exposed_as(secret::APPLE_CODE_SIGNING_CERT, "CSC_LINK")
.with_secret_exposed_as(secret::APPLE_CODE_SIGNING_CERT_PASSWORD, "CSC_KEY_PASSWORD")
.with_secret_exposed_as(secret::APPLE_NOTARIZATION_USERNAME, "APPLEID")
.with_secret_exposed_as(secret::APPLE_NOTARIZATION_PASSWORD, "APPLEIDPASS")
.with_env("CSC_IDENTITY_AUTO_DISCOVERY", "true"),
.with_secret_exposed_as(
secret::APPLE_CODE_SIGNING_CERT,
&enso_build::ide::web::env::CSC_LINK,
)
.with_secret_exposed_as(
secret::APPLE_CODE_SIGNING_CERT_PASSWORD,
&enso_build::ide::web::env::CSC_KEY_PASSWORD,
)
.with_secret_exposed_as(
secret::APPLE_NOTARIZATION_USERNAME,
&enso_build::ide::web::env::APPLEID,
)
.with_secret_exposed_as(
secret::APPLE_NOTARIZATION_PASSWORD,
&enso_build::ide::web::env::APPLEIDPASS,
)
.with_env(&enso_build::ide::web::env::CSC_IDENTITY_AUTO_DISCOVERY, "true"),
_ => step,
}
}

View File

@ -33,8 +33,6 @@ pub mod prelude {
use crate::prelude::*;
use std::future::join;
use ide_ci::env::Variable;
use crate::arg::java_gen;
use crate::arg::release::Action;
use crate::arg::BuildJob;
@ -80,10 +78,9 @@ use enso_build::source::WithDestination;
use futures_util::future::try_join;
use ide_ci::actions::workflow::is_in_env;
use ide_ci::cache::Cache;
use ide_ci::define_env_var;
use ide_ci::fs::remove_if_exists;
use ide_ci::github::release::upload_asset;
use ide_ci::global;
use ide_ci::log::setup_logging;
use ide_ci::ok_ready_boxed;
use ide_ci::programs::cargo;
use ide_ci::programs::git::clean;
@ -99,11 +96,8 @@ fn resolve_artifact_name(input: Option<String>, project: &impl IsTarget) -> Stri
input.unwrap_or_else(|| project.artifact_name())
}
#[derive(Clone, Copy, Debug)]
pub struct BuildKind;
impl Variable for BuildKind {
const NAME: &'static str = "ENSO_BUILD_KIND";
type Value = enso_build::version::BuildKind;
define_env_var! {
ENSO_BUILD_KIND, enso_build::version::Kind;
}
/// The basic, common information available in this application.
@ -125,22 +119,18 @@ impl Processor {
/// Setup common build environment information based on command line input and local
/// environment.
pub async fn new(cli: &Cli) -> Result<Self> {
// let build_kind = match &cli.target {
// Target::Release(release) => release.kind,
// _ => enso_build::version::BuildKind::Dev,
// };
let absolute_repo_path = cli.repo_path.absolutize()?;
let octocrab = setup_octocrab().await?;
let versions = enso_build::version::deduce_versions(
&octocrab,
let remote_repo = cli.repo_remote.handle(&octocrab);
let versions = enso_build::version::deduce_or_generate(
Ok(&remote_repo),
cli.build_kind,
Ok(&cli.repo_remote),
&absolute_repo_path,
)
.await?;
let mut triple = TargetTriple::new(versions);
triple.os = cli.target_os;
triple.versions.publish()?;
triple.versions.publish().await?;
let context = BuildContext {
inner: project::Context {
cache: Cache::new(&cli.cache_path).await?,
@ -215,16 +205,8 @@ impl Processor {
let release = self.resolve_release_designator(designator);
release
.and_then_sync(move |release| {
Ok(ReleaseSource {
repository,
asset_id: target
.find_asset(release.assets)
.context(format!(
"Failed to find a relevant asset in the release '{}'.",
release.tag_name
))?
.id,
})
let asset = target.find_asset(&release)?;
Ok(ReleaseSource { repository, asset_id: asset.id })
})
.boxed()
}
@ -487,13 +469,15 @@ impl Processor {
arg::ide::Command::Build { params } => self.build_ide(params).void_ok().boxed(),
arg::ide::Command::Upload { params, release_id } => {
let build_job = self.build_ide(params);
let remote_repo = self.remote_repo.clone();
let client = self.octocrab.client.clone();
let release = ide_ci::github::release::ReleaseHandle::new(
&self.octocrab,
self.remote_repo.clone(),
release_id,
);
async move {
let artifacts = build_job.await?;
upload_asset(&remote_repo, &client, release_id, &artifacts.image).await?;
upload_asset(&remote_repo, &client, release_id, &artifacts.image_checksum)
.await?;
release.upload_asset_file(&artifacts.image).await?;
release.upload_asset_file(&artifacts.image_checksum).await?;
Ok(())
}
.boxed()
@ -764,21 +748,32 @@ impl WatchResolvable for Gui {
}
}
#[tracing::instrument(err)]
pub async fn main_internal(config: enso_build::config::Config) -> Result {
setup_logging()?;
#[tracing::instrument(err, skip(config))]
pub async fn main_internal(config: Option<enso_build::config::Config>) -> Result {
trace!("Starting the build process.");
let config = config.unwrap_or_else(|| {
warn!("No config provided, using default config.");
enso_build::config::Config::default()
});
trace!("Creating the build context.");
// Setup that affects Cli parser construction.
if let Some(wasm_size_limit) = config.wasm_size_limit {
crate::arg::wasm::initialize_default_wasm_size_limit(wasm_size_limit)?;
}
debug!("Initial configuration for the CLI driver: {config:#?}");
let cli = Cli::parse();
debug!("Parsed CLI arguments: {cli:#?}");
if !cli.skip_version_check {
config.check_programs().await?;
// Let's be helpful!
let error_message = "Program requirements were not fulfilled. Please do one of the \
following:\n * Install the tools in the required versions.\n * Update the requirements in \
`build-config.yaml`.\n * Run the build with `--skip-version-check` flag.";
config.check_programs().await.context(error_message)?;
}
// TRANSITION: Previous Engine CI job used to clone these both repositories side-by-side.
@ -800,15 +795,17 @@ pub async fn main_internal(config: enso_build::config::Config) -> Result {
Target::Ide(ide) => ctx.handle_ide(ide).await?,
// TODO: consider if out-of-source ./dist should be removed
Target::GitClean(options) => {
let crate::arg::git_clean::Options { dry_run, cache, build_script } = options;
let mut exclusions = vec![".idea"];
if !options.build_script {
if !build_script {
exclusions.push("target/enso-build");
}
let git_clean = clean::clean_except_for(&ctx.repo_root, exclusions);
let git_clean = clean::clean_except_for(&ctx.repo_root, exclusions, dry_run);
let clean_cache = async {
if options.cache {
ide_ci::fs::tokio::remove_dir_if_exists(ctx.cache.path()).await?;
if cache {
ide_ci::fs::tokio::perhaps_remove_dir_if_exists(dry_run, ctx.cache.path())
.await?;
}
Result::Ok(())
};
@ -842,24 +839,26 @@ pub async fn main_internal(config: enso_build::config::Config) -> Result {
let prettier = prettier::write(&ctx.repo_root);
let our_formatter =
enso_formatter::process_path(&ctx.repo_root, enso_formatter::Action::Format);
// our_formatter.await?;
// prettier.await?;
let (r1, r2) = join!(prettier, our_formatter).await;
r1?;
r2?;
}
Target::Release(release) => match release.action {
Action::CreateDraft => {
enso_build::release::create_release(&ctx).await?;
enso_build::release::draft_a_new_release(&ctx).await?;
}
Action::DeployToEcr(args) => {
Action::DeployRuntime(args) => {
enso_build::release::deploy_to_ecr(&ctx, args.ecr_repository).await?;
enso_build::release::dispatch_cloud_image_build_action(
enso_build::repo::cloud::build_image_workflow_dispatch_input(
&ctx.octocrab,
&ctx.triple.versions.version,
)
.await?;
}
Action::DeployGui(args) => {
let crate::arg::release::DeployGui {} = args;
enso_build::release::upload_gui_to_cloud_good(&ctx).await?;
}
Action::Publish => {
enso_build::release::publish_release(&ctx).await?;
}
@ -893,39 +892,12 @@ pub async fn main_internal(config: enso_build::config::Config) -> Result {
Ok(())
}
pub fn lib_main(config: enso_build::config::Config) -> Result {
pub fn lib_main(config: Option<enso_build::config::Config>) -> Result {
trace!("Starting the tokio runtime.");
let rt = tokio::runtime::Runtime::new()?;
trace!("Entering main.");
rt.block_on(async { main_internal(config).await })?;
rt.shutdown_timeout(Duration::from_secs(60 * 30));
info!("Successfully ending.");
Ok(())
}
// #[cfg(test)]
// mod tests {
// use super::*;
// use enso_build::version::Versions;
// use ide_ci::models::config::RepoContext;
//
// #[tokio::test]
// async fn resolving_release() -> Result {
// setup_logging()?;
// let octocrab = Octocrab::default();
// let context = Processor {
// context: BuildContext {
// remote_repo: RepoContext::from_str("enso-org/enso")?,
// triple: TargetTriple::new(Versions::new(Version::new(2022, 1, 1))),
// source_root: r"H:/NBO/enso5".into(),
// octocrab,
// cache: Cache::new_default().await?,
// },
// };
//
// dbg!(
// context.resolve_release_source(Backend { target_os: TARGET_OS },
// "latest".into()).await )?;
//
// Ok(())
// }
// }

View File

@ -6,8 +6,16 @@
use enso_build::prelude::*;
use enso_build::config::Config;
use enso_build::config::ConfigRaw;
fn main() -> Result {
enso_build_cli::lib_main(Default::default())
setup_logging()?;
trace!("Starting CLI driver, cwd is {}", ide_ci::env::current_dir()?.display());
let build_config_yaml = include_str!("../../../build-config.yaml");
let build_config_raw = serde_yaml::from_str::<ConfigRaw>(build_config_yaml)?;
let build_config = Config::try_from(build_config_raw)?;
enso_build_cli::lib_main(Some(build_config))
}

Some files were not shown because too many files have changed in this diff Show More