Shaders precompilation (#4003)

This commit is contained in:
Wojciech Daniło 2023-01-27 01:09:09 +01:00 committed by GitHub
parent ca2f108513
commit da84e34b9a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
201 changed files with 8877 additions and 5492 deletions

1
.eslintignore Normal file
View File

@ -0,0 +1 @@
node_modules/

View File

@ -1,3 +1,6 @@
# This file is auto-generated. Do not edit it manually!
# Edit the enso_build::ci_gen module instead and run `cargo run --package enso-build-ci-gen`.
name: Benchmark Engine
on:
push:

View File

@ -1,3 +1,6 @@
# This file is auto-generated. Do not edit it manually!
# Edit the enso_build::ci_gen module instead and run `cargo run --package enso-build-ci-gen`.
name: Changelog
on:
pull_request:

View File

@ -1,3 +1,6 @@
# This file is auto-generated. Do not edit it manually!
# Edit the enso_build::ci_gen module instead and run `cargo run --package enso-build-ci-gen`.
name: GUI CI
on:
push:
@ -6,7 +9,7 @@ on:
pull_request: {}
workflow_dispatch: {}
jobs:
enso-build-cli-ci-gen-job-build-backend-linux:
enso-build-ci-gen-job-build-backend-linux:
name: Build Backend (linux)
runs-on:
- self-hosted
@ -68,7 +71,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-build-backend-macos:
enso-build-ci-gen-job-build-backend-macos:
name: Build Backend (macos)
runs-on:
- macos-latest
@ -128,7 +131,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-build-backend-windows:
enso-build-ci-gen-job-build-backend-windows:
name: Build Backend (windows)
runs-on:
- self-hosted
@ -190,7 +193,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-build-wasm-linux:
enso-build-ci-gen-job-build-wasm-linux:
name: Build GUI (WASM) (linux)
runs-on:
- self-hosted
@ -252,7 +255,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-build-wasm-macos:
enso-build-ci-gen-job-build-wasm-macos:
name: Build GUI (WASM) (macos)
runs-on:
- macos-latest
@ -312,7 +315,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-build-wasm-windows:
enso-build-ci-gen-job-build-wasm-windows:
name: Build GUI (WASM) (windows)
runs-on:
- self-hosted
@ -374,7 +377,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-cancel-workflow-linux:
enso-build-ci-gen-job-cancel-workflow-linux:
name: Cancel Previous Runs
runs-on:
- ubuntu-latest
@ -383,7 +386,7 @@ jobs:
uses: styfle/cancel-workflow-action@0.9.1
with:
access_token: ${{ github.token }}
enso-build-cli-ci-gen-job-lint-linux:
enso-build-ci-gen-job-lint-linux:
name: Lint (linux)
runs-on:
- self-hosted
@ -445,7 +448,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-native-test-linux:
enso-build-ci-gen-job-native-test-linux:
name: Native GUI tests (linux)
runs-on:
- self-hosted
@ -507,11 +510,11 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-package-ide-linux:
enso-build-ci-gen-job-package-ide-linux:
name: Package IDE (linux)
needs:
- enso-build-cli-ci-gen-job-build-backend-linux
- enso-build-cli-ci-gen-job-build-wasm-linux
- enso-build-ci-gen-job-build-backend-linux
- enso-build-ci-gen-job-build-wasm-linux
runs-on:
- self-hosted
- Linux
@ -572,11 +575,11 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-package-ide-macos:
enso-build-ci-gen-job-package-ide-macos:
name: Package IDE (macos)
needs:
- enso-build-cli-ci-gen-job-build-backend-macos
- enso-build-cli-ci-gen-job-build-wasm-linux
- enso-build-ci-gen-job-build-backend-macos
- enso-build-ci-gen-job-build-wasm-linux
runs-on:
- macos-latest
steps:
@ -640,11 +643,11 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-package-ide-windows:
enso-build-ci-gen-job-package-ide-windows:
name: Package IDE (windows)
needs:
- enso-build-cli-ci-gen-job-build-backend-windows
- enso-build-cli-ci-gen-job-build-wasm-linux
- enso-build-ci-gen-job-build-backend-windows
- enso-build-ci-gen-job-build-wasm-linux
runs-on:
- self-hosted
- Windows
@ -707,7 +710,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-wasm-test-linux:
enso-build-ci-gen-job-wasm-test-linux:
name: WASM GUI tests (linux)
runs-on:
- self-hosted

View File

@ -1,3 +1,6 @@
# This file is auto-generated. Do not edit it manually!
# Edit the enso_build::ci_gen module instead and run `cargo run --package enso-build-ci-gen`.
name: Nightly Release
on:
schedule:

View File

@ -1,3 +1,6 @@
# This file is auto-generated. Do not edit it manually!
# Edit the enso_build::ci_gen module instead and run `cargo run --package enso-build-ci-gen`.
name: Generate a new version
on:
workflow_dispatch:
@ -18,7 +21,7 @@ on:
required: true
type: string
jobs:
enso-build-cli-ci-gen-promote-release-job-linux:
enso-build-ci-gen-promote-release-job-linux:
name: Promote release (linux)
runs-on:
- self-hosted
@ -88,10 +91,10 @@ jobs:
release:
name: Release
needs:
- enso-build-cli-ci-gen-promote-release-job-linux
- enso-build-ci-gen-promote-release-job-linux
uses: ./.github/workflows/release.yml
with:
version: ${{ needs.enso-build-cli-ci-gen-promote-release-job-linux.outputs.ENSO_VERSION }}
version: ${{ needs.enso-build-ci-gen-promote-release-job-linux.outputs.ENSO_VERSION }}
secrets: inherit
env:
ENSO_BUILD_SKIP_VERSION_CHECK: "true"

View File

@ -1,3 +1,6 @@
# This file is auto-generated. Do not edit it manually!
# Edit the enso_build::ci_gen module instead and run `cargo run --package enso-build-ci-gen`.
name: Release
on:
workflow_dispatch:
@ -13,7 +16,7 @@ on:
required: true
type: string
jobs:
enso-build-cli-ci-gen-draft-release-linux:
enso-build-ci-gen-draft-release-linux:
name: Create a release draft.
runs-on:
- self-hosted
@ -61,7 +64,7 @@ jobs:
outputs:
ENSO_RELEASE_ID: ${{ steps.prepare.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ steps.prepare.outputs.ENSO_VERSION }}
enso-build-cli-ci-gen-job-build-wasm-linux:
enso-build-ci-gen-job-build-wasm-linux:
name: Build GUI (WASM) (linux)
runs-on:
- self-hosted
@ -123,10 +126,10 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-deploy-gui-linux:
enso-build-ci-gen-job-deploy-gui-linux:
name: Upload GUI to S3 (linux)
needs:
- enso-build-cli-ci-gen-upload-ide-linux
- enso-build-ci-gen-upload-ide-linux
runs-on:
- self-hosted
- Linux
@ -189,11 +192,11 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-deploy-runtime-linux:
enso-build-ci-gen-job-deploy-runtime-linux:
name: Upload Runtime to ECR (linux)
needs:
- enso-build-cli-ci-gen-draft-release-linux
- enso-build-cli-ci-gen-job-upload-backend-linux
- enso-build-ci-gen-draft-release-linux
- enso-build-ci-gen-job-upload-backend-linux
runs-on:
- self-hosted
- Linux
@ -243,8 +246,8 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.ECR_PUSH_RUNTIME_ACCESS_KEY_ID }}
AWS_DEFAULT_REGION: eu-west-1
AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_PUSH_RUNTIME_SECRET_ACCESS_KEY }}
ENSO_BUILD_ECR_REPOSITORY: runtime
GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }}
crate_ECR_REPOSITORY: runtime
timeout-minutes: 360
- if: failure() && runner.os == 'Windows'
name: List files if failed (Windows)
@ -259,12 +262,12 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-cli-ci-gen-job-upload-backend-linux:
ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-ci-gen-job-upload-backend-linux:
name: Upload Backend (linux)
needs:
- enso-build-cli-ci-gen-draft-release-linux
- enso-build-ci-gen-draft-release-linux
runs-on:
- self-hosted
- Linux
@ -326,12 +329,12 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-cli-ci-gen-job-upload-backend-macos:
ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-ci-gen-job-upload-backend-macos:
name: Upload Backend (macos)
needs:
- enso-build-cli-ci-gen-draft-release-linux
- enso-build-ci-gen-draft-release-linux
runs-on:
- macos-latest
steps:
@ -391,12 +394,12 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-cli-ci-gen-job-upload-backend-windows:
ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-ci-gen-job-upload-backend-windows:
name: Upload Backend (windows)
needs:
- enso-build-cli-ci-gen-draft-release-linux
- enso-build-ci-gen-draft-release-linux
runs-on:
- self-hosted
- Windows
@ -458,17 +461,17 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-cli-ci-gen-publish-release-linux:
ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-ci-gen-publish-release-linux:
name: Publish release (linux)
needs:
- enso-build-cli-ci-gen-draft-release-linux
- enso-build-cli-ci-gen-job-deploy-gui-linux
- enso-build-cli-ci-gen-job-deploy-runtime-linux
- enso-build-cli-ci-gen-upload-ide-linux
- enso-build-cli-ci-gen-upload-ide-macos
- enso-build-cli-ci-gen-upload-ide-windows
- enso-build-ci-gen-draft-release-linux
- enso-build-ci-gen-job-deploy-gui-linux
- enso-build-ci-gen-job-deploy-runtime-linux
- enso-build-ci-gen-upload-ide-linux
- enso-build-ci-gen-upload-ide-macos
- enso-build-ci-gen-upload-ide-windows
runs-on:
- self-hosted
- Linux
@ -533,14 +536,14 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.ARTEFACT_S3_ACCESS_KEY_ID }}
AWS_REGION: us-west-1
AWS_SECRET_ACCESS_KEY: ${{ secrets.ARTEFACT_S3_SECRET_ACCESS_KEY }}
ENSO_RELEASE_ID: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-cli-ci-gen-upload-ide-linux:
ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-ci-gen-upload-ide-linux:
name: Build IDE (linux)
needs:
- enso-build-cli-ci-gen-draft-release-linux
- enso-build-cli-ci-gen-job-build-wasm-linux
- enso-build-cli-ci-gen-job-upload-backend-linux
- enso-build-ci-gen-draft-release-linux
- enso-build-ci-gen-job-build-wasm-linux
- enso-build-ci-gen-job-upload-backend-linux
runs-on:
- self-hosted
- Linux
@ -602,14 +605,14 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-cli-ci-gen-upload-ide-macos:
ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-ci-gen-upload-ide-macos:
name: Build IDE (macos)
needs:
- enso-build-cli-ci-gen-draft-release-linux
- enso-build-cli-ci-gen-job-build-wasm-linux
- enso-build-cli-ci-gen-job-upload-backend-macos
- enso-build-ci-gen-draft-release-linux
- enso-build-ci-gen-job-build-wasm-linux
- enso-build-ci-gen-job-upload-backend-macos
runs-on:
- macos-latest
steps:
@ -674,14 +677,14 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-cli-ci-gen-upload-ide-windows:
ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
enso-build-ci-gen-upload-ide-windows:
name: Build IDE (windows)
needs:
- enso-build-cli-ci-gen-draft-release-linux
- enso-build-cli-ci-gen-job-build-wasm-linux
- enso-build-cli-ci-gen-job-upload-backend-windows
- enso-build-ci-gen-draft-release-linux
- enso-build-ci-gen-job-build-wasm-linux
- enso-build-ci-gen-job-upload-backend-windows
runs-on:
- self-hosted
- Windows
@ -745,8 +748,8 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
ENSO_RELEASE_ID: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID }}
ENSO_VERSION: ${{ needs.enso-build-ci-gen-draft-release-linux.outputs.ENSO_VERSION }}
env:
ENSO_BUILD_SKIP_VERSION_CHECK: "true"
ENSO_EDITION: ${{ inputs.version }}

View File

@ -1,3 +1,6 @@
# This file is auto-generated. Do not edit it manually!
# Edit the enso_build::ci_gen module instead and run `cargo run --package enso-build-ci-gen`.
name: Engine CI
on:
push:
@ -6,7 +9,7 @@ on:
pull_request: {}
workflow_dispatch: {}
jobs:
enso-build-cli-ci-gen-job-cancel-workflow-linux:
enso-build-ci-gen-job-cancel-workflow-linux:
name: Cancel Previous Runs
runs-on:
- ubuntu-latest
@ -15,7 +18,7 @@ jobs:
uses: styfle/cancel-workflow-action@0.9.1
with:
access_token: ${{ github.token }}
enso-build-cli-ci-gen-job-ci-check-backend-linux:
enso-build-ci-gen-job-ci-check-backend-linux:
name: Engine (linux)
runs-on:
- self-hosted
@ -95,7 +98,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-ci-check-backend-macos:
enso-build-ci-gen-job-ci-check-backend-macos:
name: Engine (macos)
runs-on:
- macos-latest
@ -173,7 +176,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 360
enso-build-cli-ci-gen-job-ci-check-backend-windows:
enso-build-ci-gen-job-ci-check-backend-windows:
name: Engine (windows)
runs-on:
- self-hosted

150
.github/workflows/shader-tools.yml vendored Normal file
View File

@ -0,0 +1,150 @@
# This file is auto-generated. Do not edit it manually!
# Edit the build\shader-tools\src\ci.rs module instead and run `cargo run --package enso-build-ci-gen`.
name: Package Tools
on:
workflow_dispatch: {}
jobs:
run-create-linux-latest:
name: Run create (LinuxLatest)
runs-on:
- ubuntu-latest
steps:
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
shell: cmd
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
with:
clean: false
submodules: recursive
- id: step_0
run: cargo run --package enso-build-shader-tools --bin create
env:
GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }}
timeout-minutes: 360
outputs:
ENSO_RELEASE_ID: ${{ steps.step_0.outputs.ENSO_RELEASE_ID }}
timeout-minutes: 360
run-package-linux-latest:
name: Run package (LinuxLatest)
needs:
- run-create-linux-latest
runs-on:
- ubuntu-latest
steps:
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
shell: cmd
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
with:
clean: false
submodules: recursive
- id: step_1
run: cargo run --package enso-build-shader-tools --bin package
env:
GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{ needs.run-create-linux-latest.outputs.ENSO_RELEASE_ID }}
timeout-minutes: 360
run-package-mac-os-latest:
name: Run package (MacOSLatest)
needs:
- run-create-linux-latest
runs-on:
- macos-latest
steps:
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
shell: cmd
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
with:
clean: false
submodules: recursive
- id: step_3
run: cargo run --package enso-build-shader-tools --bin package
env:
GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{ needs.run-create-linux-latest.outputs.ENSO_RELEASE_ID }}
timeout-minutes: 360
run-package-windows-latest:
name: Run package (WindowsLatest)
needs:
- run-create-linux-latest
runs-on:
- windows-latest
steps:
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
shell: cmd
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
with:
clean: false
submodules: recursive
- id: step_2
run: cargo run --package enso-build-shader-tools --bin package
env:
GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{ needs.run-create-linux-latest.outputs.ENSO_RELEASE_ID }}
timeout-minutes: 360
run-publish-linux-latest:
name: Run publish (LinuxLatest)
needs:
- run-create-linux-latest
- run-package-linux-latest
- run-package-mac-os-latest
- run-package-windows-latest
runs-on:
- ubuntu-latest
steps:
- if: runner.os == 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
shell: cmd
- if: runner.os != 'Windows'
name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
shell: bash
- name: Checking out the repository
uses: actions/checkout@v2
with:
clean: false
submodules: recursive
- id: step_4
run: cargo run --package enso-build-shader-tools --bin publish
env:
GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }}
timeout-minutes: 360
env:
ENSO_RELEASE_ID: ${{ needs.run-create-linux-latest.outputs.ENSO_RELEASE_ID }}
timeout-minutes: 360
env:
ENSO_BUILD_SKIP_VERSION_CHECK: "true"

View File

@ -124,10 +124,19 @@
with a shared implementation between the Desktop and Web versions of the IDE.
- [Added a new component: Dropdown][3985]. A list of selectable labeled entries,
suitable for single and multi-select scenarios.
- [Compile-time shader optimizer was implemented][4003]. It is capable of
extracting non-optimized shaders from the compiled WASM artifacts, running
stand-alone optimization toolchain (glslc, spirv-opt, spirv-cross), and
injecting optimized shaders back to WASM during its initialization process.
Unfortunately, it caused our theme system to stop working correctly, because
generated shaders differ per theme (only light theme is available, the dark
theme has been disabled). We will support multiple themes in the future, but
this is not on our priority list right now.
[3857]: https://github.com/enso-org/enso/pull/3857
[3985]: https://github.com/enso-org/enso/pull/3985
[4047]: https://github.com/enso-org/enso/pull/4047
[4003]: https://github.com/enso-org/enso/pull/4003
#### Enso Standard Library

114
Cargo.lock generated
View File

@ -1178,7 +1178,7 @@ version = "0.1.0"
dependencies = [
"Inflector",
"serde",
"serde_yaml 0.8.26",
"serde_yaml",
]
[[package]]
@ -1863,6 +1863,7 @@ dependencies = [
"derive_more",
"dirs",
"enso-build-base",
"ensogl-pack",
"filetime",
"flate2",
"flume",
@ -1898,7 +1899,7 @@ dependencies = [
"semver 1.0.14",
"serde",
"serde_json",
"serde_yaml 0.9.14",
"serde_yaml",
"shrinkwraprs 0.3.0",
"strum",
"sysinfo",
@ -1927,10 +1928,21 @@ dependencies = [
"futures-util",
"serde",
"serde_json",
"serde_yaml 0.9.14",
"serde_yaml",
"tracing",
]
[[package]]
name = "enso-build-ci-gen"
version = "0.1.0"
dependencies = [
"enso-build",
"enso-build-shader-tools",
"ide-ci",
"serde_yaml",
"tokio",
]
[[package]]
name = "enso-build-cli"
version = "0.1.0"
@ -1951,7 +1963,7 @@ dependencies = [
"octocrab",
"serde",
"serde_json",
"serde_yaml 0.9.14",
"serde_yaml",
"strum",
"tempfile",
"tokio",
@ -1970,11 +1982,23 @@ dependencies = [
"proc-macro2",
"quote",
"regex",
"serde_yaml 0.9.14",
"serde_yaml",
"shrinkwraprs 0.3.0",
"syn",
]
[[package]]
name = "enso-build-shader-tools"
version = "0.1.0"
dependencies = [
"html_parser",
"ide-ci",
"octocrab",
"regex",
"tempfile",
"tokio",
]
[[package]]
name = "enso-build-utilities"
version = "0.1.0"
@ -2664,6 +2688,7 @@ name = "ensogl-example-auto-layout"
version = "0.1.0"
dependencies = [
"ensogl-core",
"ensogl-hardcoded-theme",
"wasm-bindgen",
]
@ -2953,6 +2978,8 @@ dependencies = [
name = "ensogl-hardcoded-theme"
version = "0.1.0"
dependencies = [
"enso-prelude",
"enso-shapely",
"ensogl-core",
]
@ -2979,6 +3006,20 @@ dependencies = [
"ensogl-text",
]
[[package]]
name = "ensogl-pack"
version = "0.1.0"
dependencies = [
"enso-prelude",
"fs_extra",
"ide-ci",
"manifest-dir-macros",
"regex",
"tempfile",
"tokio",
"walkdir",
]
[[package]]
name = "ensogl-scroll-area"
version = "0.1.0"
@ -3760,6 +3801,20 @@ version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8371fb981840150b1a54f7cb117bf6699f7466a1d4861daac33bc6fe2b5abea0"
[[package]]
name = "html_parser"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec016cabcf7c9c48f9d5fdc6b03f273585bfce640a0f47a69552039e92b1959a"
dependencies = [
"pest",
"pest_derive",
"serde",
"serde_derive",
"serde_json",
"thiserror",
]
[[package]]
name = "http"
version = "0.2.8"
@ -4034,7 +4089,7 @@ dependencies = [
"semver 1.0.14",
"serde",
"serde_json",
"serde_yaml 0.9.14",
"serde_yaml",
"sha2",
"shrinkwraprs 0.3.0",
"strum",
@ -4533,12 +4588,6 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565"
[[package]]
name = "linked-hash-map"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "linux-raw-sys"
version = "0.1.4"
@ -4596,6 +4645,18 @@ dependencies = [
"tokio-stream",
]
[[package]]
name = "manifest-dir-macros"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f08150cf2bab1fc47c2196f4f41173a27fcd0f684165e5458c0046b53a472e2f"
dependencies = [
"once_cell",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "matchers"
version = "0.1.0"
@ -5961,9 +6022,9 @@ dependencies = [
[[package]]
name = "regex"
version = "1.6.0"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
dependencies = [
"aho-corasick",
"memchr",
@ -6399,21 +6460,9 @@ dependencies = [
[[package]]
name = "serde_yaml"
version = "0.8.26"
version = "0.9.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b"
dependencies = [
"indexmap",
"ryu",
"serde",
"yaml-rust",
]
[[package]]
name = "serde_yaml"
version = "0.9.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d232d893b10de3eb7258ff01974d6ee20663d8e833263c99409d4b13a0209da"
checksum = "92b5b431e8907b50339b51223b97d102db8d987ced36f6e4d03621db9316c834"
dependencies = [
"indexmap",
"itoa 1.0.3",
@ -8106,15 +8155,6 @@ version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "114ba2b24d2167ef6d67d7d04c8cc86522b87f490025f39f0303b7db5bf5e3d8"
[[package]]
name = "yaml-rust"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
"linked-hash-map",
]
[[package]]
name = "zeroize"
version = "1.5.7"

View File

@ -8,14 +8,17 @@ members = [
"app/gui/enso-profiler-enso-data",
"build/cli",
"build/macros",
"build/enso-formatter",
"build/ci-gen",
"build/cli",
"build/intellij-run-config-gen",
"build/deprecated/rust-scripts",
"build/shader-tools",
"lib/rust/*",
"lib/rust/parser/src/syntax/tree/visitor",
"lib/rust/parser/jni",
"lib/rust/parser/generate-java",
"lib/rust/parser/debug",
"lib/rust/ensogl/pack",
"lib/rust/profiler/data",
"lib/rust/profiler/demo-data",
"integration-test",
@ -71,6 +74,11 @@ debug-assertions = true
[workspace.dependencies]
console-subscriber = "0.1.8"
nix = "0.26.1"
octocrab = { git = "https://github.com/enso-org/octocrab", default-features = false, features = [
"rustls"
] }
regex = "1.6.0"
serde_yaml = "0.9.16"
serde-wasm-bindgen = "0.4.5"
tokio = { version = "1.23.0", features = ["full", "tracing"] }
tokio-util = { version = "0.7.4", features = ["full"] }

View File

@ -64,7 +64,7 @@ wasm-bindgen-futures = "0.4"
websocket = "0.26.5"
[dev-dependencies]
regex = { version = "1.3.6" }
regex = { workspace = true }
wasm-bindgen-test = { workspace = true }
[dependencies.web-sys]

View File

@ -19,7 +19,7 @@ mod js {
#[wasm_bindgen(inline_js = "
export function remote_log(msg, value) {
try {
window.enso.remoteLog(msg,value)
window.ensoglApp.remoteLog(msg,value)
} catch (error) {
console.error(\"Error while logging message. \" + error );
}

View File

@ -13,10 +13,8 @@
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]
use enso_logger::*;
use enso_prelude::*;
use enso_logger::DefaultWarningLogger as Logger;
use ensogl::system::web;
@ -40,36 +38,35 @@ pub fn engine_version_requirement() -> semver::VersionReq {
// ============
ensogl::read_args! {
[window_app_scope_name, window_app_scope_config_name] {
entry : String,
project : String,
project_manager : String,
language_server_rpc : String,
language_server_data : String,
namespace : String,
platform : web::platform::Platform,
frame : bool,
theme : String,
dark_theme : bool,
high_contrast : bool,
use_loader : bool,
wasm_url : String,
wasm_glue_url : String,
node_labels : bool,
crash_report_host : String,
data_gathering : bool,
mixpanel_token : String,
is_in_cloud : bool,
verbose : bool,
authentication_enabled : bool,
email : String,
application_config_url : String,
/// When profiling the application (e.g. with the `./run profile` command), this argument
/// chooses what is profiled.
test_workflow : String,
skip_min_version_check : bool,
preferred_engine_version : semver::Version,
enable_new_component_browser : bool,
emit_user_timing_measurements : bool,
}
application_config_url: String,
authentication_enabled: bool,
dark_theme: bool,
data_gathering: bool,
debug: bool,
email: Option<String>,
emit_user_timing_measurements: bool,
enable_new_component_browser: bool,
enable_spector:bool,
entry: String,
frame: bool,
is_in_cloud: bool,
language_server_data: Option<String>,
language_server_rpc: Option<String>,
loader_download_to_init_ratio: f32,
max_before_main_entry_points_time_ms: f32,
namespace: Option<String>,
node_labels: bool,
pkg_js_url: String,
pkg_wasm_url: String,
platform: Option<web::platform::Platform>,
preferred_engine_version: Option<semver::Version>,
project: Option<String>,
project_manager: Option<String>,
shaders_url: String,
skip_min_version_check: bool,
/// When profiling the application (e.g. with the `./run profile` command), this argument
/// chooses what is profiled.
test_workflow: Option<String>,
theme: String,
use_loader: bool,
}

View File

@ -23,5 +23,5 @@ serde = { version = "1.0", features = ["derive"] }
uuid = { version = "0.8", features = ["serde", "v4", "wasm-bindgen"] }
[dev-dependencies]
regex = { version = "1.3.6" }
regex = { workspace = true }
wasm-bindgen-test = { workspace = true }

View File

@ -11,7 +11,7 @@ crate-type = ["cdylib", "rlib"]
derive_more = { version = "0.99.16" }
failure = { version = "0.1.5" }
lazy_static = { version = "1.4.0" }
regex = { version = "1" }
regex = { workspace = true }
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = { version = "1.0" }
shrinkwraprs = { version = "0.2.1" }

View File

@ -15,9 +15,9 @@ use enso_config::ARGS;
// ==============
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, Fail)]
#[derive(Clone, Debug, Fail)]
#[fail(display = "Missing program option: {}.", 0)]
pub struct MissingOption(&'static str);
pub struct MissingOption(String);
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, Fail)]

View File

@ -75,7 +75,7 @@ impl Initializer {
config::InitialView::Project => view.switch_view_to_project(),
}
if enso_config::ARGS.emit_user_timing_measurements.unwrap_or_default() {
if enso_config::ARGS.emit_user_timing_measurements {
ensogl_app.display.connect_profiler_to_user_timing();
}
let status_bar = view.status_bar().clone_ref();
@ -249,7 +249,7 @@ pub fn register_views(app: &Application) {
type PlaceholderEntryType = ensogl_component::list_view::entry::Label;
app.views.register::<ensogl_component::list_view::ListView<PlaceholderEntryType>>();
if enso_config::ARGS.is_in_cloud.unwrap_or(false) {
if enso_config::ARGS.is_in_cloud {
app.views.register::<ide_view::window_control_buttons::View>();
}
}

View File

@ -62,7 +62,6 @@
extern crate core;
use prelude::*;
use wasm_bindgen::prelude::*;
// ==============

View File

@ -1,7 +1,6 @@
//! Defines profilable workflows, and an entry point that runs a specified workflow.
use crate::integration_test::prelude::*;
use wasm_bindgen::prelude::*;
use enso_debug_api as debug_api;

View File

@ -37,14 +37,12 @@
#![warn(unused_qualifications)]
use ensogl_core::prelude::*;
use wasm_bindgen::prelude::*;
use ensogl_core::application::Application;
use ensogl_core::display;
use ensogl_core::display::navigation::navigator::Navigator;
use ensogl_core::display::object::ObjectOps;
use ensogl_core::frp;
use ensogl_hardcoded_theme as theme;
use ensogl_text as text;
use ide_view_component_list_panel::grid;
use ide_view_component_list_panel::grid::entry::icon;
@ -190,9 +188,6 @@ fn snap_to_pixel_offset(size: Vector2, scene_shape: &display::scene::Shape) -> V
pub fn main() {
ensogl_text_msdf::run_once_initialized(|| {
let app = Application::new("root");
theme::builtin::light::register(&app);
theme::builtin::light::enable(&app);
let world = &app.display;
let scene = &world.default_scene;

View File

@ -8,7 +8,6 @@
use ensogl::display::shape::*;
use ensogl::prelude::*;
use wasm_bindgen::prelude::*;
use enso_suggestion_database as suggestion_database;
use enso_suggestion_database::doc_section;
@ -184,8 +183,6 @@ mod button {
pub fn main() {
ensogl_text_msdf::run_once_initialized(|| {
let app = Application::new("root");
ensogl_hardcoded_theme::builtin::light::register(&app);
ensogl_hardcoded_theme::builtin::light::enable(&app);
let _registry = Registry::with_default_visualizations();
let wrapper = DatabaseWrapper::from_db(database());

View File

@ -49,9 +49,6 @@ mod frame {
#[allow(dead_code)]
pub fn entry_point_searcher_icons() {
let app = Application::new("root");
ensogl_hardcoded_theme::builtin::dark::register(&app);
ensogl_hardcoded_theme::builtin::light::register(&app);
ensogl_hardcoded_theme::builtin::light::enable(&app);
let world = app.display.clone();
mem::forget(app);
let scene = &world.default_scene;

View File

@ -19,7 +19,6 @@ use ast::crumbs::PatternMatchCrumb::*;
use ast::crumbs::*;
use ensogl::prelude::*;
use span_tree::traits::*;
use wasm_bindgen::prelude::*;
use enso_frp as frp;
use ensogl::application::Application;

View File

@ -15,7 +15,6 @@
#![warn(unused_qualifications)]
use ensogl::prelude::*;
use wasm_bindgen::prelude::*;
use crate::text_visualization::TextGrid;
@ -86,10 +85,6 @@ fn init(app: &Application) {
.expect("Failed to add font to HTML body.");
let closure = ensogl::system::web::Closure::new(move |_| {
ensogl_hardcoded_theme::builtin::dark::register(&app);
ensogl_hardcoded_theme::builtin::light::register(&app);
ensogl_hardcoded_theme::builtin::light::enable(&app);
let world = &app.display;
let scene = &world.default_scene;
let camera = scene.camera();
@ -126,6 +121,5 @@ fn init(app: &Application) {
let _result = web::document.fonts().ready().unwrap().then(&closure);
// This extends the lifetime of the closure which is what we want here. Otherwise, the closure
// would be destroyed and the callback cannot be called.
#[allow(clippy::forget_non_drop)]
mem::forget(closure);
}

View File

@ -15,7 +15,6 @@
#![warn(unused_qualifications)]
use ensogl::prelude::*;
use wasm_bindgen::prelude::*;
use ensogl::animation;
use ensogl::application::Application;

View File

@ -40,6 +40,7 @@ use super::GridVector;
use super::GridWindow;
// =================
// === GridCache ===
// =================

View File

@ -126,7 +126,7 @@ pub mod backdrop {
use super::*;
ensogl::shape! {
// Disable to allow interaction with the output port.
// Disabled to allow interaction with the output port.
pointer_events = false;
(style:Style, selection:f32) {
@ -732,7 +732,7 @@ impl Node {
deselect_target <- input.deselect.constant(0.0);
select_target <- input.select.constant(1.0);
selection.target <+ any(&deselect_target,&select_target);
selection.target <+ any(&deselect_target, &select_target);
eval selection.value ((t) model.backdrop.selection.set(*t));

View File

@ -238,7 +238,7 @@ impl Model {
#[profile(Debug)]
fn set_label(&self, content: impl Into<String>) {
let str = if ARGS.node_labels.unwrap_or(true) { content.into() } else { default() };
let str = if ARGS.node_labels { content.into() } else { default() };
self.label.set_content(str);
}

View File

@ -104,8 +104,7 @@ const MAX_ZOOM: f32 = 1.0;
fn traffic_lights_gap_width() -> f32 {
let is_macos = ARGS.platform.map(|p| p.is_macos()) == Some(true);
let is_frameless = ARGS.frame == Some(false);
if is_macos && is_frameless {
if is_macos && !ARGS.frame {
MACOS_TRAFFIC_LIGHTS_CONTENT_WIDTH + MACOS_TRAFFIC_LIGHTS_SIDE_OFFSET
} else {
0.0

View File

@ -146,7 +146,7 @@ pub enum SearcherVariant {
impl SearcherVariant {
fn new(app: &Application) -> Self {
if ARGS.enable_new_component_browser.unwrap_or(true) {
if ARGS.enable_new_component_browser {
Self::ComponentBrowser(app.new_view::<component_browser::View>())
} else {
Self::OldNodeSearcher(Rc::new(app.new_view::<searcher::View>()))
@ -261,7 +261,7 @@ impl Model {
let code_editor = app.new_view::<code_editor::View>();
let fullscreen_vis = default();
let debug_mode_popup = debug_mode_popup::View::new(app);
let window_control_buttons = ARGS.is_in_cloud.unwrap_or_default().as_some_from(|| {
let window_control_buttons = ARGS.is_in_cloud.as_some_from(|| {
let window_control_buttons = app.new_view::<crate::window_control_buttons::View>();
display_object.add_child(&window_control_buttons);
scene.layers.panel.add(&window_control_buttons);
@ -293,6 +293,9 @@ impl Model {
}
/// Sets style of IDE to the one defined by parameter `theme`.
///
/// This does not change the EnsoGL theme. Changing it is not supported currently because
/// the theme is used for shader-precompilation.
pub fn set_style(&self, theme: Theme) {
match theme {
Theme::Light => self.set_light_style(),
@ -301,12 +304,10 @@ impl Model {
}
fn set_light_style(&self) {
ensogl_hardcoded_theme::builtin::light::enable(&self.app);
self.set_html_style("light-theme");
}
fn set_dark_style(&self) {
ensogl_hardcoded_theme::builtin::dark::enable(&self.app);
self.set_html_style("dark-theme");
}
@ -318,7 +319,7 @@ impl Model {
if let Some(node) = self.graph_editor.nodes().get_cloned_ref(&node_id) {
node.position().xy()
} else {
error!("Trying to show searcher under nonexisting node");
error!("Trying to show searcher under non existing node");
default()
}
}
@ -446,21 +447,11 @@ impl Deref for View {
impl View {
/// Constructor.
pub fn new(app: &Application) -> Self {
ensogl_hardcoded_theme::builtin::dark::register(app);
ensogl_hardcoded_theme::builtin::light::register(app);
let theme = match ARGS.theme.as_deref() {
Some("dark") => {
ensogl_hardcoded_theme::builtin::dark::enable(app);
Theme::Dark
}
_ => {
ensogl_hardcoded_theme::builtin::light::enable(app);
Theme::Light
}
let theme = match ARGS.theme.as_ref() {
"dark" => Theme::Dark,
_ => Theme::Light,
};
display::style::javascript::expose_to_window(&app.themes);
let scene = app.display.default_scene.clone_ref();
let model = Model::new(app);
let frp = Frp::new();
@ -476,7 +467,6 @@ impl View {
model.set_style(theme);
// TODO[WD]: This should not be needed after the theme switching issue is implemented.
// See: https://github.com/enso-org/ide/issues/795
app.themes.update();
let input_change_delay = frp::io::timer::Timeout::new(network);
if let Some(window_control_buttons) = &*model.window_control_buttons {

View File

@ -1,6 +0,0 @@
/**
* Configuration options that are only used by the JavaScript part of the project.
*/
export const defaultLogServerPort = 20060
export const defaultLogServerHost = `localhost:${defaultLogServerPort}`

View File

@ -1,6 +1,5 @@
'use strict'
import { defaultLogServerHost } from '../../../config.js'
import assert from 'node:assert'
import buildCfg from '../../../build.json'
import Electron from 'electron'
@ -223,14 +222,6 @@ optParser.options('version', {
describe: `Print the version`,
})
optParser.options('crash-report-host', {
describe:
'The address of the server that will receive crash reports. ' +
'Consists of a hostname, optionally followed by a ":" and a port number',
requiresArg: true,
default: defaultLogServerHost,
})
optParser.options('data-gathering', {
describe: 'Enable the sharing of any usage data',
type: 'boolean',

View File

@ -1,20 +0,0 @@
{
"type": "module",
"version": "1.0.0",
"author": {
"name": "Enso Team",
"email": "contact@enso.org"
},
"homepage": "https://github.com/enso-org/ide",
"repository": {
"type": "git",
"url": "git@github.com:enso-org/ide.git"
},
"bugs": {
"url": "https://github.com/enso-org/ide/issues"
},
"name": "enso-studio-common",
"dependencies": {
"mime-types": "^2.1.35"
}
}

View File

@ -1,18 +0,0 @@
/// This module defines a simple set of animation utils. Follow the link to learn more:
/// https://easings.net/en
// =================
// === Animation ===
// =================
export function ease_in_out_cubic(t) {
return t < 0.5 ? 4 * t * t * t : 1 - ((-2 * t + 2) * (-2 * t + 2) * (-2 * t + 2)) / 2
}
export function ease_in_out_quad(t) {
return t < 0.5 ? 2 * t * t : 1 - ((-2 * t + 2) * (-2 * t + 2)) / 2
}
export function ease_out_quart(t) {
return 1 - --t * t * t * t
}

View File

@ -1,33 +0,0 @@
// ==================
// === HTML Utils ===
// ==================
/// Remove the given node if it exists.
export function remove_node(node) {
if (node) {
node.parentNode.removeChild(node)
}
}
/// Creates a new top-level div which occupy full size of its parent's space.
export function new_top_level_div() {
let node = document.createElement('div')
node.style.width = '100%'
node.style.height = '100%'
document.body.appendChild(node)
return node
}
/// Log subsequent messages in a group.
export async function log_group_collapsed(msg, f) {
console.groupCollapsed(msg)
let out
try {
out = await f()
} catch (error) {
console.groupEnd()
throw error
}
console.groupEnd()
return out
}

View File

@ -1,259 +0,0 @@
import * as animation from './animation.js'
import * as html_utils from './html_utils.js'
import * as math from './math.js'
import * as svg from './svg.js'
// =========================
// === ProgressIndicator ===
// =========================
let bg_color = 'rgb(249,250,251)'
let loader_color = '#303030'
let top_layer_index = 1000
/// Visual representation of the loader.
export class ProgressIndicator {
constructor(cfg) {
this.dom = html_utils.new_top_level_div()
this.dom.id = 'loader'
this.dom.style.position = 'fixed'
this.dom.style.top = 0
this.dom.style.left = 0
// In the Cloud UI, all layers are stacked, and the progress
// indicator must be placed at the top layer.
this.dom.style.zIndex = top_layer_index
let center = document.createElement('div')
center.style.width = '100%'
center.style.height = '100%'
center.style.display = 'flex'
center.style.justifyContent = 'center'
center.style.alignItems = 'center'
this.dom.appendChild(center)
let progress_bar_svg = this.init_svg()
let progress_bar = document.createElement('div')
progress_bar.innerHTML = progress_bar_svg
center.appendChild(progress_bar)
this.progress_indicator = document.getElementById('progress_indicator')
this.progress_indicator_mask = document.getElementById('progress_indicator_mask')
this.progress_indicator_corner = document.getElementById('progress_indicator_corner')
this.set(0)
this.set_opacity(0)
if (cfg.use_loader) {
this.initialized = this.animate_show()
} else {
this.initialized = new Promise(resolve => {
resolve()
})
}
this.animate_rotation()
this.destroyed = false
}
/// Initializes the SVG view.
init_svg() {
let width = 128
let height = 128
let alpha = 0.9
let inner_radius = 48
let outer_radius = 60
let mid_radius = (inner_radius + outer_radius) / 2
let bar_width = outer_radius - inner_radius
return svg.new_svg(
width,
height,
`
<defs>
<g id="progress_bar">
<circle fill="${loader_color}" r="${outer_radius}" />
<circle fill="${bg_color}" r="${inner_radius}" />
<path fill="${bg_color}" opacity="${alpha}" id="progress_indicator_mask" />
<circle fill="${loader_color}" r="${
bar_width / 2
}" id="progress_indicator_corner" />
<circle fill="${loader_color}" r="${
bar_width / 2
}" cy="-${mid_radius}" />
</g>
</defs>
<g transform="translate(${width / 2},${height / 2})">
<g transform="rotate(0,0,0)" id="progress_indicator">
<use xlink:href="#progress_bar"></use>
</g>
</g>
`
)
}
/// Destroys the component. Removes it from the stage and destroys attached callbacks.
destroy() {
html_utils.remove_node(this.dom)
this.destroyed = true
}
/// Set the value of the loader [0..1].
set(value) {
let min_angle = 0
let max_angle = 359
let angle_span = max_angle - min_angle
let mask_angle = (1 - value) * angle_span - min_angle
let corner_pos = math.polar_to_cartesian(54, -mask_angle)
this.progress_indicator_mask.setAttribute('d', svg.arc(128, -mask_angle))
this.progress_indicator_corner.setAttribute('cx', corner_pos.x)
this.progress_indicator_corner.setAttribute('cy', corner_pos.y)
}
/// Set the opacity of the loader.
set_opacity(val) {
this.progress_indicator.setAttribute('opacity', val)
}
/// Set the rotation of the loader (angles).
set_rotation(val) {
this.progress_indicator.setAttribute('transform', `rotate(${val},0,0)`)
}
/// Start show animation. It is used after the loader is created.
animate_show() {
let indicator = this
return new Promise(function (resolve, reject) {
let alpha = 0
function show_step() {
if (alpha > 1) {
alpha = 1
}
indicator.set_opacity(animation.ease_in_out_quad(alpha))
alpha += 0.02
if (alpha < 1) {
window.requestAnimationFrame(show_step)
} else {
resolve()
}
}
window.requestAnimationFrame(show_step)
})
}
/// Start the spinning animation.
animate_rotation() {
let indicator = this
let rotation = 0
function rotate_step(timestamp) {
indicator.set_rotation(rotation)
rotation += 6
if (!indicator.destroyed) {
window.requestAnimationFrame(rotate_step)
}
}
window.requestAnimationFrame(rotate_step)
}
}
// ==============
// === Loader ===
// ==============
/// The main loader class. It connects to the provided fetch responses and tracks their status.
export class Loader {
constructor(resources, cfg) {
this.indicator = new ProgressIndicator(cfg)
this.total_bytes = 0
this.received_bytes = 0
this.download_speed = 0
this.last_receive_time = performance.now()
this.initialized = this.indicator.initialized
this.cap_progress_at = 0.3
let self = this
this.done_resolve = null
this.done = new Promise(resolve => {
self.done_resolve = resolve
})
for (let resource of resources) {
this.total_bytes += parseInt(resource.headers.get('Content-Length'))
resource.clone().body.pipeTo(this.input_stream())
}
if (Number.isNaN(this.total_bytes)) {
console.error(
"Loader error. Server is not configured to send the 'Content-Length' metadata."
)
this.total_bytes = 0
}
}
/// The current loading progress [0..1].
value() {
if (this.total_bytes == 0) {
return 0.3
} else {
return this.received_bytes / this.total_bytes
}
}
/// Returns true if the loader finished.
is_done() {
return this.received_bytes == this.total_bytes
}
/// Removes the loader with it's dom element.
destroy() {
this.indicator.destroy()
}
/// Callback run on every new received byte stream.
on_receive(new_bytes) {
this.received_bytes += new_bytes
let time = performance.now()
let time_diff = time - this.last_receive_time
this.download_speed = new_bytes / time_diff
this.last_receive_time = time
let percent = this.show_percentage_value()
let speed = this.show_download_speed()
let received = this.show_received_bytes()
console.log(`${percent}% (${received}) (${speed}).`)
let indicator_progress = this.value() * this.cap_progress_at
this.indicator.set(indicator_progress)
if (this.is_done()) {
this.done_resolve()
}
}
/// Download percentage value.
show_percentage_value() {
return Math.round(100 * this.value())
}
/// Download total size value.
show_total_bytes() {
return `${math.format_mb(this.total_bytes)} MB`
}
/// Download received bytes value.
show_received_bytes() {
return `${math.format_mb(this.received_bytes)} MB`
}
/// Download speed value.
show_download_speed() {
return `${math.format_mb(1000 * this.download_speed)} MB/s`
}
/// Internal function for attaching new fetch responses.
input_stream() {
let loader = this
return new WritableStream({
write(t) {
loader.on_receive(t.length)
},
})
}
}

View File

@ -1,19 +0,0 @@
/// This module defines a common math operations.
// ============
// === Math ===
// ============
/// Converts the polar coordinates to cartesian ones.
export function polar_to_cartesian(radius, angle_degrees) {
let angle = ((angle_degrees - 90) * Math.PI) / 180.0
return {
x: radius * Math.cos(angle),
y: radius * Math.sin(angle),
}
}
/// Format bytes as megabytes with a single precision number.
export function format_mb(bytes) {
return Math.round((10 * bytes) / (1024 * 1024)) / 10
}

View File

@ -1,30 +0,0 @@
/// This module defines a set of utils for generating and modifying the SVG images.
import * as math from './math.js'
// ===========
// === SVG ===
// ===========
/// Defines a new SVG with the provided source.
export function new_svg(width, height, str) {
return `
<svg version="1.1" baseProfile="full" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
height="${height}" width="${width}" viewBox="0 0 ${height} ${width}">
${str}
</svg>`
}
/// Returns SVG code for an arc with a defined radius and angle.
export function arc(radius, end_angle) {
let start_angle = 0
if (end_angle < 0) {
start_angle = end_angle
end_angle = 0
}
let start = math.polar_to_cartesian(radius, end_angle)
let end = math.polar_to_cartesian(radius, start_angle)
let large_arc = end_angle - start_angle <= 180 ? '0' : '1'
return `M 0 0 L ${start.x} ${start.y} A ${radius} ${radius} 0 ${large_arc} 0 ${end.x} ${end.y}`
}

View File

@ -33,13 +33,18 @@ export const thisPath = path.resolve(dirname(fileURLToPath(import.meta.url)))
// === Environment variables ===
// =============================
export const wasm_path = require_env('ENSO_BUILD_GUI_WASM')
export const js_glue_path = require_env('ENSO_BUILD_GUI_JS_GLUE')
/** List of files to be copied from WASM artifacts. */
export const wasm_artifacts = require_env('ENSO_BUILD_GUI_WASM_ARTIFACTS')
/** Directory with assets. Its contents are to be copied. */
export const assets_path = require_env('ENSO_BUILD_GUI_ASSETS')
/** Path where bundled files are output. */
export const output_path = path.resolve(require_env('ENSO_BUILD_GUI'), 'assets')
/** The main JS bundle to load WASM and JS wasm-pack bundles. */
export const ensogl_app_path = require_env('ENSO_BUILD_GUI_ENSOGL_APP')
// ===================
// === Git process ===
// ===================
@ -67,7 +72,7 @@ const always_copied_files = [
path.resolve(thisPath, 'src', 'run.js'),
path.resolve(thisPath, 'src', 'style.css'),
path.resolve(thisPath, 'src', 'docsStyle.css'),
wasm_path,
...wasm_artifacts.split(path.delimiter),
]
/**
@ -88,14 +93,14 @@ async function* files_to_copy_provider() {
const config: esbuild.BuildOptions = {
bundle: true,
entryPoints: ['src/index.ts', 'src/wasm_imports.js'],
entryPoints: ['src/index.ts'],
outdir: output_path,
outbase: 'src',
plugins: [
plugin_yaml.yamlPlugin({}),
NodeModulesPolyfillPlugin(),
NodeGlobalsPolyfillPlugin({ buffer: true, process: true }),
aliasPlugin({ wasm_rust_glue: js_glue_path }),
aliasPlugin({ ensogl_app: ensogl_app_path }),
timePlugin(),
copy_plugin.create(files_to_copy_provider),
],
@ -108,6 +113,7 @@ const config: esbuild.BuildOptions = {
minify: true,
metafile: true,
publicPath: '/assets',
platform: 'node',
incremental: true,
color: true,
logOverride: {

View File

@ -1,8 +0,0 @@
authDomain: "enso-org.firebaseapp.com"
projectId: "enso-org"
storageBucket: "enso-org.appspot.com"
messagingSenderId: "451746386966"
appId: "1:451746386966:web:558a832abe486208d61137"
measurementId: "G-W11ZNCQ476"
clientId: "451746386966-u5piv17hgvnimpq5ic5p60liekcqmqmu.apps.googleusercontent.com"
apiKey: "AIzaSyA99Ap9yN-RmNeb6dYIiUYPTCamLAZxTQ8"

View File

@ -21,12 +21,10 @@
},
"dependencies": {
"@types/semver": "^7.3.9",
"enso-studio-common": "1.0.0",
"firebase": "^9.14.0",
"firebaseui": "^6.0.2",
"html-loader": "^4.2.0",
"mixpanel-browser": "2.45.0",
"enso-gui-server": "^1.0.0"
"enso-gui-server": "^1.0.0",
"@types/mixpanel-browser": "^2.38.0"
},
"devDependencies": {
"@esbuild-plugins/node-modules-polyfill": "^0.1.4",

View File

@ -29,11 +29,6 @@
<title>Enso</title>
<link rel="stylesheet" href="/assets/style.css" />
<link rel="stylesheet" href="/assets/docsStyle.css" />
<link
type="text/css"
rel="stylesheet"
href="https://www.gstatic.com/firebasejs/ui/4.8.0/firebase-ui-auth.css"
/>
<script type="module" src="/assets/index.js" defer></script>
<script type="module" src="/assets/run.js" defer></script>
</head>

File diff suppressed because it is too large Load Diff

View File

@ -44,8 +44,8 @@ export function create(files_provider) {
build.onStart(async () => {
console.log('Initial options:', build.initialOptions)
console.log('Collecting files to copy.')
files = files_provider()
console.log('Collecting files to copy.', files)
})
build.onResolve({ filter: new RegExp(magic) }, async resolve => {
console.log('Resolving ', resolve)

View File

@ -17,6 +17,8 @@ export async function start({ root, assets, port }) {
const freePort = await portfinder.getPortPromise({ port: port ?? DEFAULT_PORT })
// FIXME: There is an issue probably related with improper caches of served files. Read more
// here: https://github.com/expressjs/serve-static/issues/155
const app = connect()
.use(logger('dev', { skip: (req, res) => res.statusCode < 400 }))
.use(serveStatic(root))

File diff suppressed because it is too large Load Diff

View File

@ -17,7 +17,6 @@
"private": true,
"workspaces": [
"lib/client",
"lib/common",
"lib/content",
"lib/copy-plugin",
"lib/icons",

View File

@ -11,5 +11,5 @@ futures-util = "0.3.24"
futures = "0.3.24"
serde = "1.0.145"
serde_json = "1.0.85"
serde_yaml = "0.9.13"
serde_yaml = { workspace = true }
tracing = "0.1.36"

View File

@ -5,6 +5,7 @@ use crate::prelude::*;
use crate::extensions::os_str::OsStrExt;
use serde::de::DeserializeOwned;
use std::env::consts::EXE_EXTENSION;
@ -141,6 +142,24 @@ pub trait PathExt: AsRef<Path> {
ret.extend(self.as_ref().file_name());
ret
}
/// Replace the filename extension with the default executable extension for the current OS.
///
/// ```
/// # use enso_build_base::prelude::*;
/// let path = Path::new("foo").with_executable_extension();
/// // Windows:
/// #[cfg(target_os = "windows")]
/// assert_eq!(path, Path::new("foo.exe"));
/// // Other platforms:
/// #[cfg(not(target_os = "windows"))]
/// assert_eq!(path, Path::new("foo"));
/// ```
fn with_executable_extension(&self) -> PathBuf {
let mut ret = self.as_ref().to_path_buf();
ret.set_extension(EXE_EXTENSION);
ret
}
}
impl<T: AsRef<Path>> PathExt for T {}

View File

@ -33,6 +33,7 @@ handlebars = "4.3.5"
heck = "0.4.0"
humantime = "2.1.0"
enso-build-base = { path = "../base" }
ensogl-pack = { path = "../../lib/rust/ensogl/pack" }
ide-ci = { path = "../ci_utils" }
indexmap = "1.7.0"
indicatif = "0.17.1"
@ -42,9 +43,7 @@ lazy_static = "1.4.0"
mime = "0.3.16"
new_mime_guess = "4.0.1"
nix = { workspace = true }
octocrab = { git = "https://github.com/enso-org/octocrab", default-features = false, features = [
"rustls"
] }
octocrab = { workspace = true }
ouroboros = "0.15.0"
paste = "1.0.7"
path-absolutize = "3.0.11"
@ -55,14 +54,14 @@ port_check = "0.1.5"
pretty_env_logger = "0.4.0"
pulldown-cmark = "0.9.1"
rand = "0.8.4"
regex = "1.5.4"
regex = { workspace = true }
reqwest = { version = "0.11.5", default-features = false, features = [
"stream"
] }
semver = { version = "1.0.4", features = ["serde"] }
serde = { version = "1.0.130", features = ["derive"] }
serde_json = "1.0.68"
serde_yaml = "0.9.10"
serde_yaml = { workspace = true }
scopeguard = "1.1.0"
shrinkwraprs = "0.3.0"
strum = { version = "0.24.0", features = ["derive"] }
@ -84,4 +83,4 @@ zip = { version = "0.6.2", default-features = false, features = ["deflate"] }
[build-dependencies]
ide-ci = { path = "../ci_utils" }
serde_yaml = "0.9.10"
serde_yaml = { workspace = true }

View File

@ -1,6 +1,6 @@
use enso_build::prelude::*;
use enso_build::setup_octocrab;
use ide_ci::github::setup_octocrab;
use ide_ci::github::Repo;
use octocrab::models::ReleaseId;

View File

@ -11,6 +11,7 @@
promote.yml:
release.yml:
scala-new.yml:
shader-tools.yml:
app/:
gui/:
ide-desktop/:
@ -52,12 +53,12 @@
# Final WASM artifacts in `dist` directory.
wasm/:
? path: ide.wasm
var: wasm_main
? path: ide_bg.wasm
var: wasm_main_raw
? path: ide.js
var: wasm_glue
shaders/: # Optimized shaders that contain main function code only.
index.cjs: # The main JS bundle to load WASM and JS wasm-pack bundles.
index.d.ts: # TypeScript types interface file.
pkg.js: # The `pks.js` artifact of wasm-pack WITH bundled snippets.
pkg.wasm: # The `pks_bg.wasm` artifact of wasm-pack.
pkg-opt.wasm: # The optimized `pks_bg.wasm`.
distribution/:
editions/:
<edition>.yaml:

View File

@ -4,11 +4,11 @@ use crate::ci_gen::job::expose_os_specific_signing_secret;
use crate::ci_gen::job::plain_job;
use crate::ci_gen::job::plain_job_customized;
use crate::ci_gen::job::RunsOn;
use crate::version::promote::Designation;
use crate::version::ENSO_EDITION;
use crate::version::ENSO_RELEASE_MODE;
use crate::version::ENSO_VERSION;
use enso_build::version::promote::Designation;
use enso_build::version::ENSO_EDITION;
use enso_build::version::ENSO_RELEASE_MODE;
use enso_build::version::ENSO_VERSION;
use ide_ci::actions::workflow::definition::checkout_repo_step;
use ide_ci::actions::workflow::definition::is_non_windows_runner;
use ide_ci::actions::workflow::definition::is_windows_runner;
@ -34,6 +34,7 @@ use ide_ci::actions::workflow::definition::WorkflowCall;
use ide_ci::actions::workflow::definition::WorkflowDispatch;
use ide_ci::actions::workflow::definition::WorkflowDispatchInput;
use ide_ci::actions::workflow::definition::WorkflowDispatchInputType;
use ide_ci::actions::workflow::definition::WorkflowToWrite;
use strum::IntoEnumIterator;
@ -171,7 +172,7 @@ pub fn setup_customized_script_steps(
command_line: impl AsRef<str>,
customize: impl FnOnce(Step) -> Vec<Step>,
) -> Vec<Step> {
use enso_build::ci::labels::CLEAN_BUILD_REQUIRED;
use crate::ci::labels::CLEAN_BUILD_REQUIRED;
// Check if the pull request has a "Clean required" label.
let pre_clean_condition =
format!("contains(github.event.pull_request.labels.*.name, '{CLEAN_BUILD_REQUIRED}')",);
@ -495,14 +496,22 @@ pub fn benchmark() -> Result<Workflow> {
Ok(workflow)
}
pub fn generate(repo_root: &enso_build::paths::generated::RepoRootGithubWorkflows) -> Result {
repo_root.changelog_yml.write_as_yaml(&changelog()?)?;
repo_root.nightly_yml.write_as_yaml(&nightly()?)?;
repo_root.scala_new_yml.write_as_yaml(&backend()?)?;
repo_root.gui_yml.write_as_yaml(&gui()?)?;
repo_root.benchmark_yml.write_as_yaml(&benchmark()?)?;
repo_root.release_yml.write_as_yaml(&release()?)?;
repo_root.promote_yml.write_as_yaml(&promote()?)?;
Ok(())
/// Generate workflows for the CI.
pub fn generate(
repo_root: &crate::paths::generated::RepoRootGithubWorkflows,
) -> Result<Vec<WorkflowToWrite>> {
let workflows = [
(repo_root.changelog_yml.to_path_buf(), changelog()?),
(repo_root.nightly_yml.to_path_buf(), nightly()?),
(repo_root.scala_new_yml.to_path_buf(), backend()?),
(repo_root.gui_yml.to_path_buf(), gui()?),
(repo_root.benchmark_yml.to_path_buf(), benchmark()?),
(repo_root.release_yml.to_path_buf(), release()?),
(repo_root.promote_yml.to_path_buf(), promote()?),
];
let workflows = workflows
.into_iter()
.map(|(path, workflow)| WorkflowToWrite { workflow, path, source: module_path!().into() })
.collect();
Ok(workflows)
}

View File

@ -155,13 +155,13 @@ impl JobArchetype for DeployRuntime {
plain_job_customized(&os, "Upload Runtime to ECR", "release deploy-runtime", |step| {
let step = step
.with_secret_exposed_as("CI_PRIVATE_TOKEN", "GITHUB_TOKEN")
.with_env("ENSO_BUILD_ECR_REPOSITORY", enso_build::aws::ecr::runtime::NAME)
.with_env("crate_ECR_REPOSITORY", crate::aws::ecr::runtime::NAME)
.with_secret_exposed_as(secret::ECR_PUSH_RUNTIME_ACCESS_KEY_ID, "AWS_ACCESS_KEY_ID")
.with_secret_exposed_as(
secret::ECR_PUSH_RUNTIME_SECRET_ACCESS_KEY,
"AWS_SECRET_ACCESS_KEY",
)
.with_env("AWS_DEFAULT_REGION", enso_build::aws::ecr::runtime::REGION);
.with_env("AWS_DEFAULT_REGION", crate::aws::ecr::runtime::REGION);
vec![step]
})
}
@ -187,32 +187,29 @@ impl JobArchetype for DeployGui {
pub fn expose_os_specific_signing_secret(os: OS, step: Step) -> Step {
match os {
OS::Windows => step
.with_secret_exposed_as(
secret::WINDOWS_CERT_PATH,
&enso_build::ide::web::env::WIN_CSC_LINK,
)
.with_secret_exposed_as(secret::WINDOWS_CERT_PATH, &crate::ide::web::env::WIN_CSC_LINK)
.with_secret_exposed_as(
secret::WINDOWS_CERT_PASSWORD,
&enso_build::ide::web::env::WIN_CSC_KEY_PASSWORD,
&crate::ide::web::env::WIN_CSC_KEY_PASSWORD,
),
OS::MacOS => step
.with_secret_exposed_as(
secret::APPLE_CODE_SIGNING_CERT,
&enso_build::ide::web::env::CSC_LINK,
&crate::ide::web::env::CSC_LINK,
)
.with_secret_exposed_as(
secret::APPLE_CODE_SIGNING_CERT_PASSWORD,
&enso_build::ide::web::env::CSC_KEY_PASSWORD,
&crate::ide::web::env::CSC_KEY_PASSWORD,
)
.with_secret_exposed_as(
secret::APPLE_NOTARIZATION_USERNAME,
&enso_build::ide::web::env::APPLEID,
&crate::ide::web::env::APPLEID,
)
.with_secret_exposed_as(
secret::APPLE_NOTARIZATION_PASSWORD,
&enso_build::ide::web::env::APPLEIDPASS,
&crate::ide::web::env::APPLEIDPASS,
)
.with_env(&enso_build::ide::web::env::CSC_IDENTITY_AUTO_DISCOVERY, "true"),
.with_env(&crate::ide::web::env::CSC_IDENTITY_AUTO_DISCOVERY, "true"),
_ => step,
}
}

View File

@ -1,6 +1,7 @@
use crate::prelude::*;
use enso_build::paths;
use crate::paths;
use ide_ci::actions::workflow::definition::env_expression;
use ide_ci::actions::workflow::definition::Step;

View File

@ -1,6 +1,6 @@
use crate::prelude::*;
use ide_ci::github::release::ReleaseHandle;
use ide_ci::github::release;
use octocrab::models::repos::Asset;
@ -34,7 +34,7 @@ pub trait IsArtifact: AsRef<Path> + Send + Sync {
Ok(self.as_ref().try_parent()?.try_file_name()?.to_os_string())
}
fn upload_as_asset(&self, release: ReleaseHandle) -> BoxFuture<'static, Result<Asset>> {
fn upload_as_asset(&self, release: release::Handle) -> BoxFuture<'static, Result<Asset>> {
let path = self.as_ref().to_path_buf();
let name = self.asset_file_stem();
async move { release.upload_compressed_dir_as(path, name?).await }.boxed()

View File

@ -550,7 +550,7 @@ impl RunContext {
ReleaseCommand::Upload => {
let artifacts = self.build().await?;
let release_id = crate::env::ENSO_RELEASE_ID.get()?;
let release = ide_ci::github::release::ReleaseHandle::new(
let release = ide_ci::github::release::Handle::new(
&self.inner.octocrab,
repo,
release_id,

View File

@ -9,19 +9,24 @@ use crate::project::ProcessWrapper;
use anyhow::Context;
use futures_util::future::try_join;
use futures_util::future::try_join4;
use ide_ci::github::RepoRef;
use ide_ci::io::download_all;
use ide_ci::program::command;
use ide_ci::program::EMPTY_ARGS;
use ide_ci::programs::node::NpmCommand;
use ide_ci::programs::Npm;
use octocrab::models::repos::Content;
use std::process::Stdio;
use tempfile::TempDir;
use tokio::process::Child;
use tracing::Span;
// ==============
// === Export ===
// ==============
pub mod google_font;
lazy_static! {
/// Path to the file with build information that is consumed by the JS part of the IDE.
@ -35,10 +40,6 @@ pub const IDE_ASSETS_URL: &str =
pub const ARCHIVED_ASSET_FILE: &str = "ide-assets-main/content/assets/";
pub const GOOGLE_FONTS_REPOSITORY: RepoRef = RepoRef { owner: "google", name: "fonts" };
pub const GOOGLE_FONT_DIRECTORY: &str = "ofl";
pub mod env {
use super::*;
@ -49,8 +50,10 @@ pub mod env {
ENSO_BUILD_PROJECT_MANAGER, PathBuf;
ENSO_BUILD_GUI, PathBuf;
ENSO_BUILD_ICONS, PathBuf;
ENSO_BUILD_GUI_WASM, PathBuf;
ENSO_BUILD_GUI_JS_GLUE, PathBuf;
/// List of files that should be copied to the Gui.
ENSO_BUILD_GUI_WASM_ARTIFACTS, Vec<PathBuf>;
/// The main JS bundle to load WASM and JS wasm-pack bundles.
ENSO_BUILD_GUI_ENSOGL_APP, PathBuf;
ENSO_BUILD_GUI_ASSETS, PathBuf;
ENSO_BUILD_IDE_BUNDLED_ENGINE_VERSION, Version;
ENSO_BUILD_PROJECT_MANAGER_IN_BUNDLE_PATH, PathBuf;
@ -101,28 +104,6 @@ impl command::FallibleManipulator for IconsArtifacts {
}
}
/// Download all `.ttf` files of the given Google font family into `output_path`.
///
/// Queries the GitHub contents API of the Google Fonts repository and downloads every TTF file
/// found in the family's subdirectory. Returns the content entries of the downloaded files.
#[context("Failed to download Google font '{family}'.")]
#[instrument(fields(output_path = %output_path.as_ref().display()), ret, err, skip(octocrab))]
pub async fn download_google_font(
    octocrab: &Octocrab,
    family: &str,
    output_path: impl AsRef<Path>,
) -> Result<Vec<Content>> {
    let destination_dir = output_path.as_ref();
    let repo = GOOGLE_FONTS_REPOSITORY.handle(octocrab);
    // Families live under a top-level directory denoting their license (here `ofl`).
    let path = format!("{GOOGLE_FONT_DIRECTORY}/{family}");
    let files = repo.repos().get_content().path(path).send().await?;
    // Only the TTF font files are of interest; skip licenses, metadata, etc.
    let ttf_files =
        files.items.into_iter().filter(|file| file.name.ends_with(".ttf")).collect_vec();
    for file in &ttf_files {
        let destination_file = destination_dir.join(&file.name);
        let url = file.download_url.as_ref().context("Missing 'download_url' in the reply.")?;
        let reply = ide_ci::io::web::client::download(&octocrab.client, url).await?;
        ide_ci::io::web::stream_to_file(reply, &destination_file).await?;
    }
    Ok(ttf_files)
}
/// Fill the directory under `output_path` with the assets.
pub async fn download_js_assets(output_path: impl AsRef<Path>) -> Result {
let output = output_path.as_ref();
@ -175,7 +156,8 @@ impl<Output: AsRef<Path>> ContentEnvironment<TempDir, Output> {
let installation = ide.install();
let asset_dir = TempDir::new()?;
let assets_download = download_js_assets(&asset_dir);
let fonts_download = download_google_font(&ide.octocrab, "mplus1", &asset_dir);
let fonts_download =
google_font::download_google_font(&ide.cache, &ide.octocrab, "mplus1", &asset_dir);
let (wasm, _, _, _) =
try_join4(wasm, installation, assets_download, fonts_download).await?;
ide.write_build_info(build_info)?;
@ -187,10 +169,13 @@ impl<Assets: AsRef<Path>, Output: AsRef<Path>> command::FallibleManipulator
for ContentEnvironment<Assets, Output>
{
fn try_applying<C: IsCommandWrapper + ?Sized>(&self, command: &mut C) -> Result {
let artifacts_for_gui =
self.wasm.files_to_ship().into_iter().map(|file| file.to_path_buf()).collect_vec();
command
.set_env(env::ENSO_BUILD_GUI, self.output_path.as_ref())?
.set_env(env::ENSO_BUILD_GUI_WASM, &self.wasm.wasm())?
.set_env(env::ENSO_BUILD_GUI_JS_GLUE, &self.wasm.js_glue())?
.set_env(env::ENSO_BUILD_GUI_WASM_ARTIFACTS, &artifacts_for_gui)?
.set_env(env::ENSO_BUILD_GUI_ENSOGL_APP, &self.wasm.ensogl_app())?
.set_env(env::ENSO_BUILD_GUI_ASSETS, self.asset_dir.as_ref())?;
Ok(())
}
@ -211,10 +196,12 @@ pub fn target_os_flag(os: OS) -> Result<&'static str> {
}
}
#[derive(Clone, Debug)]
#[derive(Clone, Derivative)]
#[derivative(Debug)]
pub struct IdeDesktop {
pub build_sbt: generated::RepoRootBuildSbt,
pub package_dir: generated::RepoRootAppIdeDesktop,
#[derivative(Debug = "ignore")]
pub octocrab: Octocrab,
pub cache: ide_ci::cache::Cache,
}

View File

@ -0,0 +1,178 @@
//! Downloading Google Fonts.
use crate::prelude::*;
use ide_ci::cache::Cache;
use ide_ci::cache::Storable;
use ide_ci::github;
use ide_ci::github::RepoRef;
use octocrab::models::repos;
// =================
// === Constants ===
// =================
/// Google Fonts repository.
pub const GOOGLE_FONTS_REPOSITORY: RepoRef = RepoRef { owner: "google", name: "fonts" };
/// Path to the directory on the Google Fonts repository where we get the fonts from.
///
/// The directory name denotes the license of the fonts. In our case this is SIL OPEN FONT LICENSE
/// Version 1.1, commonly known as OFL.
pub const GOOGLE_FONT_DIRECTORY: &str = "ofl";
/// We keep dependency to a fixed commit, so we can safely cache it.
///
/// There are no known reasons not to bump this.
pub const GOOGLE_FONT_SHA1: &str = "ea893a43af7c5ab5ccee189fc2720788d99887ed";
// ==============
// === Family ===
// ==============
/// Identifies uniquely a source of font family download.
///
/// Also serves as the cache key for downloaded families (see the `Storable` impl below).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Family {
    /// Remote repository with fonts.
    pub repo: github::Repo,
    /// Which commit we want to be downloaded.
    pub r#ref: String,
    /// Font family. It corresponds to the subdirectories names (under the top-level
    /// license-denoting directories).
    pub name: String,
}
impl Family {
    /// List content items in the repository that contain TTF files for the given font family.
    pub async fn list_ttf(
        &self,
        handle: github::repo::Handle<impl IsRepo>,
    ) -> Result<Vec<repos::Content>> {
        // Font families live in subdirectories of the license-denoting top-level directory.
        let family_dir = format!("{GOOGLE_FONT_DIRECTORY}/{}", self.name);
        let listing =
            handle.repos().get_content().r#ref(&self.r#ref).path(family_dir).send().await?;
        let is_ttf = |entry: &repos::Content| entry.name.ends_with(".ttf");
        Ok(listing.items.into_iter().filter(is_ttf).collect())
    }
}
// ====================
// === DownloadFont ===
// ====================
/// Description of the job to download the fonts.
#[derive(Derivative, Clone)]
#[derivative(Debug)]
pub struct DownloadFont {
    /// The font family to download, including the exact repository revision to take it from.
    pub family: Family,
    /// Possible authentication to GitHub (to get bigger rate limit).
    #[derivative(Debug = "ignore")]
    pub octocrab: Octocrab,
}
impl DownloadFont {
    /// Get a handle to the remote repository with the fonts.
    pub fn handle(&self) -> github::repo::Handle<impl IsRepo> {
        let repo = &self.family.repo;
        repo.handle(&self.octocrab)
    }

    /// Download the font family to the given directory. They will be placed in the output
    /// directory. The function returns relative paths to the downloaded files.
    pub async fn download(&self, output_path: impl AsRef<Path>) -> Result<Vec<PathBuf>> {
        let destination_dir = output_path.as_ref();
        let entries = self.family.list_ttf(self.handle()).await?;
        let mut relative_paths = Vec::with_capacity(entries.len());
        for entry in &entries {
            // The GitHub contents API is expected to provide a direct download link per file.
            let url =
                entry.download_url.as_ref().context("Missing 'download_url' in the reply.")?;
            let response = ide_ci::io::web::client::download(&self.octocrab.client, url).await?;
            let destination_file = destination_dir.join(&entry.name);
            ide_ci::io::web::stream_to_file(response, &destination_file).await?;
            relative_paths.push(entry.name.as_str().into());
        }
        Ok(relative_paths)
    }
}
impl Storable for DownloadFont {
    /// In metadata form we just store paths relative to the store.
    type Metadata = Vec<PathBuf>;
    /// Here paths are absolute.
    type Output = Vec<PathBuf>;
    type Key = Family;

    fn generate(
        &self,
        _cache: Cache,
        store: PathBuf,
    ) -> BoxFuture<'static, Result<Self::Metadata>> {
        // Clone so the future does not borrow `self`.
        let job = self.clone();
        async move { job.download(&store).await }.boxed()
    }

    fn adapt(
        &self,
        cache: PathBuf,
        metadata: Self::Metadata,
    ) -> BoxFuture<'static, Result<Self::Output>> {
        async move {
            // Turn store-relative paths into absolute ones rooted in the cache.
            let absolute = metadata.into_iter().map(|font| cache.join(font)).collect();
            Ok(absolute)
        }
        .boxed()
    }

    fn key(&self) -> Self::Key {
        self.family.clone()
    }
}
// ===================
// === Entry Point ===
// ===================
/// Download a Google font family into `output_path`, using the [`Cache`] to avoid repeated
/// network access.
///
/// The download is pinned to [`GOOGLE_FONT_SHA1`], so cached results stay valid. Returns the
/// paths of the copied font files.
pub async fn download_google_font(
    cache: &Cache,
    octocrab: &Octocrab,
    family: &str,
    output_path: impl AsRef<Path>,
) -> Result<Vec<PathBuf>> {
    let family = Family {
        repo: GOOGLE_FONTS_REPOSITORY.into(),
        r#ref: GOOGLE_FONT_SHA1.into(),
        name: family.into(),
    };
    let job = DownloadFont { family, octocrab: octocrab.clone() };
    let cached_fonts = cache.get(job).await?;
    // Copy every cached font into the requested output directory.
    let copies =
        cached_fonts.into_iter().map(|font| ide_ci::fs::tokio::copy_to(font, &output_path));
    futures::future::join_all(copies).await.into_iter().collect::<Result<Vec<_>>>()
}
// =============
// === Tests ===
// =============
#[cfg(test)]
mod tests {
    use super::*;

    /// Manual smoke test: downloads the `mplus1` family into a fixed local directory.
    ///
    /// Ignored because it performs real network I/O and writes to a hard-coded Windows path;
    /// run it explicitly when debugging the font-download pipeline.
    #[tokio::test]
    #[ignore]
    async fn new_download() -> Result {
        setup_logging()?;
        let path = r"C:\temp\google_fonts2";
        let octocrab = ide_ci::github::setup_octocrab().await?;
        let cache = Cache::new_default().await?;
        let aaa = download_google_font(&cache, &octocrab, "mplus1", path).await?;
        dbg!(aaa);
        Ok(())
    }
}

View File

@ -49,6 +49,7 @@ pub mod aws;
pub mod bump_version;
pub mod changelog;
pub mod ci;
pub mod ci_gen;
pub mod config;
pub mod context;
pub mod engine;
@ -110,62 +111,10 @@ pub fn get_java_major_version(build_sbt_contents: &str) -> Result<java::Language
get_string_assignment_value(build_sbt_contents, "javaVersion")?.parse2()
}
/// Retrieve a GitHub personal access token.
///
/// First tries the `GITHUB_TOKEN` environment variable; if that is not set, falls back to
/// reading a `GITHUB_TOKEN` file in the user's home directory.
pub fn retrieve_github_access_token() -> Result<String> {
    /// Read the token from the `GITHUB_TOKEN` file in the home directory.
    fn get_token_from_file() -> Result<String> {
        let path =
            dirs::home_dir().context("Failed to locate home directory.")?.join("GITHUB_TOKEN");
        debug!("Looking for GitHub token in the file {}", path.display());
        let content = ide_ci::fs::read_to_string(path)?;
        // Trim to be resilient against trailing newlines in the token file.
        Ok(content.trim().into())
    }

    ide_ci::env::expect_var("GITHUB_TOKEN")
        .inspect(|_| debug!("Will use GITHUB_TOKEN environment variable."))
        .inspect_err(|e| debug!("Failed to retrieve GitHub authentication from environment: {e}"))
        .or_else(|_| get_token_from_file())
}
/// Set up a GitHub API client.
///
/// Uses a personal access token if one can be retrieved (see [`retrieve_github_access_token`]),
/// otherwise falls back to anonymous, rate-limited access. Logs the effective rate limit and
/// fails if it cannot be queried (e.g. because the token is invalid).
#[context("Failed to setup GitHub API client.")]
pub async fn setup_octocrab() -> Result<Octocrab> {
    let builder = octocrab::OctocrabBuilder::new();
    let octocrab = if let Ok(access_token) = retrieve_github_access_token() {
        let octocrab = builder.personal_token(access_token).build()?;
        // Identify the authenticated user — for logging only; failure is not fatal.
        let username = octocrab
            .current()
            .user()
            .await
            .inspect_err(|e| warn!("Failed to retrieve GitHub username: {e}"))
            .map_or_else(|_| "N/A".to_string(), |user| user.login);
        info!("Using GitHub API with personal access token. Authenticated as {username}.",);
        octocrab
    } else {
        info!("No GitHub Personal Access Token found. Will use anonymous API access.");
        warn!(
            "Anonymous GitHub API access is rate-limited. If you are experiencing issues, please \
            set the GITHUB_TOKEN environment variable."
        );
        warn!(
            "Additionally some APIs may not be available to anonymous users. This primarily \
            pertains the release-related APIs."
        );
        builder.build()?
    };
    // Querying the rate limit also serves as a validation of the credentials.
    match octocrab.ratelimit().get().await {
        Ok(rate) => info!(
            "GitHub API rate limit: {}/{}.",
            rate.resources.core.used, rate.resources.core.limit
        ),
        Err(e) => bail!(
            "Failed to get rate limit info: {e}. GitHub Personal Access Token might be invalid."
        ),
    }
    Ok(octocrab)
}
#[cfg(test)]
mod tests {
use super::*;
use ide_ci::github::setup_octocrab;
#[tokio::test]
#[ignore]

View File

@ -15,6 +15,7 @@ use derivative::Derivative;
use ide_ci::cache;
use ide_ci::fs::compressed_size;
use ide_ci::fs::copy_file_if_different;
use ide_ci::goodies::shader_tools::ShaderTools;
use ide_ci::programs::cargo;
use ide_ci::programs::wasm_opt;
use ide_ci::programs::wasm_opt::WasmOpt;
@ -217,6 +218,8 @@ impl IsTarget for Wasm {
// We want to be able to pass --profile this way.
WasmPack.require_present_that(VersionReq::parse(">=0.10.1")?).await?;
ShaderTools.install_if_missing(&cache).await?;
let BuildInput {
crate_path,
wasm_opt_options,
@ -236,34 +239,43 @@ impl IsTarget for Wasm {
info!("Building wasm.");
let temp_dir = tempdir()?;
let temp_dist = RepoRootDistWasm::new_root(temp_dir.path());
let mut command = WasmPack.cmd()?;
command
.current_dir(&repo_root)
.kill_on_drop(true)
.env_remove(ide_ci::programs::rustup::env::RUSTUP_TOOLCHAIN.name())
.build()
.arg(wasm_pack::Profile::from(*profile))
.target(wasm_pack::Target::Web)
.output_directory(&temp_dist)
.output_name(OUTPUT_NAME)
.arg(crate_path)
.arg("--")
.apply(&cargo::Color::Always)
.args(extra_cargo_options);
ensogl_pack::build(
ensogl_pack::WasmPackOutputs {
out_dir: temp_dist.path.clone(),
out_name: OUTPUT_NAME.into(),
},
|args| {
let mut command = WasmPack.cmd()?;
command
.current_dir(&repo_root)
.kill_on_drop(true)
.env_remove(ide_ci::programs::rustup::env::RUSTUP_TOOLCHAIN.name())
.build()
.arg(wasm_pack::Profile::from(*profile))
.target(wasm_pack::Target::Web)
.output_directory(args.out_dir)
.output_name(args.out_name)
.arg(crate_path)
.arg("--")
.apply(&cargo::Color::Always)
.args(extra_cargo_options);
if let Some(profiling_level) = profiling_level {
command.set_env(env::ENSO_MAX_PROFILING_LEVEL, &profiling_level)?;
}
command.set_env(env::ENSO_MAX_LOG_LEVEL, &log_level)?;
command.set_env(env::ENSO_MAX_UNCOLLAPSED_LOG_LEVEL, &uncollapsed_log_level)?;
command.run_ok().await?;
if let Some(profiling_level) = profiling_level {
command.set_env(env::ENSO_MAX_PROFILING_LEVEL, &profiling_level)?;
}
command.set_env(env::ENSO_MAX_LOG_LEVEL, &log_level)?;
command.set_env(env::ENSO_MAX_UNCOLLAPSED_LOG_LEVEL, &uncollapsed_log_level)?;
Ok(command)
},
)
.await?;
Self::finalize_wasm(wasm_opt_options, *skip_wasm_opt, *profile, &temp_dist).await?;
ide_ci::fs::create_dir_if_missing(&destination)?;
let ret = RepoRootDistWasm::new_root(&destination);
ide_ci::fs::copy(&temp_dist, &ret)?;
inner.perhaps_check_size(&ret.wasm_main).await?;
inner.perhaps_check_size(&ret.pkg_opt_wasm).await?;
Ok(Artifact(ret))
}
.instrument(span)
@ -401,14 +413,26 @@ impl Artifact {
pub fn new(path: impl Into<PathBuf>) -> Self {
Self(RepoRootDistWasm::new_root(path))
}
pub fn wasm(&self) -> &Path {
&self.0.wasm_main
/// The main JS bundle to load WASM and JS wasm-pack bundles.
pub fn ensogl_app(&self) -> &Path {
&self.0.index_cjs
}
pub fn js_glue(&self) -> &Path {
&self.0.wasm_glue
}
pub fn dir(&self) -> &Path {
&self.0.path
/// Files that should be shipped in the Gui bundle.
pub fn files_to_ship(&self) -> Vec<&Path> {
// We explicitly deconstruct object, so when new fields are added, we will be forced to
// consider whether they should be shipped or not.
let RepoRootDistWasm {
path: _,
shaders,
index_cjs: _,
index_d_ts: _,
pkg_js,
pkg_wasm: _,
pkg_opt_wasm,
} = &self.0;
vec![shaders.as_path(), pkg_js.as_path(), pkg_opt_wasm.as_path()]
}
}
@ -523,12 +547,12 @@ impl Wasm {
}
wasm_opt_command
.args(wasm_opt_options)
.arg(&temp_dist.wasm_main_raw)
.apply(&wasm_opt::Output(&temp_dist.wasm_main))
.arg(&temp_dist.pkg_wasm)
.apply(&wasm_opt::Output(&temp_dist.pkg_opt_wasm))
.run_ok()
.await?;
} else {
copy_file_if_different(&temp_dist.wasm_main_raw, &temp_dist.wasm_main)?;
copy_file_if_different(&temp_dist.pkg_wasm, &temp_dist.pkg_opt_wasm)?;
}
Ok(())
}

View File

@ -152,13 +152,14 @@ pub async fn publish_release(context: &BuildContext) -> Result {
let BuildContext { inner: project::Context { .. }, triple, .. } = context;
let release_id = crate::env::ENSO_RELEASE_ID.get()?;
let release_handle = remote_repo.release_handle(release_id);
debug!("Looking for release with id {release_id} on github.");
let release = remote_repo.repos().releases().get_by_id(release_id).await?;
let release = release_handle.get().await?;
ensure!(release.draft, "Release has been already published!");
debug!("Found the target release, will publish it.");
remote_repo.repos().releases().update(release.id.0).draft(false).send().await?;
release_handle.publish().await?;
debug!("Done. Release URL: {}", release.url);
let temp = tempdir()?;

View File

@ -1,3 +1,5 @@
//! Module for dealing with repositories owned by our project.
use crate::prelude::*;
@ -26,18 +28,15 @@ pub fn looks_like_enso_repository_root(path: impl AsRef<Path>) -> bool {
.unwrap_or(false)
}
/// Deduce the path to the root of the Enso repository.
///
/// This function will traverse the filesystem upwards from the binary location until it finds a
/// directory that looks like the root of the Enso repository.
#[instrument(ret, err)]
pub fn deduce_repository_path() -> Result<PathBuf> {
let candidate_paths = [
std::env::current_dir().ok(),
std::env::current_dir().ok().and_then(|p| p.parent().map(ToOwned::to_owned)),
std::env::current_dir().ok().and_then(|p| p.parent().map(|p| p.join("enso5"))),
std::env::current_dir().ok().and_then(|p| p.parent().map(|p| p.join("enso"))),
];
for candidate in candidate_paths {
if let Some(path) = candidate && looks_like_enso_repository_root(&path) {
return Ok(path)
}
let mut path = ide_ci::env::current_exe()?;
while !looks_like_enso_repository_root(&path) {
ensure!(path.pop(), "Failed to deduce repository path.");
}
bail!("Could not deduce repository path.")
Ok(path)
}

View File

@ -1,11 +1,15 @@
//! Code for dealing with the [Enso Cloud repository](https://github.com/enso-org/cloud-v2).
use crate::prelude::*;
use ide_ci::github::RepoRef;
/// The cloud repository.
pub const CLOUD_REPO: RepoRef = RepoRef { owner: "enso-org", name: "cloud-v2" };
/// The workflow we need to invoke to build the backend image.
pub const BUILD_IMAGE_WORKFLOW: &str = "build-image.yaml";
/// Build Image workflow input. Follows schema defined by
@ -34,7 +38,7 @@ pub async fn build_image_workflow_dispatch_input(octocrab: &Octocrab, version: &
#[cfg(test)]
mod tests {
use super::*;
use crate::setup_octocrab;
use ide_ci::github::setup_octocrab;
#[tokio::test]
#[ignore]

12
build/ci-gen/Cargo.toml Normal file
View File

@ -0,0 +1,12 @@
[package]
name = "enso-build-ci-gen"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ide-ci = { path = "../ci_utils" }
enso-build = { path = "../build" }
enso-build-shader-tools = { path = "../shader-tools" }
serde_yaml = { workspace = true }
tokio = { workspace = true }

46
build/ci-gen/src/main.rs Normal file
View File

@ -0,0 +1,46 @@
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
#![allow(clippy::bool_to_int_with_if)]
#![allow(clippy::let_and_return)]
use crate::prelude::*;
use enso_build::paths::generated::RepoRootGithub;
use enso_build::repo::deduce_repository_path;
use ide_ci::actions::workflow::definition::WorkflowToWrite;
pub mod prelude {
pub use enso_build::prelude::*;
pub use enso_build_shader_tools::prelude::*;
}
/// Generate the comment that is at the top of each generated workflow file.
///
/// The comment warns against manual edits and points at the module and binary that
/// regenerate the file.
fn preamble(source: &str) -> String {
    let package = env!("CARGO_PKG_NAME");
    format!(
        "# This file is auto-generated. Do not edit it manually!\n\
        # Edit the {source} module instead and run `cargo run --package {package}`."
    )
}
/// Regenerate all auto-generated GitHub Actions workflow files in the repository.
#[tokio::main]
async fn main() -> Result {
    setup_logging()?;
    let repo_root = deduce_repository_path()?;
    let workflows_dir = RepoRootGithub::new_under(&repo_root).workflows;
    // Workflows generated from the `ci_gen` module, plus the shader-tools one.
    let mut workflows = enso_build::ci_gen::generate(&workflows_dir)?;
    workflows.push(enso_build_shader_tools::ci::generate_workflow(&workflows_dir.shader_tools_yml));
    for WorkflowToWrite { source, path, workflow } in workflows {
        // Each file carries a preamble pointing at the module that generated it.
        let preamble = preamble(&source);
        let yaml = serde_yaml::to_string(&workflow)?;
        let contents = format!("{preamble}\n\n{yaml}");
        ide_ci::fs::tokio::write(path, contents).await?;
    }
    warn!("Remember to run formatter on the generated files!");
    Ok(())
}

View File

@ -44,9 +44,7 @@ mime = "0.3.16"
multimap = "0.8.3"
new_mime_guess = "4.0.0"
nix = { workspace = true }
octocrab = { git = "https://github.com/enso-org/octocrab", default-features = false, features = [
"rustls"
] }
octocrab = { workspace = true }
paste = "1.0.7"
path-absolutize = "3.0.11"
pathdiff = "0.2.1"
@ -58,14 +56,14 @@ pretty_env_logger = "0.4.0"
proc-macro2 = "1.0"
quote = "1.0"
rand = "0.8.4"
regex = "1.5.4"
regex = { workspace = true }
reqwest = { version = "0.11.5", default-features = false, features = [
"stream"
] }
semver = { version = "1.0.4", features = ["serde"] }
serde = { version = "1.0.130", features = ["derive"] }
serde_json = "1.0.68"
serde_yaml = "0.9.10"
serde_yaml = { workspace = true }
scopeguard = "1.1.0"
sha2 = "0.10.2"
shrinkwraprs = "0.3.0"

View File

@ -1,3 +1,5 @@
//! Model of a workflow definition and related utilities.
use crate::prelude::*;
use crate::env::accessor::RawVariable;
@ -6,9 +8,15 @@ use heck::ToKebabCase;
use std::collections::btree_map::Entry;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::convert::identity;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering;
/// Default timeout for a job.
///
/// We use a very long timeout because we want to avoid cancelling jobs that are just slow.
pub const DEFAULT_TIMEOUT_IN_MINUTES: u32 = 360;
pub fn wrap_expression(expression: impl AsRef<str>) -> String {
@ -174,7 +182,9 @@ impl Workflow {
impl Workflow {
pub fn add_job(&mut self, job: Job) -> String {
let key = job.name.to_kebab_case();
self.jobs.insert(key.clone(), job);
if self.jobs.insert(key.clone(), job).is_some() {
warn!("Job with name {key} already exists.");
}
key
}
@ -573,6 +583,10 @@ pub enum JobSecrets {
Map(BTreeMap<String, String>),
}
/// Job is a top-level building block of a workflow.
///
/// It is scheduled to run on a specific runner. A workflow can have multiple jobs, they will run in
/// parallel by default.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Job {
@ -604,10 +618,37 @@ pub struct Job {
}
impl Job {
pub fn new(name: impl Into<String>) -> Self {
Self { name: name.into(), timeout_minutes: Some(DEFAULT_TIMEOUT_IN_MINUTES), ..default() }
/// Create a new job definition.
pub fn new(
name: impl Into<String>,
runs_on: impl IntoIterator<Item: Into<RunnerLabel>>,
) -> Self {
Self {
name: name.into(),
timeout_minutes: Some(DEFAULT_TIMEOUT_IN_MINUTES),
runs_on: runs_on.into_iter().map(Into::into).collect(),
..Default::default()
}
}
/// Add a step to this job, while exposing the step's outputs as job outputs.
pub fn add_step_with_output(
    &mut self,
    mut step: Step,
    outputs: impl IntoIterator<Item: Into<String>>,
) {
    static COUNTER: AtomicU64 = AtomicU64::new(0);
    // A step must have a unique id if we want to access its output. If the caller did not
    // provide one, generate a fresh `step_N` id from a process-wide counter.
    let id =
        step.id.unwrap_or_else(|| format!("step_{}", COUNTER.fetch_add(1, Ordering::SeqCst)));
    step.id = Some(id.clone());
    self.steps.push(step);
    for output in outputs {
        self.expose_output(&id, output);
    }
}
/// Expose a step's output as a job output.
pub fn expose_output(&mut self, step_id: impl AsRef<str>, output_name: impl Into<String>) {
let step = step_id.as_ref();
let output = output_name.into();
@ -620,14 +661,19 @@ impl Job {
}
}
/// Define an environment variable for this job; it will be available to all steps.
pub fn env(&mut self, name: impl Into<String>, value: impl Into<String>) {
    self.env.insert(name.into(), value.into());
}
/// Expose a secret as an environment variable for this job.
pub fn expose_secret_as(&mut self, secret: impl AsRef<str>, given_name: impl Into<String>) {
    // `${{ secrets.NAME }}` is the GitHub Actions expression syntax for secrets.
    self.env(given_name, format!("${{{{ secrets.{} }}}}", secret.as_ref()));
}
/// Expose outputs of another job as environment variables in this job.
///
/// This also adds a dependency on the other job.
pub fn use_job_outputs(&mut self, job_id: impl Into<String>, job: &Job) {
let job_id = job_id.into();
for output_name in job.outputs.keys() {
@ -637,14 +683,19 @@ impl Job {
self.needs(job_id);
}
/// Add a dependency on another job.
pub fn needs(&mut self, job_id: impl Into<String>) {
self.needs.insert(job_id.into());
}
/// Set an input for the invoked reusable workflow.
///
/// This is only valid if the job uses a reusable workflow.
pub fn with(&mut self, name: impl Into<String>, value: impl Into<String>) {
self.with.insert(name.into(), value.into());
}
/// Like [`with`](Self::with), but self-consuming.
pub fn with_with(mut self, name: impl Into<String>, value: impl Into<String>) -> Self {
self.with(name, value);
self
@ -817,6 +868,7 @@ pub enum CheckoutArgumentSubmodules {
pub mod step {
use super::*;
use crate::github;
#[derive(Clone, Debug, Serialize, Deserialize)]
@ -825,6 +877,11 @@ pub mod step {
pub enum Argument {
#[serde(rename_all = "kebab-case")]
Checkout {
#[serde(
skip_serializing_if = "Option::is_none",
with = "crate::serde::via_string_opt"
)]
repository: Option<github::Repo>,
#[serde(skip_serializing_if = "Option::is_none")]
clean: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
@ -879,7 +936,7 @@ pub enum RunnerLabel {
MatrixOs,
}
pub fn checkout_repo_step() -> impl IntoIterator<Item = Step> {
pub fn checkout_repo_step_customized(f: impl FnOnce(Step) -> Step) -> Vec<Step> {
// This is a workaround for a bug in GH actions/checkout. If a submodule is added and removed,
// it effectively breaks any future builds of this repository on a given self-hosted runner.
// The workaround step below comes from:
@ -921,12 +978,19 @@ pub fn checkout_repo_step() -> impl IntoIterator<Item = Step> {
// shallow copy of the repo.
uses: Some("actions/checkout@v2".into()),
with: Some(step::Argument::Checkout {
repository: None,
clean: Some(false),
submodules: Some(CheckoutArgumentSubmodules::Recursive),
}),
..default()
};
[submodules_workaround_win, submodules_workaround_linux, actual_checkout]
// Apply customization.
let actual_checkout = f(actual_checkout);
vec![submodules_workaround_win, submodules_workaround_linux, actual_checkout]
}
/// The default sequence of repository checkout steps (including the submodule workarounds),
/// without any customization of the actual checkout step.
pub fn checkout_repo_step() -> impl IntoIterator<Item = Step> {
    checkout_repo_step_customized(identity)
}
pub trait JobArchetype {
@ -957,3 +1021,14 @@ pub trait JobArchetype {
}
}
}
/// Describes the workflow to be stored on the disk.
#[derive(Clone, Debug)]
pub struct WorkflowToWrite {
    /// The workflow to be stored.
    pub workflow: Workflow,
    /// The path where the workflow should be stored.
    pub path: PathBuf,
    /// Who generated this workflow (e.g. the module name); used in the generated file preamble.
    pub source: String,
}

View File

@ -1,9 +1,64 @@
// use crate::prelude::*;
//
// use crate::models::config::RepoContext;
// use octocrab::repos::RepoHandler;
// use reqwest::RequestBuilder;
// use crate::cache::Cache;
// use crate::cache::Storable;
// use crate::github;
// use octocrab::models::AssetId;
// use reqwest::header::HeaderMap;
// use reqwest::header::HeaderValue;
//
// #[derive(Clone, Debug, Serialize, Deserialize)]
// pub struct Key {
// pub repository: github::Repo,
// pub asset_id: AssetId,
// }
//
// #[derive(Clone, Debug)]
// pub struct Asset {
// pub key: Key,
// pub octocrab: Octocrab,
// }
//
// impl Storable for Asset {
// type Metadata = ();
// type Output = PathBuf;
// type Key = Key;
//
// fn generate(&self, cache: Cache, store: PathBuf) -> BoxFuture<'static,
// Result<Self::Metadata>> { let this = self.clone();
// async move {
// let Asset { octocrab, key: Key { asset_id, repository } } = this;
// let url =
// format!("https://api.github.com/repos/{repository}/releases/assets/{asset_id}");
// let url = Url::parse(&url)?;
// let job = crate::cache::download::DownloadFile {
// client: octocrab.client.clone(),
// key: crate::cache::download::Key {
// url: url.clone(),
// additional_headers: HeaderMap::from_iter([(
// reqwest::header::ACCEPT,
// HeaderValue::from_static(mime::APPLICATION_OCTET_STREAM.as_ref()),
// )]),
// },
// };
// cache.get(job).await.map(|_| ())
// }
// .boxed()
// }
//
// fn adapt(
// &self,
// cache: PathBuf,
// _metadata: Self::Metadata,
// ) -> BoxFuture<'static, Result<Self::Output>> {
// ready(Result::Ok(cache)).boxed()
// }
//
// fn key(&self) -> Self::Key {
// self.key.clone()
// }
// }
// pub struct DownloadAsset {
// pub octocrab: Octocrab,
// pub repo: RepoContext,

View File

@ -14,11 +14,18 @@ pub mod sbt;
/// Something that can be downloaded and, after that, enabled by modifying global state.
/// Something that can be obtained (with IO) and, after that, enabled by modifying global state.
pub trait Goodie: Debug + Clone + Send + Sync + 'static {
fn url(&self) -> BoxFuture<'static, Result<Url>>;
/// Obtain and place this in the cache.
fn get(&self, cache: &Cache) -> BoxFuture<'static, Result<PathBuf>>;
/// Check whether this is already present.
fn is_active(&self) -> BoxFuture<'static, Result<bool>>;
/// Changes to the environment to activate this.
fn activation_env_changes(&self, package_path: &Path) -> Result<Vec<crate::env::Modification>>;
/// Apply the activation environment changes.
fn activate(&self, package_path: &Path) -> Result {
for modification in self.activation_env_changes(package_path)? {
modification.apply()?;
@ -42,56 +49,38 @@ pub trait GoodieExt: Goodie {
trace!("Skipping activation of {this:?} because it already present.",);
Result::Ok(None)
} else {
let package = this.download(&cache).await?;
let package = this.get(&cache).await?;
this.activate(&package)?;
Result::Ok(Some(package))
}
}
.boxed()
}
fn package(
&self,
) -> BoxFuture<'static, Result<cache::archive::ExtractedArchive<cache::download::DownloadFile>>>
{
let url_fut = self.url();
async move {
let url = url_fut.await?;
let archive_source = cache::download::DownloadFile::new(url)?;
let path_to_extract = None;
Ok(cache::archive::ExtractedArchive { archive_source, path_to_extract })
}
.boxed()
}
fn download(&self, cache: &Cache) -> BoxFuture<'static, Result<PathBuf>> {
let package = self.package();
let cache = cache.clone();
async move { cache.get(package.await?).await }.boxed()
}
}
impl<T: Goodie> GoodieExt for T {}
//
// /// Whoever owns a token, can assume that the Goodie is available.
// #[derive(Clone, Debug, Display)]
// pub struct Token<G>(G);
//
// #[derive(Clone, Debug, Display)]
// pub struct PotentialFutureGoodie<G>(Box<dyn FnOnce() -> BoxFuture<'static, Result<Token<G>>>>);
//
// impl<G> PotentialFutureGoodie<G> {
// pub fn new<F, Fut>(f: F) -> Self
// where
// F: FnOnce() -> Fut + 'static,
// Fut: Future<Output = Result<Token<G>>> + Send + 'static, {
// Self(Box::new(move || f().boxed()))
// }
// }
//
// // pub type GoodieGenerator<G: Goodie> =
// // dyn FnOnce(Cache, G) -> BoxFuture<'static, Result<Token<G>>> + Send + Sync + 'static;
// //
// // pub type PotentialFutureGoodie<G: Goodie> =
// // dyn FnOnce(Cache) -> BoxFuture<'static, Result<Token<G>>> + Send + Sync + 'static;
/// Download an archive from the given URL and extract it into the cache.
///
/// Returns the path to the extracted contents within the cache.
pub fn download_url(url: Url, cache: &Cache) -> BoxFuture<'static, Result<PathBuf>> {
    download_try_url(Ok(url), cache)
}
/// Like [`download_url`], but accepts a URL that might have failed to be constructed.
pub fn download_try_url(url: Result<Url>, cache: &Cache) -> BoxFuture<'static, Result<PathBuf>> {
    download_try_future_url(ready(url), cache)
}
/// Like [`download_url`], but the URL is resolved asynchronously.
///
/// Downloads the archive and extracts it into the cache, returning the path to the extracted
/// contents within the cache.
pub fn download_try_future_url(
    url: impl Future<Output = Result<Url>> + Send + 'static,
    cache: &Cache,
) -> BoxFuture<'static, Result<PathBuf>> {
    // Clone so the returned future does not borrow the cache.
    let cache = cache.clone();
    async move {
        let resolved_url = url.await?;
        let archive_source = cache::download::DownloadFile::new(resolved_url)?;
        // No subdirectory selection — extract the whole archive.
        let path_to_extract = None;
        let package = cache::archive::ExtractedArchive { archive_source, path_to_extract };
        cache.get(package).await
    }
    .boxed()
}

View File

@ -1,6 +1,7 @@
use crate::prelude::*;
use crate::cache;
use crate::cache::goodie;
use crate::cache::Cache;
use crate::env::known::PATH;
use crate::program::version::IsVersionPredicate;
use crate::programs::wasm_opt;
@ -20,22 +21,24 @@ impl IsVersionPredicate for Binaryen {
}
}
impl Binaryen {}
impl cache::Goodie for Binaryen {
fn url(&self) -> BoxFuture<'static, Result<Url>> {
impl Binaryen {
fn url(&self) -> Result<Url> {
let version = format!("version_{}", self.version);
async move {
let target = match (TARGET_OS, TARGET_ARCH) {
(OS::Windows, Arch::X86_64) => "x86_64-windows",
(OS::Linux, Arch::X86_64) => "x86_64-linux",
(OS::MacOS, Arch::X86_64) => "x86_64-macos",
(OS::MacOS, Arch::AArch64) => "arm64-macos",
(os, arch) => bail!("Not supported arch/OS combination: {arch}-{os}."),
};
let url = format!("https://github.com/WebAssembly/binaryen/releases/download/{version}/binaryen-{version}-{target}.tar.gz");
url.parse2()
}.boxed()
let target = match (TARGET_OS, TARGET_ARCH) {
(OS::Windows, Arch::X86_64) => "x86_64-windows",
(OS::Linux, Arch::X86_64) => "x86_64-linux",
(OS::MacOS, Arch::X86_64) => "x86_64-macos",
(OS::MacOS, Arch::AArch64) => "arm64-macos",
(os, arch) => bail!("Not supported arch/OS combination: {arch}-{os}."),
};
let url = format!("https://github.com/WebAssembly/binaryen/releases/download/{version}/binaryen-{version}-{target}.tar.gz");
url.parse2()
}
}
impl Goodie for Binaryen {
fn get(&self, cache: &Cache) -> BoxFuture<'static, Result<PathBuf>> {
goodie::download_try_url(self.url(), cache)
}
fn is_active(&self) -> BoxFuture<'static, Result<bool>> {

View File

@ -1,6 +1,8 @@
use crate::prelude::*;
use crate::cache::goodie;
use crate::cache::goodie::Goodie;
use crate::cache::Cache;
use crate::env::known::PATH;
use crate::github::RepoRef;
use crate::programs::java;
@ -45,18 +47,11 @@ pub struct GraalVM {
}
impl Goodie for GraalVM {
fn url(&self) -> BoxFuture<'static, Result<Url>> {
let platform_string = self.platform_string();
let graal_version = self.graal_version.clone();
let client = self.client.clone();
async move {
let repo = CE_BUILDS_REPOSITORY.handle(&client);
let release = repo.find_release_by_text(&graal_version.to_string()).await?;
crate::github::find_asset_url_by_text(&release, &platform_string).cloned()
}
.boxed()
fn get(&self, cache: &Cache) -> BoxFuture<'static, Result<PathBuf>> {
goodie::download_try_future_url(self.url(), cache)
}
fn is_active(&self) -> BoxFuture<'static, Result<bool>> {
let expected_graal_version = self.graal_version.clone();
let expected_java_language_version = self.java_version;
@ -90,6 +85,18 @@ impl Goodie for GraalVM {
}
impl GraalVM {
/// Resolve the download URL of the GraalVM release asset matching this configuration.
///
/// Looks up the release by the Graal version text in the CE builds repository and picks the
/// asset whose name contains the platform string.
pub fn url(&self) -> BoxFuture<'static, Result<Url>> {
    // Clone what the future needs so it does not borrow `self`.
    let platform_string = self.platform_string();
    let graal_version = self.graal_version.clone();
    let client = self.client.clone();
    async move {
        let repo = CE_BUILDS_REPOSITORY.handle(&client);
        let release = repo.find_release_by_text(&graal_version.to_string()).await?;
        crate::github::find_asset_url_by_text(&release, &platform_string).cloned()
    }
    .boxed()
}
pub fn platform_string(&self) -> String {
let Self { graal_version: _graal_version, java_version, arch, os, client: _client } = &self;
let os_name = match *os {

View File

@ -1,6 +1,7 @@
use crate::prelude::*;
use crate::cache;
use crate::cache::goodie;
use crate::cache::Cache;
use crate::env::known::PATH;
use crate::programs;
@ -15,9 +16,9 @@ crate::define_env_var! {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Display)]
pub struct Sbt;
impl cache::Goodie for Sbt {
fn url(&self) -> BoxFuture<'static, Result<Url>> {
ready(Url::parse(DOWNLOAD_URL_TEXT).anyhow_err()).boxed()
impl Goodie for Sbt {
fn get(&self, cache: &Cache) -> BoxFuture<'static, Result<PathBuf>> {
goodie::download_try_url(Url::from_str(DOWNLOAD_URL_TEXT), cache)
}
fn is_active(&self) -> BoxFuture<'static, Result<bool>> {

View File

@ -30,6 +30,11 @@ pub fn set_current_dir(path: impl AsRef<Path>) -> Result {
.with_context(|| format!("Failed to set current directory to {}.", path.as_ref().display()))
}
/// Like [`std::env::current_exe`], but with nicer error message.
pub fn current_exe() -> Result<PathBuf> {
std::env::current_exe().context("Failed to get current executable path.")
}
/// Like [`std::env::set_var`], but with log.
pub fn set_var<K: AsRef<OsStr>, V: AsRef<OsStr>>(key: K, value: V) {
debug!(
@ -63,6 +68,13 @@ pub fn remove_var<K: AsRef<OsStr>>(key: K) {
#[macro_export]
macro_rules! define_env_var {
() => {};
($(#[$attr:meta])* $name: ident, Vec<PathBuf>; $($tail:tt)*) => {
#[allow(non_upper_case_globals)]
$(#[$attr])*
pub const $name: $crate::env::accessor::PathLike =
$crate::env::accessor::PathLike(stringify!($name));
$crate::define_env_var!($($tail)*);
};
($(#[$attr:meta])* $name: ident, PathBuf; $($tail:tt)*) => {
#[allow(non_upper_case_globals)]
$(#[$attr])*
@ -101,7 +113,7 @@ pub fn expect_var_os(name: impl AsRef<OsStr>) -> Result<OsString> {
.with_context(|| format!("Missing environment variable {}.", name.to_string_lossy()))
}
pub fn prepend_to_path(path: impl Into<PathBuf>) -> Result {
pub fn prepend_to_path(path: impl AsRef<Path>) -> Result {
known::PATH.prepend(path)
}

View File

@ -210,8 +210,9 @@ impl TypedVariable for PathLike {
}
impl PathLike {
pub fn prepend(&self, value: impl Into<PathBuf>) -> Result {
let value = value.into();
#[context("Failed to prepend path `{}` to `{}`.", value.as_ref().display(), self.name())]
pub fn prepend(&self, value: impl AsRef<Path>) -> Result {
let value = value.as_ref().to_path_buf();
trace!("Prepending {} to {}.", value.display(), self.name());
let mut paths = self.get()?;
paths.insert(0, value);

View File

@ -4,13 +4,40 @@ use semver::Prerelease;
/// Extension methods for [`Version`].
pub trait VersionExt {
/// Get the version numbers, excluding the prerelease or build metadata.
fn core(&self) -> (u64, u64, u64);
/// Check if the version are the same while ignoring any prerelease or build metadata.
fn same_core(&self, other: &Self) -> bool {
self.core() == other.core()
}
/// Get the identifiers (i.e. the dot-separated parts after the hyphen) of the prerelease.
///
/// ```
/// # use semver::Version;
/// # use ide_ci::extensions::version::VersionExt;
/// assert!(Version::parse("1.2.3").unwrap().identifiers().is_empty());
/// assert_eq!(Version::parse("1.2.3-alpha").unwrap().identifiers(), vec!["alpha"]);
/// assert_eq!(Version::parse("1.2.3-alpha.1").unwrap().identifiers(), vec!["alpha", "1"]);
/// assert_eq!(Version::parse("1.2.3-alpha+build.1").unwrap().identifiers(), vec!["alpha"]);
/// ```
fn identifiers(&self) -> Vec<&str>;
/// Generate next minor version for this major release.
///
/// ```
/// # use semver::Version;
/// # use ide_ci::extensions::version::VersionExt;
/// let version = Version::parse("1.2.3-dev").unwrap();
/// assert_eq!(version.next_minor().to_string(), "1.3.0");
///
/// let version = Version::parse("2.2.0+fooo").unwrap();
/// assert_eq!(version.next_minor().to_string(), "2.3.0");
/// ```
fn next_minor(&self) -> Self;
}
impl VersionExt for Version {
@ -20,6 +47,9 @@ impl VersionExt for Version {
fn identifiers(&self) -> Vec<&str> {
self.pre.identifiers()
}
fn next_minor(&self) -> Self {
Version::new(self.major, self.minor + 1, 0)
}
}
pub trait PrereleaseExt {

View File

@ -1,8 +1,12 @@
//! Wrappers around the `std::fs` module, which provide better error messages and avoid some
//! typical pitfalls.
use crate::prelude::*;
use async_compression::tokio::bufread::GzipEncoder;
use async_compression::Level;
use fs_extra::dir::CopyOptions;
use fs_extra::error::ErrorKind;
// ==============
@ -16,21 +20,22 @@ pub use enso_build_base::fs::*;
/// Copy source item (file or a directory) to a destination directory, preserving the filename.
#[tracing::instrument(skip_all, fields(
src = %source_file.as_ref().display(),
dest = %dest_dir.as_ref().display()),
err)]
pub fn copy_to(source_file: impl AsRef<Path>, dest_dir: impl AsRef<Path>) -> Result {
pub fn copy_to(source_file: impl AsRef<Path>, dest_dir: impl AsRef<Path>) -> Result<PathBuf> {
require_exist(&source_file)?;
create_dir_if_missing(dest_dir.as_ref())?;
debug!("Will copy {} to {}", source_file.as_ref().display(), dest_dir.as_ref().display());
let mut options = CopyOptions::new();
options.overwrite = true;
fs_extra::copy_items(&[source_file], dest_dir, &options)?;
Ok(())
fs_extra::copy_items(&[&source_file], &dest_dir, &options).map_err(handle_fs_extra_error)?;
Ok(dest_dir.as_ref().join(source_file.as_ref().try_file_name()?))
}
/// Copy the item (file or a directory) to a destination path.
#[tracing::instrument(skip_all, fields(
src = %source_file.as_ref().display(),
dest = %destination_file.as_ref().display()),
@ -45,7 +50,8 @@ pub fn copy(source_file: impl AsRef<Path>, destination_file: impl AsRef<Path>) -
let mut options = fs_extra::dir::CopyOptions::new();
options.overwrite = true;
options.content_only = true;
fs_extra::dir::copy(source_file, destination_file, &options)?;
fs_extra::dir::copy(source_file, destination_file, &options)
.map_err(handle_fs_extra_error)?;
} else {
enso_build_base::fs::wrappers::copy(source_file, destination_file)?;
}
@ -55,7 +61,12 @@ pub fn copy(source_file: impl AsRef<Path>, destination_file: impl AsRef<Path>) -
Ok(())
}
/// Mirrors the directory (like `rsync`).
///
/// All files and directories from the source directory will be copied to the destination directory,
/// unless they are already present and have the same content.
/// Any files or directories that are present in the destination directory, but not in the source
/// directory, will be removed.
pub async fn mirror_directory(source: impl AsRef<Path>, destination: impl AsRef<Path>) -> Result {
create_dir_if_missing(destination.as_ref())?;
@ -79,6 +90,9 @@ pub async fn compressed_size(path: impl AsRef<Path>) -> Result<byte_unit::Byte>
crate::io::read_length(encoded_stream).await.map(into)
}
/// Copy the file to the destination path, unless the file already exists and has the same content.
///
/// If the directory is passed as the source, it will be copied recursively.
#[tracing::instrument(skip_all, fields(
src = %source.as_ref().display(),
dest = %target.as_ref().display()),
@ -99,8 +113,26 @@ pub async fn copy_if_different(source: impl AsRef<Path>, target: impl AsRef<Path
Ok(())
}
/// Create a symlink.
///
/// This function hides the platform differences between Windows and Unix.
#[context("Failed to create symlink {} => {}", src.as_ref().display(), dst.as_ref().display())]
pub fn symlink_auto(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result {
create_parent_dir_if_missing(&dst)?;
symlink::symlink_auto(&src, &dst).anyhow_err()
}
/// `fs_extra`'s error type is not friendly to `anyhow`, so we need to convert it manually.
///
/// Otherwise, we get just the message to look into the error kind, but the kind information is
/// lost.
pub fn handle_fs_extra_error(error: fs_extra::error::Error) -> anyhow::Error {
let message = error.to_string();
match error.kind {
ErrorKind::Io(inner) => anyhow::Error::new(inner),
ErrorKind::StripPrefix(inner) => anyhow::Error::new(inner),
ErrorKind::OsString(inner) => anyhow::Error::msg(inner.to_string_lossy().to_string()),
_ => return error.into(),
}
.context(message)
}

View File

@ -1,3 +1,5 @@
//! Asynchronous filesystem operations using tokio.
use crate::prelude::*;
use tokio::fs::File;
@ -77,15 +79,6 @@ pub async fn remove_dir_if_exists(path: impl AsRef<Path>) -> Result {
}
}
pub async fn perhaps_remove_dir_if_exists(dry_run: bool, path: impl AsRef<Path>) -> Result {
if dry_run {
info!("Would remove directory {}.", path.as_ref().display());
Ok(())
} else {
remove_dir_if_exists(path).await
}
}
/// Recreate directory, so it exists and is empty.
pub async fn reset_dir(path: impl AsRef<Path>) -> Result {
let path = path.as_ref();
@ -124,3 +117,90 @@ pub async fn append(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Resul
.await
.with_context(|| format!("Failed to write to file {}.", path.as_ref().display()))
}
/// Copy a file between directory subtrees, preserving the relative path.
///
/// Source file must be within the source directory subtree. Path can be either relative or
/// absolute.
///
/// Example:
/// ```
/// use ide_ci::prelude::*;
///
/// use ide_ci::fs::tokio::copy_between;
/// #[tokio::main]
/// async fn main() -> Result {
/// let tmp1 = tempfile::tempdir()?;
/// let relative_path = PathBuf::from_iter(["bin", "program"]);
/// let contents = "Hello, world!";
/// ide_ci::fs::tokio::write(tmp1.path().join_iter(&relative_path), contents).await?;
/// let tmp2 = tempfile::tempdir()?;
/// copy_between(tmp1.path(), tmp2.path(), &relative_path).await?;
///
/// let copied =
/// ide_ci::fs::tokio::read_to_string(tmp2.path().join_iter(&relative_path)).await?;
/// assert_eq!(contents, copied);
/// Ok(())
/// }
/// ```
pub async fn copy_between(
    source_dir: impl AsRef<Path>,
    destination_dir: impl AsRef<Path>,
    source_file: impl AsRef<Path>,
) -> Result<PathBuf> {
    let source_dir = source_dir.as_ref();
    let source_file = source_file.as_ref();
    // An absolute source path must lie within the source directory subtree; reduce it to
    // its relative part so it can be re-rooted under the destination directory.
    let relative_path = if source_file.is_absolute() {
        source_file.strip_prefix(source_dir).with_context(|| {
            format!(
                "Failed to strip prefix {} from {}.",
                source_dir.display(),
                source_file.display()
            )
        })?
    } else {
        source_file
    };
    let source_path = source_dir.join(relative_path);
    let destination_path = destination_dir.as_ref().join(relative_path);
    copy(&source_path, &destination_path)
        .instrument(info_span!("copy_between", ?source_path, ?destination_path))
        .await?;
    Ok(destination_path)
}
/// Asynchronous version of [`crate::fs::copy`].
///
/// The blocking copy is dispatched to a dedicated worker thread.
pub async fn copy(source_file: impl AsRef<Path>, destination_file: impl AsRef<Path>) -> Result {
    let from = source_file.as_ref().to_path_buf();
    let to = destination_file.as_ref().to_path_buf();
    let task = tokio::task::spawn_blocking(move || crate::fs::copy(&from, &to));
    task.await?
}
/// Remove a regular file.
///
/// Does not fail if the file is not found.
#[context("Failed to remove file {}", path.as_ref().display())]
pub async fn remove_file_if_exists(path: impl AsRef<Path>) -> Result<()> {
    match tokio::fs::remove_file(&path).await {
        // A missing file is not an error — the desired end state is already reached.
        Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(()),
        other => other.anyhow_err(),
    }
}
/// Fail if the given path does not exist.
pub async fn require_exist(path: impl AsRef<Path>) -> Result {
    // Successful metadata lookup is used as the existence check.
    let exists = metadata(&path).await.is_ok();
    if !exists {
        bail!("{} does not exist.", path.as_ref().display())
    }
    trace!("{} does exist.", path.as_ref().display());
    Ok(())
}
/// Asynchronous version of [`crate::fs::copy_to`].
///
/// The blocking copy runs on a dedicated worker thread.
pub async fn copy_to(source_file: impl AsRef<Path>, dest_dir: impl AsRef<Path>) -> Result<PathBuf> {
    let source = source_file.as_ref().to_path_buf();
    let target_dir = dest_dir.as_ref().to_path_buf();
    let handle = tokio::task::spawn_blocking(move || crate::fs::copy_to(&source, &target_dir));
    handle.await?
}

View File

@ -1,11 +1,14 @@
use crate::prelude::*;
use crate::define_env_var;
use octocrab::models::repos::Asset;
use octocrab::models::repos::Release;
const MAX_PER_PAGE: u8 = 100;
// ==============
// === Export ===
// ==============
pub mod model;
pub mod release;
@ -16,6 +19,78 @@ pub use repo::Repo;
pub use repo::RepoRef;
/// Maximum number of items per page in the GitHub API.
const MAX_PER_PAGE: u8 = 100;
define_env_var! {
/// GitHub Personal Access Token, used for authentication in the GitHub API.
///
/// Can be [created using GitHub web UI](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token).
GITHUB_TOKEN, String;
}
/// Tries to retrieve the GitHub Personal Access Token from the environment.
///
/// Falls back to reading the `GITHUB_TOKEN` file in the home directory when the
/// environment variable is not set.
pub fn retrieve_github_access_token() -> Result<String> {
    // Fallback: read the token from the `GITHUB_TOKEN` file in the home directory.
    fn get_token_from_file() -> Result<String> {
        let home = dirs::home_dir().context("Failed to locate home directory.")?;
        let path = home.join("GITHUB_TOKEN");
        debug!("Looking for GitHub token in the file {}", path.display());
        let content = crate::fs::read_to_string(path)?;
        Ok(content.trim().into())
    }

    let from_env = GITHUB_TOKEN
        .get()
        .inspect(|_| debug!("Will use {GITHUB_TOKEN} environment variable."))
        .inspect_err(|e| debug!("Failed to retrieve GitHub authentication from environment: {e}"));
    from_env.or_else(|_| get_token_from_file())
}
/// Prepare the octocrab (GitHub API client) using the authentication token from the environment.
///
/// Falls back to anonymous (rate-limited) access when no token can be retrieved.
#[context("Failed to setup GitHub API client.")]
pub async fn setup_octocrab() -> Result<Octocrab> {
    let builder = octocrab::OctocrabBuilder::new();
    let octocrab = if let Ok(access_token) = retrieve_github_access_token() {
        let octocrab = builder.personal_token(access_token).build()?;
        // Resolving the username doubles as a smoke test of the token; a failure here is only
        // logged, not fatal.
        let username = octocrab
            .current()
            .user()
            .await
            .inspect_err(|e| warn!("Failed to retrieve GitHub username: {e}"))
            .map_or_else(|_| "N/A".to_string(), |user| user.login);
        info!("Using GitHub API with personal access token. Authenticated as {username}.",);
        octocrab
    } else {
        info!("No GitHub Personal Access Token found. Will use anonymous API access.");
        warn!(
            "Anonymous GitHub API access is rate-limited. If you are experiencing issues, please \
            set the GITHUB_TOKEN environment variable."
        );
        warn!(
            "Additionally some APIs may not be available to anonymous users. This primarily \
            pertains the release-related APIs."
        );
        builder.build()?
    };
    // Print the rate limit. This both helps debugging related issues and allows validating the
    // GitHub access token.
    octocrab
        .ratelimit()
        .get()
        .await
        .inspect(|rate| {
            info!(
                "GitHub API rate limit: {}/{}.",
                rate.resources.core.used, rate.resources.core.limit
            )
        })
        .context("Failed to get rate limit info. GitHub Personal Access Token might be invalid")?;
    Ok(octocrab)
}
/// Goes over all the pages and returns result.
///
/// We prefer taking a future page result rather than page itself to be able to easily wrap both
@ -29,6 +104,7 @@ pub async fn get_all<T: DeserializeOwned>(
client.all_pages(first_page).await
}
/// Utility functions for dealing with organization-specific GitHub API.
#[async_trait]
pub trait IsOrganization {
/// Organization name.
@ -39,15 +115,18 @@ pub trait IsOrganization {
&self,
octocrab: &Octocrab,
) -> anyhow::Result<model::RegistrationToken> {
let path = iformat!("/orgs/{self.name()}/actions/runners/registration-token");
let name = self.name();
let path = format!("/orgs/{name}/actions/runners/registration-token");
let url = octocrab.absolute_url(path)?;
octocrab.post(url, EMPTY_REQUEST_BODY).await.map_err(Into::into)
octocrab.post(url, EMPTY_REQUEST_BODY).await.with_context(|| {
format!("Failed to generate runner registration token for organization {name}.")
})
}
/// The organization's URL.
fn url(&self) -> Result<Url> {
let url_text = iformat!("https://github.com/{self.name()}");
Url::parse(&url_text).map_err(Into::into)
Url::from_str(&url_text)
}
}
@ -97,6 +176,7 @@ pub async fn latest_runner_url(octocrab: &Octocrab, os: OS) -> Result<Url> {
find_asset_url_by_text(&latest_release, &platform_name).cloned()
}
/// Download and extract latest GitHub Actions runner package for a given system.
pub async fn fetch_runner(octocrab: &Octocrab, os: OS, output_dir: impl AsRef<Path>) -> Result {
let url = latest_runner_url(octocrab, os).await?;
crate::io::download_and_extract(url, output_dir).await

View File

@ -70,7 +70,7 @@ pub trait IsReleaseExt: IsRelease + Sync {
/// Upload a new asset to the release from a given file.
///
/// The filename will be used to name the asset and deduce MIME content type.
#[instrument(skip_all, fields(source = %path.as_ref().display()))]
#[instrument(skip_all, fields(source = %path.as_ref().display()), err)]
async fn upload_asset_file(&self, path: impl AsRef<Path> + Send) -> Result<Asset> {
let error_msg =
format!("Failed to upload an asset from the file under {}.", path.as_ref().display());
@ -121,6 +121,17 @@ pub trait IsReleaseExt: IsRelease + Sync {
.await
.anyhow_err()
}
async fn publish(&self) -> Result<Release> {
self.octocrab()
.repos(self.repo().owner(), self.repo().name())
.releases()
.update(self.id().0)
.draft(false)
.send()
.await
.with_context(|| format!("Failed to publish the release {}.", self.id()))
}
}
impl<T> IsReleaseExt for T where T: IsRelease + Sync {}
@ -128,7 +139,7 @@ impl<T> IsReleaseExt for T where T: IsRelease + Sync {}
/// A release on GitHub.
#[derive(Clone, Derivative)]
#[derivative(Debug)]
pub struct ReleaseHandle {
pub struct Handle {
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub repo: Repo,
pub id: ReleaseId,
@ -136,7 +147,7 @@ pub struct ReleaseHandle {
pub octocrab: Octocrab,
}
impl IsRelease for ReleaseHandle {
impl IsRelease for Handle {
fn id(&self) -> ReleaseId {
self.id
}
@ -150,7 +161,7 @@ impl IsRelease for ReleaseHandle {
}
}
impl ReleaseHandle {
impl Handle {
pub fn new(octocrab: &Octocrab, repo: impl Into<Repo>, id: ReleaseId) -> Self {
let repo = repo.into();
Self { repo, id, octocrab: octocrab.clone() }

View File

@ -1,3 +1,5 @@
//! Utilities to deal with GitHub repositories, particularly the ones with the fonts.
use crate::prelude::*;
use crate::cache::download::DownloadFile;
@ -22,11 +24,14 @@ use reqwest::Response;
/// Owned data denoting a specific GitHub repository.
///
/// See also [`RepoRef`] for a non-owning equivalent.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize, derive_more::Display)]
#[display(fmt = "{}/{}", owner, name)]
pub struct Repo {
/// Owner - an organization's or user's name.
pub owner: String,
/// Repository name.
pub name: String,
}
@ -40,7 +45,7 @@ impl IsRepo for Repo {
}
}
/// Parse from strings in format "owner/name". Opposite of `Display`.
/// Parse from strings in format "owner/name". Opposite of [`Display`].
impl std::str::FromStr for Repo {
type Err = anyhow::Error;
@ -55,7 +60,14 @@ impl<'a> From<RepoRef<'a>> for Repo {
}
}
impl<'a> From<&'a Repo> for RepoRef<'a> {
fn from(value: &'a Repo) -> Self {
RepoRef { owner: &value.owner, name: &value.name }
}
}
impl Repo {
#[allow(missing_docs)]
pub fn new(owner: impl Into<String>, name: impl Into<String>) -> Self {
Self { owner: owner.into(), name: name.into() }
}
@ -70,6 +82,7 @@ impl Repo {
pub struct RepoRef<'a> {
/// Owner - an organization's or user's name.
pub owner: &'a str,
/// Repository name.
pub name: &'a str,
}
@ -84,6 +97,7 @@ impl<'a> IsRepo for RepoRef<'a> {
}
impl<'a> RepoRef<'a> {
#[allow(missing_docs)]
pub const fn new<T1, T2>(owner: &'a T1, name: &'a T2) -> Self
where
T1: ~const AsRef<str> + ?Sized,
@ -92,7 +106,7 @@ impl<'a> RepoRef<'a> {
}
}
/// Note that we chose to implemend `TryFrom` rather than `FromStr` for `RepoRef` because
/// Note that we chose to implement `TryFrom` rather than `FromStr` for `RepoRef` because
/// `FromStr` requires the parsed value to be owned (or at least lifetime-independent from input),
/// which is not the case for `RepoRef`.
impl<'a> TryFrom<&'a str> for RepoRef<'a> {
@ -109,28 +123,51 @@ impl<'a> TryFrom<&'a str> for RepoRef<'a> {
/// Any entity that uniquely identifies a GitHub-hosted repository.
#[async_trait]
pub trait IsRepo: Display {
/// Owner - an organization's or user's name.
fn owner(&self) -> &str;
/// Repository name.
fn name(&self) -> &str;
/// The repository's URL.
///
/// ```
/// # use ide_ci::github::Repo;
/// # use ide_ci::prelude::IsRepo;
/// let repo = Repo::new("enso-org", "enso");
/// assert_eq!(repo.url().unwrap().to_string(), "https://github.com/enso-org/enso/");
/// ```
fn url(&self) -> Result<Url> {
let url_text = iformat!("https://github.com/{self.owner()}/{self.name()}");
// Note the trailing `/`. It allows us to join further paths to the URL using Url::join.
let url_text = iformat!("https://github.com/{self.owner()}/{self.name()}/");
Url::parse(&url_text)
.context(format!("Failed to generate an URL for the {self} repository."))
.with_context(|| format!("Failed to parse URL from string '{url_text}'."))
.with_context(|| format!("Failed to generate URL for the repository {self}."))
}
/// Add GitHub API client to obtain the [`Handle`] to this repository.
fn handle(&self, octocrab: &Octocrab) -> Handle<Self>
where Self: Clone + Sized {
Handle { repo: self.clone(), octocrab: octocrab.clone() }
}
/// Add GitHub API client to obtain the [`Handle`] to this repository.
fn into_handle(self, octocrab: &Octocrab) -> Handle<Self>
where Self: Sized {
Handle { octocrab: octocrab.clone(), repo: self }
}
}
/// A handle to a specific GitHub repository.
///
/// It includes a client (so also an authentication token) and a repository.
#[derive(Debug, Clone)]
#[derive(Derivative, Clone)]
#[derivative(Debug)]
pub struct Handle<Repo> {
/// Octocrab client (includes authentication token).
#[derivative(Debug = "ignore")]
pub octocrab: Octocrab,
/// Repository designation.
pub repo: Repo,
}
@ -161,41 +198,54 @@ impl<R: IsRepo> Handle<R> {
let path =
iformat!("/repos/{self.owner()}/{self.name()}/actions/runners/registration-token");
let url = self.octocrab.absolute_url(path)?;
self.octocrab.post(url, EMPTY_REQUEST_BODY).await.context(format!(
"Failed to generate a runner registration token for the {self} repository."
))
self.octocrab.post(url, EMPTY_REQUEST_BODY).await.with_context(|| {
format!("Failed to generate a runner registration token for the {self} repository.")
})
}
/// Get the [RepoHandler](octocrab::repos::RepoHandler), which is octocrab's entry point for
/// most of the repository-related operations.
pub fn repos(&self) -> octocrab::repos::RepoHandler {
self.octocrab.repos(self.owner(), self.name())
}
/// List all the releases of this repository.
///
/// While this behavior is not documented anywhere, it seems that the GitHub API returns the
/// releases in the reverse chronological order (i.e. the newest release first).
pub async fn all_releases(&self) -> Result<Vec<Release>> {
github::get_all(
&self.octocrab,
self.repos().releases().list().per_page(MAX_PER_PAGE).send(),
)
.await
.context(format!("Failed to list all releases in the {self} repository."))
.with_context(|| format!("Failed to list all releases in the {self} repository."))
}
/// Get the latest release of this repository.
///
/// The latest release is the most recent non-prerelease, non-draft release, sorted by the
/// `created_at` attribute. The `created_at` attribute is the date of the commit used for the
/// release, and not the date when the release was drafted or published.
pub async fn latest_release(&self) -> Result<Release> {
self.repos()
.releases()
.get_latest()
.await
.context(format!("Failed to get the latest release in the {self} repository."))
.with_context(|| format!("Failed to get the latest release in the {self} repository."))
}
/// Get the information about release with the given id.
pub async fn find_release_by_id(&self, release_id: ReleaseId) -> Result<Release> {
let repo_handler = self.repos();
let releases_handler = repo_handler.releases();
releases_handler
.get_by_id(release_id)
.await
.context(format!("Failed to find release by id `{release_id}` in `{self}`."))
.with_context(|| format!("Failed to find release by id `{release_id}` in `{self}`."))
}
/// Get the latest release that satisfies the given predicate.
#[tracing::instrument(fields(%self), skip(predicate), err)]
pub async fn find_release_if(&self, predicate: impl Fn(&Release) -> bool) -> Result<Release> {
let releases = self.all_releases().await?;
@ -203,6 +253,7 @@ impl<R: IsRepo> Handle<R> {
release.context("Failed to find a release that satisfies the predicate.")
}
/// Get the latest release that contains the given substring in its tag name.
#[tracing::instrument(fields(%self, %text), err)]
pub async fn find_release_by_text(&self, text: &str) -> anyhow::Result<Release> {
self.find_release_if(|release| release.tag_name.contains(text))
@ -211,6 +262,7 @@ impl<R: IsRepo> Handle<R> {
.with_context(|| format!("No release with tag matching `{text}` in {self}."))
}
/// Get the release with the given tag name.
#[tracing::instrument(fields(%self, %text), err)]
pub async fn find_release_by_tag(&self, text: &str) -> anyhow::Result<Release> {
self.find_release_if(|release| release.tag_name == text)
@ -219,10 +271,13 @@ impl<R: IsRepo> Handle<R> {
.with_context(|| format!("No release with tag equal to `{text}` in {self}."))
}
/// Get a single [reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References) from
/// the repository's Git database.
pub async fn get_ref(&self, r#ref: &Reference) -> Result<Ref> {
self.repos().get_ref(r#ref).await.context(format!("Failed to get ref `{ref}` in {self}."))
}
/// Lookup artifact by name in a GitHub Actions workflow run.
#[tracing::instrument(fields(%self, %run_id, %name), err, ret)]
pub async fn find_artifact_by_name(
&self,
@ -236,24 +291,33 @@ impl<R: IsRepo> Handle<R> {
.per_page(100)
.send()
.await
.context(format!("Failed to list artifacts of run {run_id} in {self}."))?
.with_context(|| format!("Failed to list artifacts of run {run_id} in {self}."))?
.value
.context("Failed to find any artifacts.")?;
.with_context(|| {
format!("Failed to find any artifacts in the run {run_id} in {self}.")
})?;
artifacts
.into_iter()
.find(|artifact| artifact.name == name)
.context(format!("Failed to find artifact by name '{name}'."))
.with_context(|| format!("Failed to find artifact by name '{name}'."))
}
/// Download artifact by id in a GitHub Actions workflow run.
///
/// The artifact is always packed into a zip archive. This method returns its binary contents.
pub async fn download_artifact(&self, artifact_id: ArtifactId) -> Result<Bytes> {
// TODO: [mwu] Unfortunately the octocrab API does not support streaming the artifact,
// so we have to download it into memory first.
self.octocrab
.actions()
.download_artifact(self.owner(), self.name(), artifact_id, ArchiveFormat::Zip)
.await
.context(format!("Failed to download artifact with ID={artifact_id}."))
.with_context(|| format!("Failed to download artifact with ID={artifact_id}."))
}
/// Download artifact by id in a GitHub Actions workflow run and extract top-level zip archive
/// (the one implicitly introduced by GitHub) into the given output directory.
pub async fn download_and_unpack_artifact(
&self,
artifact_id: ArtifactId,
@ -264,11 +328,15 @@ impl<R: IsRepo> Handle<R> {
Ok(())
}
/// Get information about a release asset with a given id.
#[tracing::instrument(name="Get the asset information.", fields(self=%self), err)]
pub async fn asset(&self, asset_id: AssetId) -> Result<Asset> {
self.repos().releases().get_asset(asset_id).await.anyhow_err()
self.repos().releases().get_asset(asset_id).await.with_context(|| {
format!("Failed to get the asset information for asset with ID={asset_id} in {self}.")
})
}
/// Generate cacheable action that downloads asset with a given id.
pub fn download_asset_job(&self, asset_id: AssetId) -> DownloadFile {
let path = iformat!("/repos/{self.owner()}/{self.name()}/releases/assets/{asset_id}");
// Unwrap will work, because we are appending relative URL constant.
@ -285,11 +353,13 @@ impl<R: IsRepo> Handle<R> {
}
}
/// Make an HTTP request to download a release asset with a given id.
#[tracing::instrument(name="Download the asset.", fields(self=%self), err)]
pub async fn download_asset(&self, asset_id: AssetId) -> Result<Response> {
self.download_asset_job(asset_id).send_request().await
}
/// Download a release asset with a given id to a file.
#[tracing::instrument(name="Download the asset to a file.", skip(output_path), fields(self=%self, dest=%output_path.as_ref().display()), err)]
pub async fn download_asset_as(
&self,
@ -300,6 +370,9 @@ impl<R: IsRepo> Handle<R> {
crate::io::web::stream_response_to_file(response, &output_path).await
}
/// Download a release asset with a given id to a file under the given directory.
///
/// The file name is taken from the asset's name.
#[tracing::instrument(name="Download the asset to a directory.",
skip(output_dir, asset),
fields(self=%self, dest=%output_dir.as_ref().display(), id = %asset.id),
@ -341,4 +414,10 @@ impl<R: IsRepo> Handle<R> {
let default_branch = self.default_branch().await?;
crate::github::workflow::dispatch(self, workflow_id, default_branch, inputs).await
}
/// Get a handle for dealing with a release with a given id.
pub fn release_handle(&self, id: ReleaseId) -> crate::github::release::Handle {
let repo = Repo::new(self.owner(), self.name());
crate::github::release::Handle::new(&self.octocrab, repo, id)
}
}

View File

@ -1,58 +0,0 @@
use crate::prelude::*;
use crate::fs::create_dir_if_missing;
#[async_trait]
pub trait Goodie {
// Assumed to be unique among types.
// TODO requirement should be lifted, consider something safer
const NAME: &'static str;
type Instance: Instance + Sized;
async fn is_already_available(&self) -> anyhow::Result<bool>;
async fn lookup(&self, database: &GoodieDatabase) -> Result<Self::Instance>;
async fn install(&self, database: &GoodieDatabase) -> Result<Self::Instance>;
}
pub trait Instance {
fn add_to_environment(&self) -> anyhow::Result<()>;
}
#[derive(Clone, Debug)]
pub struct GoodieDatabase {
pub root_directory: PathBuf,
}
impl GoodieDatabase {
pub fn new() -> anyhow::Result<Self> {
let home = dirs::home_dir().ok_or_else(|| anyhow!("Cannot figure out home directory."))?;
let path = home.join(".enso-ci");
create_dir_if_missing(&path)?;
Ok(GoodieDatabase { root_directory: path })
}
pub async fn require(&self, goodie: &impl Goodie) -> Result {
if goodie.is_already_available().await? {
Ok(())
} else if let Ok(instance) = goodie.lookup(self).await {
instance.add_to_environment()
} else {
let instance = goodie.install(self).await?;
instance.add_to_environment()
}
}
pub fn find_dir(&self, directory_name: impl AsRef<Path>) -> Result<PathBuf> {
let expected_dir_name = directory_name.as_ref();
for entry in crate::fs::read_dir(&self.root_directory)? {
let entry = entry?;
if entry.file_type()?.is_dir() && entry.path().file_name().contains(&expected_dir_name)
{
return Ok(entry.path());
}
}
bail!("no directory by name {} in the database.", expected_dir_name.display())
}
}

View File

@ -1,2 +1,8 @@
// pub mod musl;
// pub mod sbt;
//! Top-level module for [goodie](`crate::Goodie`) implementations.
// ==============
// === Export ===
// ==============
pub mod shader_tools;

View File

@ -0,0 +1,80 @@
//! Shader Tools is our collection of tools for working with shaders.
//!
//! The included programs are:
//! * [glslc](Glslc);
//! * [spirv-opt](SpirvOpt);
//! * [spirv-cross](SpirvCross).
//!
//! This module only deals with downloading and activating the tools. The code for building and
//! uploading the tools package is in the `enso-build-shader-tools` crate.
use crate::prelude::*;
use crate::cache::goodie;
use crate::cache::Cache;
use crate::cache::Goodie;
use crate::env::known::PATH;
use crate::github::RepoRef;
use crate::programs::shaderc::Glslc;
use crate::programs::shaderc::SpirvOpt;
use crate::programs::spirv_cross::SpirvCross;
// =================
// === Constants ===
// =================
/// Repository where we store releases of the shader tools.
pub const SHADER_TOOLS_REPO: RepoRef = RepoRef { owner: "enso-org", name: "shader-tools" };
/// Version of the shader tools package that we download.
pub const VERSION: Version = Version::new(0, 1, 0);
// =========================
// === Asset description ===
// =========================
/// Name of the release asset containing the shader tools package for the given OS.
pub fn asset_name(os: OS) -> String {
    // Only x86_64 binaries are currently distributed, so the architecture is fixed.
    let arch = Arch::X86_64;
    format!("shader-tools-{}-{}.tar.gz", os, arch)
}
// =========================
// === Goodie definition ===
// =========================
/// Goodie that downloads and activates the shader tools package
/// (see the module docs: glslc, spirv-opt, spirv-cross).
#[derive(Clone, Copy, Debug, Default)]
pub struct ShaderTools;
impl Goodie for ShaderTools {
    /// Download the release asset for the current OS into the cache.
    fn get(&self, cache: &Cache) -> BoxFuture<'static, Result<PathBuf>> {
        let url = SHADER_TOOLS_REPO.url().and_then(|url_base| {
            let asset = asset_name(TARGET_OS);
            let suffix = format!("releases/download/{VERSION}/{asset}");
            // BUG FIX: the context message was a plain string literal, so `{suffix}` and
            // `{url_base}` were never interpolated — use `format!` to substitute them.
            url_base
                .join(&suffix)
                .with_context(|| format!("Failed to append suffix {suffix} to URL {url_base}"))
        });
        goodie::download_try_url(url, cache)
    }

    /// The tools count as active when all three programs resolve to executables.
    fn is_active(&self) -> BoxFuture<'static, Result<bool>> {
        async move {
            let glslc = Glslc.lookup();
            let spirv_cross = SpirvCross.lookup();
            let spirv_opt = SpirvOpt.lookup();
            Ok(glslc.is_ok() && spirv_cross.is_ok() && spirv_opt.is_ok())
        }
        .boxed()
    }

    /// Activation prepends the package's `bin` directory to `PATH`.
    fn activation_env_changes(&self, package_path: &Path) -> Result<Vec<crate::env::Modification>> {
        let path = package_path.join_iter(["bin"]);
        let path = crate::env::Modification::prepend_path(&PATH, path);
        Ok(vec![path])
    }
}

View File

@ -1,6 +1,7 @@
use crate::prelude::*;
use crate::fs::tokio::copy_to_file;
use crate::fs::tokio::create_parent_dir_if_missing;
use anyhow::Context;
use reqwest::Client;
@ -97,6 +98,8 @@ pub async fn stream_to_file(
stream: impl Stream<Item = reqwest::Result<Bytes>>,
output_path: impl AsRef<Path>,
) -> Result {
debug!("Streaming download to file {}. ", output_path.as_ref().display());
create_parent_dir_if_missing(&output_path).await?;
let output = tokio::fs::OpenOptions::new().write(true).create(true).open(&output_path).await?;
stream
.map_err(anyhow::Error::from)

View File

@ -21,6 +21,8 @@ pub async fn download(
client: &Client,
url: impl IntoUrl,
) -> Result<impl Stream<Item = reqwest::Result<Bytes>>> {
let url = url.into_url()?;
debug!("Downloading {url}.");
Ok(client.get(url).send().await?.error_for_status()?.bytes_stream())
}

View File

@ -1,4 +1,5 @@
// === Features ===
#![feature(try_blocks)]
#![feature(result_flattening)]
#![feature(const_fmt_arguments_new)]
#![feature(hash_set_entry)]
@ -53,7 +54,6 @@ pub mod fs;
pub mod future;
pub mod github;
pub mod global;
pub mod goodie;
pub mod goodies;
pub mod io;
pub mod log;
@ -96,10 +96,9 @@ pub mod prelude {
pub use crate::EMPTY_REQUEST_BODY;
pub use crate::extensions::output::OutputExt as _;
pub use crate::cache::goodie::Goodie;
pub use crate::github::release::IsRelease;
pub use crate::github::repo::IsRepo;
pub use crate::goodie::Goodie;
pub use crate::log::setup_logging;
pub use crate::os::target::TARGET_ARCH;
pub use crate::os::target::TARGET_OS;
@ -110,12 +109,12 @@ pub mod prelude {
pub use crate::program::Program;
pub use crate::program::Shell;
pub use crate::cache::goodie::GoodieExt as _;
pub use crate::env::accessor::RawVariable as _;
pub use crate::env::accessor::TypedVariable as _;
pub use crate::extensions::clap::ArgExt as _;
pub use crate::extensions::command::CommandExt as _;
pub use crate::extensions::output::OutputExt as _;
pub use crate::extensions::version::PrereleaseExt as _;
pub use crate::extensions::version::VersionExt as _;
pub use crate::github::release::IsReleaseExt as _;

View File

@ -451,7 +451,7 @@ pub fn spawn_log_processor(
) -> JoinHandle<Result> {
tokio::task::spawn(
async move {
info!("{prefix} <START>");
trace!("{prefix} <START>");
let bufread = BufReader::new(out);
let mut lines = bufread.split(b'\n');
while let Some(line_bytes) = lines.next_segment().await? {
@ -470,7 +470,7 @@ pub fn spawn_log_processor(
}
}
}
info!("{prefix} <ENDUT>");
trace!("{prefix} <ENDUT>");
Result::Ok(())
}
.inspect_err(|e| error!("Fatal error while processing process output: {e}")),

View File

@ -6,6 +6,7 @@ use crate::prelude::*;
// ==============
pub mod cargo;
pub mod cmake;
pub mod cmd;
pub mod conda;
pub mod docker;
@ -25,6 +26,9 @@ pub mod rustup;
pub mod sbt;
pub mod seven_zip;
pub mod sh;
pub mod shaderc;
pub mod spirv_cross;
pub mod strip;
pub mod tar;
pub mod vs;
pub mod vswhere;
@ -32,6 +36,7 @@ pub mod wasm_opt;
pub mod wasm_pack;
pub use cargo::Cargo;
pub use cmake::CMake;
pub use cmd::Cmd;
pub use conda::Conda;
pub use docker::Docker;
@ -42,8 +47,10 @@ pub use java::Java;
pub use javac::Javac;
pub use node::Node;
pub use node::Npm;
pub use npx::Npx;
pub use pwsh::PwSh;
pub use sbt::Sbt;
pub use seven_zip::SevenZip;
pub use sh::Bash;
pub use strip::Strip;
pub use wasm_pack::WasmPack;

View File

@ -0,0 +1,187 @@
//! Wrappers for [CMake program](https://cmake.org/) and its commands.
use crate::prelude::*;
use crate::program::command::Manipulator;
// =====================
// === Configuration ===
// =====================
/// Standard build configurations defined by CMake.
///
/// See [CMake documentation](https://cmake.org/cmake/help/latest/manual/cmake-buildsystem.7.html#default-and-custom-configurations)
#[allow(missing_docs)]
// The derived `Display` renders the variant name verbatim, matching CMake's own spelling.
#[derive(Clone, Copy, Debug, Display, Eq, Hash, PartialEq)]
pub enum Configuration {
/// Non-optimized, debuggable binary.
Debug,
/// Optimized binary.
Release,
/// Add compiler flags for generating debug information (the -g flag for GCC / clang), and will
/// result in debuggable, yet much larger binaries.
RelWithDebInfo,
/// Add compiler flags for generating more compact binaries (the -Os flag for GCC / clang),
/// possibly at the expense of program speed.
MinSizeRel,
}
// ===================
// === CLI Options ===
// ===================
/// Define build type for a single configuration generator.
///
/// See <https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html>.
pub fn build_type(config: Configuration) -> SetVariable {
    let rendered = config.to_string();
    SetVariable::string("CMAKE_BUILD_TYPE", rendered)
}
/// Option that can be passed to `cmake --build` command.
#[derive(Clone, Copy, Debug)]
pub enum BuildOption {
/// The maximum number of concurrent processes to use when building. If value is omitted the
/// native build tool's default number is used.
// Rendered as `--parallel [N]` on the command line.
Parallel(Option<u32>),
/// For multi-configuration tools, choose configuration.
///
/// Single configuration tools will ignore this option, instead [`build_type`] should be used
/// during the generation phase.
// Rendered as `--config <CFG>` on the command line.
Configuration(Configuration),
}
impl BuildOption {
/// Enable default level of parallelism.
pub fn parallel() -> Self {
Self::Parallel(None)
}
}
impl Manipulator for BuildOption {
    /// Append this option's flags to the wrapped `cmake --build` invocation.
    fn apply<C: IsCommandWrapper + ?Sized>(&self, command: &mut C) {
        match self {
            Self::Parallel(jobs) => {
                command.arg("--parallel");
                // An explicit job count is optional; omitting it keeps the tool's default.
                if let Some(count) = jobs {
                    command.arg(count.to_string());
                }
            }
            Self::Configuration(configuration) => {
                command.arg("--config");
                command.arg(configuration.to_string());
            }
        }
    }
}
/// Options for `cmake --install` family of commands.
#[derive(Clone, Debug)]
pub enum InstallOption {
/// Install to the given directory.
// Rendered as `--prefix <DIR>` on the command line.
Prefix(PathBuf),
}
impl Manipulator for InstallOption {
    /// Append this option's flags to the wrapped `cmake --install` invocation.
    fn apply<C: IsCommandWrapper + ?Sized>(&self, command: &mut C) {
        // `Prefix` is currently the only variant, so the pattern is irrefutable.
        let InstallOption::Prefix(prefix) = self;
        command.arg("--prefix");
        command.arg(prefix);
    }
}
/// Defines the given variable in the CMake cache.
#[derive(Clone, Debug)]
pub struct SetVariable {
/// Variable name.
pub variable: String,
/// Variable value.
// Kept private so values are only produced through the typed constructors
// (`option`, `file_path`, `directory_path`, `string`).
value: String,
}
impl SetVariable {
    /// Internal constructor; use the typed helpers below.
    fn new(variable: impl Into<String>, value: impl Into<String>) -> Self {
        Self { variable: variable.into(), value: value.into() }
    }

    /// Set given boolean option variable (`BOOL` type).
    pub fn option(name: impl Into<String>, value: bool) -> Self {
        // CMake's canonical boolean spellings.
        let rendered = if value { "ON" } else { "OFF" };
        Self::new(name, rendered)
    }

    /// Set given file path variable (`FILEPATH` type).
    pub fn file_path(name: impl Into<String>, value: impl AsRef<Path>) -> Self {
        Self::new(name, value.as_ref().as_str())
    }

    /// Set directory path variable (`PATH` type).
    pub fn directory_path(name: impl Into<String>, value: impl AsRef<Path>) -> Self {
        Self::new(name, value.as_ref().as_str())
    }

    /// Set given string variable (`STRING` type).
    pub fn string(name: impl Into<String>, value: impl Into<String>) -> Self {
        Self::new(name, value)
    }
}
impl Manipulator for SetVariable {
    /// Append `-D NAME=VALUE` to the wrapped `cmake` invocation.
    fn apply<C: IsCommandWrapper + ?Sized>(&self, command: &mut C) {
        let definition = format!("{}={}", self.variable, self.value);
        command.arg("-D").arg(definition);
    }
}
// ===============
// === Program ===
// ===============
/// The [CMake program](https://cmake.org/).
///
/// See [the official CLI documentation](https://cmake.org/cmake/help/latest/manual/cmake.1.html).
#[derive(Clone, Debug, Copy)]
pub struct CMake;
// NOTE(review): `Program` presumably resolves the binary by this name on `PATH` — confirm
// against the trait's definition.
impl Program for CMake {
fn executable_name(&self) -> &str {
"cmake"
}
}
// ===========================
// === Helper Entry Points ===
// ===========================
/// Generate build files for the given project.
///
/// Runs `cmake <source_dir>` with `build_dir` as the working directory, so the build tree
/// is created inside `build_dir`.
#[context("Failed to generate build files for {}.", source_dir.as_ref().display())]
pub fn generate(source_dir: impl AsRef<Path>, build_dir: impl AsRef<Path>) -> Result<Command> {
Ok(CMake.cmd()?.with_arg(source_dir.as_ref()).with_current_dir(build_dir.as_ref()))
}
/// Build the project. The build_dir must be the same as the one used in the [generation
/// step](generate).
///
/// Runs `cmake --build .` from within `build_dir`, with the tool's default parallelism.
pub fn build(build_dir: impl AsRef<Path>) -> Result<Command> {
Ok(CMake
.cmd()?
.with_arg("--build")
.with_arg(".")
.with_applied(&BuildOption::parallel())
.with_current_dir(&build_dir))
}
/// Install the project. The build_dir must be the same as the one used in the [generation
/// step](generate), and [`build`] should have been called before.
///
/// Runs `cmake --install . --prefix <prefix_dir>` from within `build_dir`.
pub fn install(build_dir: impl AsRef<Path>, prefix_dir: impl AsRef<Path>) -> Result<Command> {
Ok(CMake
.cmd()?
.with_arg("--install")
.with_arg(".")
.with_applied(&InstallOption::Prefix(prefix_dir.as_ref().to_path_buf()))
.with_current_dir(&build_dir))
}

View File

@ -54,6 +54,15 @@ impl Git {
repository_root: path.as_ref().to_path_buf(),
})
}
/// Clone a repository into a new directory.
///
/// The target directory is created first if missing; on success a [`Context`] rooted at
/// that directory is returned.
#[context("Failed to clone git repository {} into {}.", url.as_str(), path.as_ref().display())]
pub async fn clone(&self, path: impl AsRef<Path>, url: &Url) -> Result<Context> {
let path = path.as_ref();
crate::fs::tokio::create_dir_if_missing(path).await?;
self.cmd()?.arg(Command::Clone).arg(url.as_str()).arg(path).run_ok().await?;
Context::new(path).await
}
}
/// The wrapper over `Git` program invocation context.
@ -85,10 +94,7 @@ impl Context {
///
/// The caller is responsible for ensuring that the `working_dir` is a subdirectory of the
/// `repository_root`.
pub async fn new_unchecked(
repository_root: impl AsRef<Path>,
working_dir: impl AsRef<Path>,
) -> Self {
pub fn new_unchecked(repository_root: impl AsRef<Path>, working_dir: impl AsRef<Path>) -> Self {
Self {
repository_root: repository_root.as_ref().to_path_buf(),
working_dir: working_dir.as_ref().to_path_buf(),
@ -255,8 +261,12 @@ impl GitCommand {
}
/// A top-level command for git.
///
/// The full reference is available in the [official docs](https://git-scm.com/docs/git#_git_commands).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Command {
/// Clone a repository into a new directory.
Clone,
/// Remove untracked files from the working tree.
Clean,
/// Show changes between commits, commit and working tree, etc.
@ -276,6 +286,7 @@ pub enum Command {
impl AsRef<OsStr> for Command {
fn as_ref(&self) -> &OsStr {
OsStr::new(match self {
Command::Clone => "clone",
Command::Clean => "clean",
Command::Diff => "diff",
Command::Init => "init",

View File

@ -0,0 +1,36 @@
//! A collection of tools, libraries, and tests for Vulkan shader compilation.
//!
//! See the [GitHub repository](https://github.com/google/shaderc) for more information.
use crate::prelude::*;
// =============
// === glslc ===
// =============
/// A command-line GLSL/HLSL to SPIR-V compiler with Clang-compatible arguments.
#[derive(Clone, Copy, Debug, Default)]
pub struct Glslc;
// NOTE(review): `Program` presumably resolves the binary by this name on `PATH` — confirm
// against the trait's definition.
impl Program for Glslc {
fn executable_name(&self) -> &'static str {
"glslc"
}
}
// =================
// === spirv-opt ===
// =================
/// SPIR-V Optimizer.
#[derive(Clone, Copy, Debug, Default)]
pub struct SpirvOpt;
// The wrapper only names the executable; invocation is handled by the `Program` machinery.
impl Program for SpirvOpt {
fn executable_name(&self) -> &'static str {
"spirv-opt"
}
}

View File

@ -0,0 +1,16 @@
//! Wrapper for [spirv-cross](https://github.com/KhronosGroup/SPIRV-Cross).
use crate::prelude::Program;
/// SPIRV-Cross is a practical tool and library for performing reflection on SPIR-V and
/// disassembling SPIR-V back to high level languages.
#[derive(Clone, Copy, Debug, Default)]
pub struct SpirvCross;
// The wrapper only names the executable; invocation is handled by the `Program` machinery.
impl Program for SpirvCross {
fn executable_name(&self) -> &'static str {
"spirv-cross"
}
}

View File

@ -0,0 +1,13 @@
use crate::prelude::*;
/// Program that discards symbols and other data from object files.
// `Default` added for consistency with the other program wrappers (e.g. `Glslc`,
// `SpirvOpt`, `SpirvCross`); it is a backward-compatible addition.
#[derive(Clone, Copy, Debug, Default)]
pub struct Strip;
impl Program for Strip {
    fn executable_name(&self) -> &str {
        "strip"
    }
}

View File

@ -121,3 +121,33 @@ pub mod via_string {
T::from_str(&text).map_err(D::Error::custom)
}
}
/// Like [`via_string`] but for optional values. If the string is not present, `None` is recognized.
pub mod via_string_opt {
use super::*;
/// Serializer, that uses [`Display`] trait.
///
/// `Some` values are rendered via `Display`; `None` is emitted as the format's null value.
pub fn serialize<S, T>(value: &Option<T>, ser: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
T: Display, {
if let Some(value) = value {
ser.collect_str(value)
} else {
ser.serialize_none()
}
}
/// Deserializer, that uses [`FromString`] trait.
///
/// A present string is parsed with `from_str`; a null value becomes `None`.
/// NOTE(review): a field that is absent entirely (rather than explicitly null) still needs
/// `#[serde(default)]` at the use site — confirm with callers.
pub fn deserialize<'de, D, T>(de: D) -> std::result::Result<Option<T>, D::Error>
where
D: Deserializer<'de>,
T: FromString, {
let text = Option::<String>::deserialize(de)?;
if let Some(text) = text {
T::from_str(&text).map(Some).map_err(D::Error::custom)
} else {
Ok(None)
}
}
}

View File

@ -18,12 +18,10 @@ futures-util = "0.3.17"
glob = "0.3.0"
humantime = "2.1.0"
ide-ci = { path = "../ci_utils" }
octocrab = { git = "https://github.com/enso-org/octocrab", default-features = false, features = [
"rustls"
] }
octocrab = { workspace = true }
serde = { version = "1.0.130", features = ["derive"] }
serde_json = "1.0.68"
serde_yaml = "0.9.10"
serde_yaml = { workspace = true }
strum = { version = "0.24.0", features = ["derive"] }
tempfile = "3.2.0"
tokio = { workspace = true }

View File

@ -133,8 +133,6 @@ pub enum Target {
Fmt,
/// Release-related subcommand.
Release(release::Target),
/// Regenerate GitHub Actions workflows.
CiGen,
/// Regenerate `syntax2` library (new parser).
JavaGen(java_gen::Target),
/// Check if the changelog has been updated. Requires CI environment.

View File

@ -6,7 +6,7 @@
use enso_build_cli::prelude::*;
use enso_build::setup_octocrab;
use ide_ci::github::setup_octocrab;
use ide_ci::github::RepoRef;
use ide_ci::io::web::handle_error_response;
use ide_ci::log::setup_logging;

Some files were not shown because too many files have changed in this diff Show More