Mirror of https://github.com/enso-org/enso.git (synced 2024-12-22 10:11:37 +03:00)

Apply unified prettier style to engine codebase (#3145)

parent 83e35751f4
commit 8fc51bfe44
3 .github/settings.yml vendored
@@ -81,8 +81,7 @@ labels:
    description: A change that will break a public API or user-facing behaviour
  - name: "Change: Non-Breaking"
    color: "#ffdce5"
    description:
      A change that will not break a public API or user-facing behaviour
    description: A change that will not break a public API or user-facing behaviour
  - name: "Difficulty: Beginner"
    color: "#d1e9c4"
2 .github/workflows/docs.yml vendored
@@ -26,7 +26,7 @@ jobs:
      - name: Install Prettier
        run: npm install
      - name: Check Docs
        run: npx prettier --check .
        run: npx prettier --version && npx prettier --check .

  changelog-check:
    name: Changelog Check
10 .github/workflows/gui.yml vendored
@@ -118,16 +118,10 @@ jobs:
        with:
          toolchain: nightly-2021-10-29
          override: true
      - name: Install Prettier
        run: npm install --save-dev --save-exact prettier
      - name: Install Clippy
        run: rustup component add clippy
      - name: Install Clippy
        run: rustup component add rustfmt
      - name: Lint Markdown sources
        run: npx prettier --check '*.md'
      - name: Lint JavaScript sources
        run: npx prettier --check 'src/**/*.js'
      - name: Lint Rust sources
        run: node ./run lint --skip-version-validation
  test:
@@ -462,10 +456,6 @@ jobs:
          if [[ ${{ steps.checkCurrentReleaseTag.outputs.exists }} == true ]];
          then exit 1; fi
        if: github.base_ref == 'unstable' || github.base_ref == 'stable'
      - name: Install Prettier
        run: npm install --save-dev --save-exact prettier
      - name: Pretty print changelog.
        run: npx prettier --prose-wrap never CHANGELOG.md --write
      - name: Upload GitHub Release
        uses: softprops/action-gh-release@v1
        env:
39 .github/workflows/nightly.yml vendored
@@ -29,8 +29,7 @@ jobs:
    name: Nightly Preflight Check
    runs-on: ubuntu-18.04
    timeout-minutes: 10
    if:
      "${{ github.event_name == 'schedule' ||
    if: "${{ github.event_name == 'schedule' ||
      contains(github.event.head_commit.message,'[release: nightly]') }}"
    outputs:
      proceed: ${{ steps.preparations.outputs.proceed }}
@ -452,8 +451,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
}}-linux-amd64.tar.gz
|
||||
asset_name: enso-engine-${{ env.DIST_VERSION }}-linux-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -463,8 +461,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
}}-macos-amd64.tar.gz
|
||||
asset_name: enso-engine-${{ env.DIST_VERSION }}-macos-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -474,8 +471,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
}}-windows-amd64.zip
|
||||
asset_name: enso-engine-${{ env.DIST_VERSION }}-windows-amd64.zip
|
||||
asset_content_type: application/zip
|
||||
@ -486,8 +482,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
}}-linux-amd64.tar.gz
|
||||
asset_name: enso-launcher-${{ env.DIST_VERSION }}-linux-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -497,8 +492,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
}}-macos-amd64.tar.gz
|
||||
asset_name: enso-launcher-${{ env.DIST_VERSION }}-macos-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -508,8 +502,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
}}-windows-amd64.zip
|
||||
asset_name: enso-launcher-${{ env.DIST_VERSION }}-windows-amd64.zip
|
||||
asset_content_type: application/zip
|
||||
@ -523,8 +516,7 @@ jobs:
|
||||
asset_path:
|
||||
repo/built-distribution/enso-project-manager-${{ env.DIST_VERSION
|
||||
}}-linux-amd64.tar.gz
|
||||
asset_name:
|
||||
enso-project-manager-${{ env.DIST_VERSION }}-linux-amd64.tar.gz
|
||||
asset_name: enso-project-manager-${{ env.DIST_VERSION }}-linux-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
- name: Publish the Project Manager (MacOS)
|
||||
uses: actions/upload-release-asset@v1
|
||||
@ -535,8 +527,7 @@ jobs:
|
||||
asset_path:
|
||||
repo/built-distribution/enso-project-manager-${{ env.DIST_VERSION
|
||||
}}-macos-amd64.tar.gz
|
||||
asset_name:
|
||||
enso-project-manager-${{ env.DIST_VERSION }}-macos-amd64.tar.gz
|
||||
asset_name: enso-project-manager-${{ env.DIST_VERSION }}-macos-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
- name: Publish the Project Manager (Windows)
|
||||
uses: actions/upload-release-asset@v1
|
||||
@ -547,8 +538,7 @@ jobs:
|
||||
asset_path:
|
||||
repo/built-distribution/enso-project-manager-${{ env.DIST_VERSION
|
||||
}}-windows-amd64.zip
|
||||
asset_name:
|
||||
enso-project-manager-${{ env.DIST_VERSION }}-windows-amd64.zip
|
||||
asset_name: enso-project-manager-${{ env.DIST_VERSION }}-windows-amd64.zip
|
||||
asset_content_type: application/zip
|
||||
|
||||
- name: Publish the Bundle (Linux)
|
||||
@ -557,8 +547,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
}}-linux-amd64.tar.gz
|
||||
asset_name: enso-bundle-${{ env.DIST_VERSION }}-linux-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -568,8 +557,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
}}-macos-amd64.tar.gz
|
||||
asset_name: enso-bundle-${{ env.DIST_VERSION }}-macos-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -579,8 +567,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
}}-windows-amd64.zip
|
||||
asset_name: enso-bundle-${{ env.DIST_VERSION }}-windows-amd64.zip
|
||||
asset_content_type: application/zip
|
||||
|
36 .github/workflows/release.yml vendored
@ -466,8 +466,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
}}-linux-amd64.tar.gz
|
||||
asset_name: enso-engine-${{ env.DIST_VERSION }}-linux-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -477,8 +476,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
}}-macos-amd64.tar.gz
|
||||
asset_name: enso-engine-${{ env.DIST_VERSION }}-macos-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -488,8 +486,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-engine-${{ env.DIST_VERSION
|
||||
}}-windows-amd64.zip
|
||||
asset_name: enso-engine-${{ env.DIST_VERSION }}-windows-amd64.zip
|
||||
asset_content_type: application/zip
|
||||
@ -500,8 +497,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
}}-linux-amd64.tar.gz
|
||||
asset_name: enso-launcher-${{ env.DIST_VERSION }}-linux-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -511,8 +507,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
}}-macos-amd64.tar.gz
|
||||
asset_name: enso-launcher-${{ env.DIST_VERSION }}-macos-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -522,8 +517,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-launcher-${{ env.DIST_VERSION
|
||||
}}-windows-amd64.zip
|
||||
asset_name: enso-launcher-${{ env.DIST_VERSION }}-windows-amd64.zip
|
||||
asset_content_type: application/zip
|
||||
@ -537,8 +531,7 @@ jobs:
|
||||
asset_path:
|
||||
repo/built-distribution/enso-project-manager-${{ env.DIST_VERSION
|
||||
}}-linux-amd64.tar.gz
|
||||
asset_name:
|
||||
enso-project-manager-${{ env.DIST_VERSION }}-linux-amd64.tar.gz
|
||||
asset_name: enso-project-manager-${{ env.DIST_VERSION }}-linux-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
- name: Publish the Project Manager (MacOS)
|
||||
uses: actions/upload-release-asset@v1
|
||||
@ -549,8 +542,7 @@ jobs:
|
||||
asset_path:
|
||||
repo/built-distribution/enso-project-manager-${{ env.DIST_VERSION
|
||||
}}-macos-amd64.tar.gz
|
||||
asset_name:
|
||||
enso-project-manager-${{ env.DIST_VERSION }}-macos-amd64.tar.gz
|
||||
asset_name: enso-project-manager-${{ env.DIST_VERSION }}-macos-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
- name: Publish the Project Manager (Windows)
|
||||
uses: actions/upload-release-asset@v1
|
||||
@ -561,8 +553,7 @@ jobs:
|
||||
asset_path:
|
||||
repo/built-distribution/enso-project-manager-${{ env.DIST_VERSION
|
||||
}}-windows-amd64.zip
|
||||
asset_name:
|
||||
enso-project-manager-${{ env.DIST_VERSION }}-windows-amd64.zip
|
||||
asset_name: enso-project-manager-${{ env.DIST_VERSION }}-windows-amd64.zip
|
||||
asset_content_type: application/zip
|
||||
|
||||
- name: Publish the Bundle (Linux)
|
||||
@ -571,8 +562,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
}}-linux-amd64.tar.gz
|
||||
asset_name: enso-bundle-${{ env.DIST_VERSION }}-linux-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -582,8 +572,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
}}-macos-amd64.tar.gz
|
||||
asset_name: enso-bundle-${{ env.DIST_VERSION }}-macos-amd64.tar.gz
|
||||
asset_content_type: application/x-tar
|
||||
@ -593,8 +582,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||
asset_path:
|
||||
repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
asset_path: repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
|
||||
}}-windows-amd64.zip
|
||||
asset_name: enso-bundle-${{ env.DIST_VERSION }}-windows-amd64.zip
|
||||
asset_content_type: application/zip
|
||||
|
3 .github/workflows/rust.yml vendored
@@ -160,8 +160,7 @@ jobs:
          WASMPACKURL:
            https://github.com/rustwasm/wasm-pack/releases/download/v${{
            env.wasmpackVersion }}
          WASMPACKDIR:
            wasm-pack-v${{ env.wasmpackVersion }}-x86_64-unknown-linux-musl
          WASMPACKDIR: wasm-pack-v${{ env.wasmpackVersion }}-x86_64-unknown-linux-musl
        shell: bash
        run: |
          curl -L "$WASMPACKURL/$WASMPACKDIR.tar.gz" | tar -xz -C .
@@ -13,5 +13,8 @@ distribution/lib/Standard/Database/*/THIRD-PARTY
built-distribution/
THIRD-PARTY

# GUI
gui/dist/
**/scala-parser.js
**/package-lock.json
**/msdfgen_wasm.js
@@ -118,11 +118,6 @@ let installNode = {
    },
}

let installPrettier = {
    name: 'Install Prettier',
    run: 'npm install --save-dev --save-exact prettier',
}

let installClippy = {
    name: 'Install Clippy',
    run: 'rustup component add clippy',
@@ -192,16 +187,6 @@ let buildPackage = {
    },
}

let lintMarkdown = {
    name: 'Lint Markdown sources',
    run: "npx prettier --check '*.md'",
}

let lintJavaScript = {
    name: 'Lint JavaScript sources',
    run: "npx prettier --check 'src/**/*.js'",
}

let lintRust = {
    name: 'Lint Rust sources',
    run: 'node ./run lint --skip-version-validation',
@@ -328,11 +313,6 @@ let assertChangelogWasUpdated = [
// ======================

let uploadGitHubRelease = [
    installPrettier,
    {
        name: `Pretty print changelog.`,
        run: 'npx prettier --prose-wrap never CHANGELOG.md --write',
    },
    {
        name: `Upload GitHub Release`,
        uses: 'softprops/action-gh-release@v1',
@@ -467,11 +447,8 @@ let workflow = {
        installNode,
        installTypeScript,
        installRust,
        installPrettier,
        installClippy,
        installFmt,
        lintMarkdown,
        lintJavaScript,
        lintRust,
    ]),
    test: job_on_linux_cached('test_native', 'Native Tests', [
@ -1,57 +1,53 @@
|
||||
import { StaticNavigation } from "components/navigation";
|
||||
import {
|
||||
Container,
|
||||
ContainerOrScreenIfSmall,
|
||||
RootContainer,
|
||||
} from "components/container";
|
||||
import { Header } from "components/header";
|
||||
import { Chapter } from "components/chapter";
|
||||
import { SectionCommunity } from "components/section-community";
|
||||
import { SectionFooter } from "components/section-footer";
|
||||
import { StickyButtons } from "components/sticky-buttons";
|
||||
import { StaticNavigation } from 'components/navigation'
|
||||
import { Container, ContainerOrScreenIfSmall, RootContainer } from 'components/container'
|
||||
import { Header } from 'components/header'
|
||||
import { Chapter } from 'components/chapter'
|
||||
import { SectionCommunity } from 'components/section-community'
|
||||
import { SectionFooter } from 'components/section-footer'
|
||||
import { StickyButtons } from 'components/sticky-buttons'
|
||||
|
||||
import AtomsIcon from "../../../public/img/icon/atoms.svg";
|
||||
import MethodsIcon from "../../../public/img/icon/methods.svg";
|
||||
import SubmodulesIcon from "../../../public/img/icon/submodules.svg";
|
||||
import AtomsIcon from '../../../public/img/icon/atoms.svg'
|
||||
import MethodsIcon from '../../../public/img/icon/methods.svg'
|
||||
import SubmodulesIcon from '../../../public/img/icon/submodules.svg'
|
||||
|
||||
function Docs() {
|
||||
return (
|
||||
<div>
|
||||
{/*<div className="breadcrumb-panel">*/}
|
||||
{/* <Container>/!*BREADCRUMBS2*!/</Container>*/}
|
||||
{/*</div>*/}
|
||||
<Container>
|
||||
<div className="root">
|
||||
<div className="toc">{/*BREADCRUMBS*/}</div>
|
||||
{/*PAGE*/}
|
||||
return (
|
||||
<div>
|
||||
{/*<div className="breadcrumb-panel">*/}
|
||||
{/* <Container>/!*BREADCRUMBS2*!/</Container>*/}
|
||||
{/*</div>*/}
|
||||
<Container>
|
||||
<div className="root">
|
||||
<div className="toc">{/*BREADCRUMBS*/}</div>
|
||||
{/*PAGE*/}
|
||||
</div>
|
||||
</Container>
|
||||
</div>
|
||||
</Container>
|
||||
</div>
|
||||
);
|
||||
)
|
||||
}
|
||||
|
||||
export default function Main(props) {
|
||||
return (
|
||||
<RootContainer className="theme-light">
|
||||
<Header />
|
||||
return (
|
||||
<RootContainer className="theme-light">
|
||||
<Header />
|
||||
|
||||
<Chapter id="home" noSpacing="true">
|
||||
<div className="bg-lang-bg">
|
||||
<StaticNavigation dark="true" />
|
||||
</div>
|
||||
</Chapter>
|
||||
<div className="doc">
|
||||
<Docs />
|
||||
</div>
|
||||
<Chapter id="community">
|
||||
<SectionCommunity />
|
||||
</Chapter>
|
||||
<Chapter id="home" noSpacing="true">
|
||||
<div className="bg-lang-bg">
|
||||
<StaticNavigation dark="true" />
|
||||
</div>
|
||||
</Chapter>
|
||||
<div className="doc">
|
||||
<Docs />
|
||||
</div>
|
||||
<Chapter id="community">
|
||||
<SectionCommunity />
|
||||
</Chapter>
|
||||
|
||||
<StickyButtons />
|
||||
<StickyButtons />
|
||||
|
||||
<Chapter id="footer">
|
||||
<SectionFooter />
|
||||
</Chapter>
|
||||
</RootContainer>
|
||||
);
|
||||
<Chapter id="footer">
|
||||
<SectionFooter />
|
||||
</Chapter>
|
||||
</RootContainer>
|
||||
)
|
||||
}
|
||||
|
@@ -1,5 +1,5 @@
{
    "devDependencies": {
        "prettier": "2.0.5"
        "prettier": "2.4.1"
    }
}
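The Prettier configuration itself is not part of this diff, only the version bump above and the reformatted output throughout the commit. The JavaScript changes (semicolons dropped, double quotes replaced by single quotes, arrow-function parameters unparenthesized, short wrapped expressions re-joined onto one line) are consistent with options roughly like the sketch below; these values are inferred from the before/after formatting, not taken from the repository:

    // .prettierrc.js — hypothetical sketch matching the style visible in this diff
    module.exports = {
        semi: false,          // statements lose their trailing semicolons
        singleQuote: true,    // "..." strings become '...'
        arrowParens: 'avoid', // (err) => ... becomes err => ...
        printWidth: 100,      // wrapped argument lists are re-joined onto one line
    }

Markdown prose wrapping is handled separately via the CLI flag shown in the workflows (npx prettier --prose-wrap never CHANGELOG.md --write) rather than in the shared configuration.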
@ -1,63 +1,56 @@
|
||||
const fs = require("fs");
|
||||
const proc = require("child_process");
|
||||
const fs = require('fs')
|
||||
const proc = require('child_process')
|
||||
|
||||
const skipChangelogInfix = "[no-changelog]";
|
||||
const changelogPath = process.argv[2];
|
||||
const baseRef = process.argv[3];
|
||||
const skipChangelogInfix = '[no-changelog]'
|
||||
const changelogPath = process.argv[2]
|
||||
const baseRef = process.argv[3]
|
||||
|
||||
/// Runs the git command with the provided arguments.
|
||||
function runGit(args) {
|
||||
const result = proc.spawnSync("git", args);
|
||||
if (result.error) {
|
||||
console.log("Cannot access git", result.error);
|
||||
process.exit(1);
|
||||
}
|
||||
return result;
|
||||
const result = proc.spawnSync('git', args)
|
||||
if (result.error) {
|
||||
console.log('Cannot access git', result.error)
|
||||
process.exit(1)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
/** Checks if the changelog file was changed in any commits that are part of the
|
||||
* PR.
|
||||
*/
|
||||
function wasChangelogModified() {
|
||||
const diffArgs = [
|
||||
"--no-pager",
|
||||
"diff",
|
||||
"--exit-code",
|
||||
baseRef,
|
||||
"--",
|
||||
changelogPath,
|
||||
];
|
||||
const diffArgs = ['--no-pager', 'diff', '--exit-code', baseRef, '--', changelogPath]
|
||||
|
||||
const result = runGit(diffArgs);
|
||||
const exitCode = result.status;
|
||||
console.log(result.stdout.toString("utf-8"));
|
||||
const noDifference = exitCode == 0;
|
||||
return !noDifference;
|
||||
const result = runGit(diffArgs)
|
||||
const exitCode = result.status
|
||||
console.log(result.stdout.toString('utf-8'))
|
||||
const noDifference = exitCode == 0
|
||||
return !noDifference
|
||||
}
|
||||
|
||||
/// Checks if any commit has overridden the changelog check.
|
||||
function isChangelogSkipped() {
|
||||
const logArgs = ["--no-pager", "log", "HEAD~3...HEAD", "--pretty=oneline"];
|
||||
const result = runGit(logArgs);
|
||||
const logArgs = ['--no-pager', 'log', 'HEAD~3...HEAD', '--pretty=oneline']
|
||||
const result = runGit(logArgs)
|
||||
|
||||
const output = result.stdout.toString("utf-8");
|
||||
const containsSkipCommit = output.indexOf(skipChangelogInfix) >= 0;
|
||||
return containsSkipCommit;
|
||||
const output = result.stdout.toString('utf-8')
|
||||
const containsSkipCommit = output.indexOf(skipChangelogInfix) >= 0
|
||||
return containsSkipCommit
|
||||
}
|
||||
|
||||
if (wasChangelogModified()) {
|
||||
console.log("Changelog was changed");
|
||||
process.exit(0);
|
||||
console.log('Changelog was changed')
|
||||
process.exit(0)
|
||||
} else {
|
||||
console.log("No changes to the changelog");
|
||||
if (isChangelogSkipped()) {
|
||||
console.log(
|
||||
"But one of the commits within the PR includes " +
|
||||
skipChangelogInfix +
|
||||
", so the check is skipped."
|
||||
);
|
||||
process.exit(0);
|
||||
} else {
|
||||
process.exit(1);
|
||||
}
|
||||
console.log('No changes to the changelog')
|
||||
if (isChangelogSkipped()) {
|
||||
console.log(
|
||||
'But one of the commits within the PR includes ' +
|
||||
skipChangelogInfix +
|
||||
', so the check is skipped.'
|
||||
)
|
||||
process.exit(0)
|
||||
} else {
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
@ -1,17 +1,14 @@
|
||||
const fs = require("fs");
|
||||
const fs = require('fs')
|
||||
|
||||
const path = "build.sbt";
|
||||
const version = process.argv[2];
|
||||
const edition = process.argv[3];
|
||||
const path = 'build.sbt'
|
||||
const version = process.argv[2]
|
||||
const edition = process.argv[3]
|
||||
|
||||
const content = fs.readFileSync(path, { encoding: "utf-8" });
|
||||
const content = fs.readFileSync(path, { encoding: 'utf-8' })
|
||||
const updated = content
|
||||
.replace(/val ensoVersion.*= ".*"/, 'val ensoVersion = "' + version + '"')
|
||||
.replace(
|
||||
/val currentEdition.*= ".*"/,
|
||||
'val currentEdition = "' + edition + '"'
|
||||
);
|
||||
fs.writeFileSync(path, updated);
|
||||
.replace(/val ensoVersion.*= ".*"/, 'val ensoVersion = "' + version + '"')
|
||||
.replace(/val currentEdition.*= ".*"/, 'val currentEdition = "' + edition + '"')
|
||||
fs.writeFileSync(path, updated)
|
||||
|
||||
console.log("Updated build version to " + version);
|
||||
console.log("Updated build edition to " + edition);
|
||||
console.log('Updated build version to ' + version)
|
||||
console.log('Updated build edition to ' + edition)
|
||||
|
@ -1,59 +1,56 @@
|
||||
const fs = require("fs");
|
||||
const fs = require('fs')
|
||||
|
||||
const inputPath = process.argv[2];
|
||||
const outputPath = process.argv[3];
|
||||
const inputPath = process.argv[2]
|
||||
const outputPath = process.argv[3]
|
||||
|
||||
console.log("Extracting release notes from " + inputPath + " to " + outputPath);
|
||||
console.log('Extracting release notes from ' + inputPath + ' to ' + outputPath)
|
||||
|
||||
/** Returns the part of the text until the second top-level heading (exclusive)
|
||||
* in Markdown formatting.
|
||||
*/
|
||||
function cutFirstSection(content) {
|
||||
const nightlySectionRegex = /^# Enso Next$/gm;
|
||||
function findNightlySectionStart(text) {
|
||||
return text.search(nightlySectionRegex);
|
||||
}
|
||||
const regularSectionRegex = /^# Enso .*? \(\d\d\d\d-\d\d-\d\d\)$/gm;
|
||||
function findFirstRegularSectionStart(text) {
|
||||
return text.search(regularSectionRegex);
|
||||
}
|
||||
function findNewline(text) {
|
||||
return text.indexOf("\n");
|
||||
}
|
||||
const nightlySectionRegex = /^# Enso Next$/gm
|
||||
function findNightlySectionStart(text) {
|
||||
return text.search(nightlySectionRegex)
|
||||
}
|
||||
const regularSectionRegex = /^# Enso .*? \(\d\d\d\d-\d\d-\d\d\)$/gm
|
||||
function findFirstRegularSectionStart(text) {
|
||||
return text.search(regularSectionRegex)
|
||||
}
|
||||
function findNewline(text) {
|
||||
return text.indexOf('\n')
|
||||
}
|
||||
|
||||
const firstHeading = findNightlySectionStart(content);
|
||||
if (firstHeading < 0) {
|
||||
throw "Could not find the nightly section, matching " + nightlySectionRegex;
|
||||
}
|
||||
const firstHeading = findNightlySectionStart(content)
|
||||
if (firstHeading < 0) {
|
||||
throw 'Could not find the nightly section, matching ' + nightlySectionRegex
|
||||
}
|
||||
|
||||
const restOffset = firstHeading + 2;
|
||||
const newLineOffset = findNewline(content.substring(restOffset));
|
||||
if (newLineOffset < 0) {
|
||||
throw "No content after the section heading";
|
||||
}
|
||||
const restStart = restOffset + newLineOffset + 1;
|
||||
const restOffset = firstHeading + 2
|
||||
const newLineOffset = findNewline(content.substring(restOffset))
|
||||
if (newLineOffset < 0) {
|
||||
throw 'No content after the section heading'
|
||||
}
|
||||
const restStart = restOffset + newLineOffset + 1
|
||||
|
||||
const rest = content.substring(restStart);
|
||||
const secondHeading = findFirstRegularSectionStart(rest);
|
||||
if (secondHeading < 0) {
|
||||
throw (
|
||||
"Could not find the first released section, matching" +
|
||||
regularSectionRegex
|
||||
);
|
||||
}
|
||||
const rest = content.substring(restStart)
|
||||
const secondHeading = findFirstRegularSectionStart(rest)
|
||||
if (secondHeading < 0) {
|
||||
throw 'Could not find the first released section, matching' + regularSectionRegex
|
||||
}
|
||||
|
||||
const firstSectionContent = rest.substring(0, secondHeading);
|
||||
return firstSectionContent;
|
||||
const firstSectionContent = rest.substring(0, secondHeading)
|
||||
return firstSectionContent
|
||||
}
|
||||
|
||||
try {
|
||||
const content = fs.readFileSync(inputPath, { encoding: "utf-8" });
|
||||
const nightlyPart = cutFirstSection(content);
|
||||
fs.writeFileSync(outputPath, nightlyPart);
|
||||
const content = fs.readFileSync(inputPath, { encoding: 'utf-8' })
|
||||
const nightlyPart = cutFirstSection(content)
|
||||
fs.writeFileSync(outputPath, nightlyPart)
|
||||
|
||||
console.log("Created " + outputPath + " with the following content:");
|
||||
console.log(nightlyPart);
|
||||
console.log('Created ' + outputPath + ' with the following content:')
|
||||
console.log(nightlyPart)
|
||||
} catch (exc) {
|
||||
console.error(exc);
|
||||
process.exit(1);
|
||||
console.error(exc)
|
||||
process.exit(1)
|
||||
}
|
||||
|
@ -1,73 +1,66 @@
|
||||
const { Octokit } = require("@octokit/core");
|
||||
const { Octokit } = require('@octokit/core')
|
||||
|
||||
const organization = "enso-org";
|
||||
const organization = 'enso-org'
|
||||
function determineRepositoryName() {
|
||||
const fallback = "enso";
|
||||
const fallbackMessage =
|
||||
"Could not determine the repository name, falling back to the default.";
|
||||
const fullName = process.env.GITHUB_REPOSITORY;
|
||||
if (!fullName) {
|
||||
console.log(fallbackMessage);
|
||||
return fallback;
|
||||
}
|
||||
const fallback = 'enso'
|
||||
const fallbackMessage = 'Could not determine the repository name, falling back to the default.'
|
||||
const fullName = process.env.GITHUB_REPOSITORY
|
||||
if (!fullName) {
|
||||
console.log(fallbackMessage)
|
||||
return fallback
|
||||
}
|
||||
|
||||
const prefix = organization + "/";
|
||||
if (fullName.startsWith(prefix)) {
|
||||
return fullName.substring(prefix.length);
|
||||
} else {
|
||||
console.log(fallbackMessage);
|
||||
return fallback;
|
||||
}
|
||||
const prefix = organization + '/'
|
||||
if (fullName.startsWith(prefix)) {
|
||||
return fullName.substring(prefix.length)
|
||||
} else {
|
||||
console.log(fallbackMessage)
|
||||
return fallback
|
||||
}
|
||||
}
|
||||
|
||||
const repo = determineRepositoryName();
|
||||
const token = process.env.GITHUB_TOKEN;
|
||||
const octokit = new Octokit({ auth: token });
|
||||
const repo = determineRepositoryName()
|
||||
const token = process.env.GITHUB_TOKEN
|
||||
const octokit = new Octokit({ auth: token })
|
||||
|
||||
function isNightly(release) {
|
||||
const nightlyInfix = "Nightly";
|
||||
return release.name.indexOf(nightlyInfix) >= 0 && !release.draft;
|
||||
const nightlyInfix = 'Nightly'
|
||||
return release.name.indexOf(nightlyInfix) >= 0 && !release.draft
|
||||
}
|
||||
|
||||
async function fetchAllReleases() {
|
||||
const res = await octokit.request("GET /repos/{owner}/{repo}/releases", {
|
||||
owner: organization,
|
||||
repo: repo,
|
||||
});
|
||||
return res.data;
|
||||
const res = await octokit.request('GET /repos/{owner}/{repo}/releases', {
|
||||
owner: organization,
|
||||
repo: repo,
|
||||
})
|
||||
return res.data
|
||||
}
|
||||
|
||||
async function fetchNightlies() {
|
||||
const releases = await fetchAllReleases();
|
||||
const nightlies = releases.filter(isNightly);
|
||||
return nightlies;
|
||||
const releases = await fetchAllReleases()
|
||||
const nightlies = releases.filter(isNightly)
|
||||
return nightlies
|
||||
}
|
||||
|
||||
async function triggerWorkflow(repo, workflow_id, ref) {
|
||||
await octokit.request(
|
||||
"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches",
|
||||
{
|
||||
owner: organization,
|
||||
repo: repo,
|
||||
workflow_id: workflow_id,
|
||||
ref: ref,
|
||||
}
|
||||
);
|
||||
await octokit.request('POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches', {
|
||||
owner: organization,
|
||||
repo: repo,
|
||||
workflow_id: workflow_id,
|
||||
ref: ref,
|
||||
})
|
||||
}
|
||||
|
||||
async function publishRelease(id) {
|
||||
return await octokit.request(
|
||||
"PATCH /repos/{owner}/{repo}/releases/{release_id}",
|
||||
{
|
||||
owner: organization,
|
||||
repo: repo,
|
||||
release_id: id,
|
||||
draft: false,
|
||||
}
|
||||
);
|
||||
return await octokit.request('PATCH /repos/{owner}/{repo}/releases/{release_id}', {
|
||||
owner: organization,
|
||||
repo: repo,
|
||||
release_id: id,
|
||||
draft: false,
|
||||
})
|
||||
}
|
||||
|
||||
exports.fetchAllReleases = fetchAllReleases;
|
||||
exports.fetchNightlies = fetchNightlies;
|
||||
exports.publishRelease = publishRelease;
|
||||
exports.repository = repo;
|
||||
exports.fetchAllReleases = fetchAllReleases
|
||||
exports.fetchNightlies = fetchNightlies
|
||||
exports.publishRelease = publishRelease
|
||||
exports.repository = repo
|
||||
|
@ -1,69 +1,67 @@
|
||||
const fs = require("fs");
|
||||
const github = require("./github");
|
||||
const fs = require('fs')
|
||||
const github = require('./github')
|
||||
|
||||
const currentHeadSha = process.argv[2];
|
||||
const buildConfigPath = "../../../build.sbt";
|
||||
const currentHeadSha = process.argv[2]
|
||||
const buildConfigPath = '../../../build.sbt'
|
||||
|
||||
/// Returns the current date formatted as 'YYYY-mm-dd'.
|
||||
function isoDate() {
|
||||
const now = new Date();
|
||||
const year = "" + now.getFullYear();
|
||||
let month = "" + (now.getMonth() + 1);
|
||||
let day = "" + now.getDate();
|
||||
if (month.length < 2) {
|
||||
month = "0" + month;
|
||||
}
|
||||
if (day.length < 2) {
|
||||
day = "0" + day;
|
||||
}
|
||||
return year + "-" + month + "-" + day;
|
||||
const now = new Date()
|
||||
const year = '' + now.getFullYear()
|
||||
let month = '' + (now.getMonth() + 1)
|
||||
let day = '' + now.getDate()
|
||||
if (month.length < 2) {
|
||||
month = '0' + month
|
||||
}
|
||||
if (day.length < 2) {
|
||||
day = '0' + day
|
||||
}
|
||||
return year + '-' + month + '-' + day
|
||||
}
|
||||
|
||||
/// Sets the step output 'proceed'.
|
||||
function setProceed(proceed) {
|
||||
console.log("::set-output name=proceed::" + proceed);
|
||||
console.log('::set-output name=proceed::' + proceed)
|
||||
}
|
||||
|
||||
/// Sets the step output 'nightly-version'.
|
||||
function setVersionString(name) {
|
||||
console.log("::set-output name=nightly-version::" + name);
|
||||
console.log('::set-output name=nightly-version::' + name)
|
||||
}
|
||||
|
||||
/// Sets the step output 'nightly-edition'.
|
||||
function setEditionName(name) {
|
||||
console.log("::set-output name=nightly-edition::" + name);
|
||||
console.log('::set-output name=nightly-edition::' + name)
|
||||
}
|
||||
|
||||
/** Checks if there are any new changes to see if the nightly build should
|
||||
* proceed.
|
||||
*/
|
||||
function checkProceed(nightlies) {
|
||||
if (nightlies.length == 0) {
|
||||
console.log(
|
||||
"No prior nightly releases found. Proceeding with the first release."
|
||||
);
|
||||
return true;
|
||||
}
|
||||
if (nightlies.length == 0) {
|
||||
console.log('No prior nightly releases found. Proceeding with the first release.')
|
||||
return true
|
||||
}
|
||||
|
||||
const first = nightlies[0];
|
||||
const firstNightlySha = first.target_commitish;
|
||||
if (firstNightlySha == currentHeadSha) {
|
||||
console.log(
|
||||
"Current commit (" +
|
||||
currentHeadSha +
|
||||
") is the same as for the most recent nightly build. A new build is not needed."
|
||||
);
|
||||
return false;
|
||||
} else {
|
||||
console.log(
|
||||
"Current commit (" +
|
||||
currentHeadSha +
|
||||
") is different from the most recent nightly build (" +
|
||||
firstNightlySha +
|
||||
"). Proceeding with a new nightly build."
|
||||
);
|
||||
return true;
|
||||
}
|
||||
const first = nightlies[0]
|
||||
const firstNightlySha = first.target_commitish
|
||||
if (firstNightlySha == currentHeadSha) {
|
||||
console.log(
|
||||
'Current commit (' +
|
||||
currentHeadSha +
|
||||
') is the same as for the most recent nightly build. A new build is not needed.'
|
||||
)
|
||||
return false
|
||||
} else {
|
||||
console.log(
|
||||
'Current commit (' +
|
||||
currentHeadSha +
|
||||
') is different from the most recent nightly build (' +
|
||||
firstNightlySha +
|
||||
'). Proceeding with a new nightly build.'
|
||||
)
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
/** Prepares a version string and edition name for the nightly build.
|
||||
@ -73,60 +71,60 @@ function checkProceed(nightlies) {
|
||||
* increasing numeric suffix is added.
|
||||
*/
|
||||
function prepareVersions(nightlies) {
|
||||
function isTaken(suffix) {
|
||||
return nightlies.some((entry) => entry.tag_name.endsWith(suffix));
|
||||
}
|
||||
|
||||
const content = fs.readFileSync(buildConfigPath, { encoding: "utf-8" });
|
||||
const match = content.match(/val ensoVersion += +"(.*)"/);
|
||||
if (!match) {
|
||||
console.error("Could not find the version string in configuration!");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const version = match[1];
|
||||
let baseName = version;
|
||||
if (!baseName.endsWith("SNAPSHOT")) {
|
||||
baseName += "-SNAPSHOT";
|
||||
}
|
||||
|
||||
const now = isoDate();
|
||||
function makeSuffix(ix) {
|
||||
if (ix == 0) {
|
||||
return now;
|
||||
} else {
|
||||
return now + "." + ix;
|
||||
function isTaken(suffix) {
|
||||
return nightlies.some(entry => entry.tag_name.endsWith(suffix))
|
||||
}
|
||||
}
|
||||
|
||||
let ix = 0;
|
||||
while (isTaken(makeSuffix(ix))) {
|
||||
ix++;
|
||||
}
|
||||
const content = fs.readFileSync(buildConfigPath, { encoding: 'utf-8' })
|
||||
const match = content.match(/val ensoVersion += +"(.*)"/)
|
||||
if (!match) {
|
||||
console.error('Could not find the version string in configuration!')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const suffix = makeSuffix(ix);
|
||||
const versionName = baseName + "." + suffix;
|
||||
const edition = "nightly-" + suffix;
|
||||
console.log("The build will be using version '" + versionName + "'");
|
||||
console.log("The build will be using edition '" + edition + "'");
|
||||
return {
|
||||
version: versionName,
|
||||
edition: edition,
|
||||
};
|
||||
const version = match[1]
|
||||
let baseName = version
|
||||
if (!baseName.endsWith('SNAPSHOT')) {
|
||||
baseName += '-SNAPSHOT'
|
||||
}
|
||||
|
||||
const now = isoDate()
|
||||
function makeSuffix(ix) {
|
||||
if (ix == 0) {
|
||||
return now
|
||||
} else {
|
||||
return now + '.' + ix
|
||||
}
|
||||
}
|
||||
|
||||
let ix = 0
|
||||
while (isTaken(makeSuffix(ix))) {
|
||||
ix++
|
||||
}
|
||||
|
||||
const suffix = makeSuffix(ix)
|
||||
const versionName = baseName + '.' + suffix
|
||||
const edition = 'nightly-' + suffix
|
||||
console.log("The build will be using version '" + versionName + "'")
|
||||
console.log("The build will be using edition '" + edition + "'")
|
||||
return {
|
||||
version: versionName,
|
||||
edition: edition,
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const nightlies = await github.fetchNightlies();
|
||||
const shouldProceed = checkProceed(nightlies);
|
||||
setProceed(shouldProceed);
|
||||
if (shouldProceed) {
|
||||
const versions = prepareVersions(nightlies);
|
||||
setVersionString(versions.version);
|
||||
setEditionName(versions.edition);
|
||||
}
|
||||
const nightlies = await github.fetchNightlies()
|
||||
const shouldProceed = checkProceed(nightlies)
|
||||
setProceed(shouldProceed)
|
||||
if (shouldProceed) {
|
||||
const versions = prepareVersions(nightlies)
|
||||
setVersionString(versions.version)
|
||||
setEditionName(versions.edition)
|
||||
}
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
main().catch(err => {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
|
@ -1,14 +1,14 @@
|
||||
const github = require("./github");
|
||||
const github = require('./github')
|
||||
|
||||
const releaseId = process.argv[2];
|
||||
const releaseId = process.argv[2]
|
||||
|
||||
async function main() {
|
||||
console.log("Making release " + releaseId + " public.");
|
||||
await github.publishRelease(releaseId);
|
||||
console.log("Done.");
|
||||
console.log('Making release ' + releaseId + ' public.')
|
||||
await github.publishRelease(releaseId)
|
||||
console.log('Done.')
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
main().catch(err => {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
|
@ -1,18 +1,16 @@
|
||||
const github = require("./github");
|
||||
const github = require('./github')
|
||||
|
||||
const repo = process.argv[2];
|
||||
const workflow_id = process.argv[3];
|
||||
const ref = process.argv[4];
|
||||
const repo = process.argv[2]
|
||||
const workflow_id = process.argv[3]
|
||||
const ref = process.argv[4]
|
||||
|
||||
async function main() {
|
||||
console.log(
|
||||
"Triggering workflow " + workflow_id + " in " + repo + " on " + ref
|
||||
);
|
||||
await github.triggerWorkflow(repo, workflow_id, ref);
|
||||
console.log("Done.");
|
||||
console.log('Triggering workflow ' + workflow_id + ' in ' + repo + ' on ' + ref)
|
||||
await github.triggerWorkflow(repo, workflow_id, ref)
|
||||
console.log('Done.')
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
main().catch(err => {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
|
@ -1,54 +1,54 @@
|
||||
#!/usr/bin/env node
|
||||
const fs = require("fs");
|
||||
const fs = require('fs')
|
||||
let usage = `Usage: add-release.js PATH TAG [ASSETS...]
|
||||
|
||||
Updates the release list at PATH by adding a new release (if it does not exist)
|
||||
with the provided TAG and list of ASSETS.`;
|
||||
with the provided TAG and list of ASSETS.`
|
||||
|
||||
if (process.argv.length < 4) {
|
||||
console.log(usage);
|
||||
process.exit(2);
|
||||
console.log(usage)
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
let path = process.argv[2];
|
||||
let tag = process.argv[3];
|
||||
let assets = process.argv.slice(4);
|
||||
let path = process.argv[2]
|
||||
let tag = process.argv[3]
|
||||
let assets = process.argv.slice(4)
|
||||
|
||||
if (assets.length == 0) {
|
||||
console.error("Adding a release with no assets.");
|
||||
console.error('Adding a release with no assets.')
|
||||
}
|
||||
|
||||
function releaseAlreadyExists(root, tag) {
|
||||
let existing = root["releases"].find((release) => release["tag"] == tag);
|
||||
return existing !== undefined;
|
||||
let existing = root['releases'].find(release => release['tag'] == tag)
|
||||
return existing !== undefined
|
||||
}
|
||||
|
||||
fs.readFile(path, "utf8", (err, data) => {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
process.exit(2);
|
||||
}
|
||||
|
||||
let root = JSON.parse(data);
|
||||
if (releaseAlreadyExists(root, tag)) {
|
||||
console.error(`Release '${tag}' already exists.`);
|
||||
console.error("No changes written.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
let release = {
|
||||
tag: tag,
|
||||
assets: assets,
|
||||
};
|
||||
|
||||
root["releases"].push(release);
|
||||
|
||||
fs.writeFile(path, JSON.stringify(root, null, 1) + "\n", (err) => {
|
||||
fs.readFile(path, 'utf8', (err, data) => {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
process.exit(2);
|
||||
} else {
|
||||
console.error(`Added release ${tag} with assets ${assets}.`);
|
||||
console.error(err)
|
||||
process.exit(2)
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
let root = JSON.parse(data)
|
||||
if (releaseAlreadyExists(root, tag)) {
|
||||
console.error(`Release '${tag}' already exists.`)
|
||||
console.error('No changes written.')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
let release = {
|
||||
tag: tag,
|
||||
assets: assets,
|
||||
}
|
||||
|
||||
root['releases'].push(release)
|
||||
|
||||
fs.writeFile(path, JSON.stringify(root, null, 1) + '\n', err => {
|
||||
if (err) {
|
||||
console.error(err)
|
||||
process.exit(2)
|
||||
} else {
|
||||
console.error(`Added release ${tag} with assets ${assets}.`)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
@ -1,39 +1,39 @@
|
||||
#!/usr/bin/env node
|
||||
const fs = require("fs");
|
||||
const fs = require('fs')
|
||||
let usage = `Usage: is-broken.js PATH
|
||||
|
||||
Reads the release metadata at PATH (in JSON format) and checks if it contains
|
||||
the broken mark. Exit code 0 indicates that the release contains the broken
|
||||
mark. Other exit codes mean that the release either could not be loaded or is
|
||||
not marked broken. If the release is marked as broken, it also prints the URL to
|
||||
download the broken mark file.`;
|
||||
download the broken mark file.`
|
||||
|
||||
if (process.argv.length != 3) {
|
||||
console.log(usage);
|
||||
process.exit(2);
|
||||
console.log(usage)
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
let path = process.argv[2];
|
||||
let path = process.argv[2]
|
||||
|
||||
function findBrokenMark(release) {
|
||||
let assets = release["assets"];
|
||||
return assets.find((asset) => asset["name"] == "broken");
|
||||
let assets = release['assets']
|
||||
return assets.find(asset => asset['name'] == 'broken')
|
||||
}
|
||||
|
||||
fs.readFile(path, "utf8", (err, data) => {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
process.exit(2);
|
||||
}
|
||||
fs.readFile(path, 'utf8', (err, data) => {
|
||||
if (err) {
|
||||
console.error(err)
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
let release = JSON.parse(data);
|
||||
let mark = findBrokenMark(release);
|
||||
if (mark) {
|
||||
console.error("Release is marked as broken.");
|
||||
console.log(mark["url"]);
|
||||
process.exit(0);
|
||||
} else {
|
||||
console.error("Release is NOT marked as broken.");
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
let release = JSON.parse(data)
|
||||
let mark = findBrokenMark(release)
|
||||
if (mark) {
|
||||
console.error('Release is marked as broken.')
|
||||
console.log(mark['url'])
|
||||
process.exit(0)
|
||||
} else {
|
||||
console.error('Release is NOT marked as broken.')
|
||||
process.exit(1)
|
||||
}
|
||||
})
|
||||
|
@ -1,43 +1,43 @@
|
||||
#!/usr/bin/env node
|
||||
const fs = require("fs");
|
||||
const fs = require('fs')
|
||||
let usage = `Usage: mark-broken.js PATH TAG
|
||||
|
||||
Updates the release list at PATH by adding the broken mark to the release with
|
||||
tag TAG.`;
|
||||
tag TAG.`
|
||||
|
||||
if (process.argv.length != 4) {
|
||||
console.log(usage);
|
||||
process.exit(2);
|
||||
console.log(usage)
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
let path = process.argv[2];
|
||||
let tag = process.argv[3];
|
||||
let path = process.argv[2]
|
||||
let tag = process.argv[3]
|
||||
|
||||
fs.readFile(path, "utf8", (err, data) => {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
process.exit(2);
|
||||
}
|
||||
fs.readFile(path, 'utf8', (err, data) => {
|
||||
if (err) {
|
||||
console.error(err)
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
let root = JSON.parse(data);
|
||||
let release = root["releases"].find((release) => release["tag"] == tag);
|
||||
if (release === undefined) {
|
||||
console.error(`Release '${tag}' is not present in the metadata.`);
|
||||
console.error("No changes written.");
|
||||
process.exit(1);
|
||||
}
|
||||
let root = JSON.parse(data)
|
||||
let release = root['releases'].find(release => release['tag'] == tag)
|
||||
if (release === undefined) {
|
||||
console.error(`Release '${tag}' is not present in the metadata.`)
|
||||
console.error('No changes written.')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
if (release["assets"].includes("broken")) {
|
||||
console.error("Broken mark is already present in the metadata.");
|
||||
} else {
|
||||
release["assets"].push("broken");
|
||||
fs.writeFile(path, JSON.stringify(root, null, 1) + "\n", (err) => {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
process.exit(2);
|
||||
} else {
|
||||
console.error("Broken mark has been added.");
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
if (release['assets'].includes('broken')) {
|
||||
console.error('Broken mark is already present in the metadata.')
|
||||
} else {
|
||||
release['assets'].push('broken')
|
||||
fs.writeFile(path, JSON.stringify(root, null, 1) + '\n', err => {
|
||||
if (err) {
|
||||
console.error(err)
|
||||
process.exit(2)
|
||||
} else {
|
||||
console.error('Broken mark has been added.')
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
|
@ -1,140 +1,136 @@
|
||||
const reviewRoot = "../../target";
|
||||
const settingsRoot = "../../tools/legal-review";
|
||||
const reviewRoot = '../../target'
|
||||
const settingsRoot = '../../tools/legal-review'
|
||||
|
||||
const express = require("express");
|
||||
const app = express();
|
||||
const open = require("open");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const express = require('express')
|
||||
const app = express()
|
||||
const open = require('open')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
|
||||
// The home page that lists available reports.
|
||||
app.get("/", function (req, res) {
|
||||
let html = "<h1>Report review</h1>";
|
||||
const files = fs.readdirSync(reviewRoot);
|
||||
const reports = files
|
||||
.map((f) => f.match(/^(.*)-report.html$/))
|
||||
.filter((m) => m != null)
|
||||
.map((m) => m[1]);
|
||||
if (reports.length == 0) {
|
||||
html +=
|
||||
"No reports found. " +
|
||||
'Run <pre style="display:inline">enso / gatherLicenses</pre> first.';
|
||||
} else {
|
||||
html += "Select report:";
|
||||
html += "<ul>";
|
||||
reports.forEach((report) => {
|
||||
html += '<li><a href="/report/' + report + '">' + report + "</a></li>";
|
||||
});
|
||||
html += "</ul>";
|
||||
}
|
||||
res.send(html);
|
||||
});
|
||||
app.get('/', function (req, res) {
|
||||
let html = '<h1>Report review</h1>'
|
||||
const files = fs.readdirSync(reviewRoot)
|
||||
const reports = files
|
||||
.map(f => f.match(/^(.*)-report.html$/))
|
||||
.filter(m => m != null)
|
||||
.map(m => m[1])
|
||||
if (reports.length == 0) {
|
||||
html +=
|
||||
'No reports found. ' +
|
||||
'Run <pre style="display:inline">enso / gatherLicenses</pre> first.'
|
||||
} else {
|
||||
html += 'Select report:'
|
||||
html += '<ul>'
|
||||
reports.forEach(report => {
|
||||
html += '<li><a href="/report/' + report + '">' + report + '</a></li>'
|
||||
})
|
||||
html += '</ul>'
|
||||
}
|
||||
res.send(html)
|
||||
})
|
||||
|
||||
// Serves the injection script.
|
||||
app.use("/static", express.static("static"));
|
||||
app.use('/static', express.static('static'))
|
||||
|
||||
// Serves contents of the given report, injecting the review-mode script.
|
||||
app.get("/report/:report", function (req, res) {
|
||||
const report = req.params["report"];
|
||||
console.log("Opening report for ", report);
|
||||
fs.readFile(
|
||||
path.join(reviewRoot, report + "-report.html"),
|
||||
"utf-8",
|
||||
(err, data) => {
|
||||
const injection =
|
||||
'<script src="/static/inject.js"></script>' +
|
||||
'<script>var reportName = "' +
|
||||
report +
|
||||
'";</script>';
|
||||
if (err) {
|
||||
res.status(400).send(err);
|
||||
} else {
|
||||
const injected = data.replace("</head>", injection + "</head>");
|
||||
res.send(injected);
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
app.get('/report/:report', function (req, res) {
|
||||
const report = req.params['report']
|
||||
console.log('Opening report for ', report)
|
||||
fs.readFile(path.join(reviewRoot, report + '-report.html'), 'utf-8', (err, data) => {
|
||||
const injection =
|
||||
'<script src="/static/inject.js"></script>' +
|
||||
'<script>var reportName = "' +
|
||||
report +
|
||||
'";</script>'
|
||||
if (err) {
|
||||
res.status(400).send(err)
|
||||
} else {
|
||||
const injected = data.replace('</head>', injection + '</head>')
|
||||
res.send(injected)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
// Appends a line to the setting file.
|
||||
function addLine(report, package, file, line) {
|
||||
const dir = path.join(settingsRoot, report, package);
|
||||
const location = path.join(dir, file);
|
||||
console.log("Adding " + line + " to " + location);
|
||||
fs.mkdirSync(dir, {
|
||||
recursive: true,
|
||||
});
|
||||
fs.appendFileSync(location, line + "\n");
|
||||
const dir = path.join(settingsRoot, report, package)
|
||||
const location = path.join(dir, file)
|
||||
console.log('Adding ' + line + ' to ' + location)
|
||||
fs.mkdirSync(dir, {
|
||||
recursive: true,
|
||||
})
|
||||
fs.appendFileSync(location, line + '\n')
|
||||
}
|
||||
|
||||
// Removes a line from the setting file.
|
||||
function removeLine(report, package, file, line) {
|
||||
const location = path.join(settingsRoot, report, package, file);
|
||||
console.log("Removing " + line + " from " + location);
|
||||
const lines = fs
|
||||
.readFileSync(location, "utf-8")
|
||||
.split(/\r?\n/)
|
||||
.filter((x) => x.length > 0);
|
||||
const toRemove = lines.filter((x) => x == line);
|
||||
const others = lines.filter((x) => x != line);
|
||||
if (toRemove.length == 0) {
|
||||
throw (
|
||||
"Line " +
|
||||
line +
|
||||
" was not present in the file. " +
|
||||
"Are you sure the report is up to date?"
|
||||
);
|
||||
} else {
|
||||
var newContent = others.join("\n") + "\n";
|
||||
if (others.length == 0) {
|
||||
newContent = "";
|
||||
const location = path.join(settingsRoot, report, package, file)
|
||||
console.log('Removing ' + line + ' from ' + location)
|
||||
const lines = fs
|
||||
.readFileSync(location, 'utf-8')
|
||||
.split(/\r?\n/)
|
||||
.filter(x => x.length > 0)
|
||||
const toRemove = lines.filter(x => x == line)
|
||||
const others = lines.filter(x => x != line)
|
||||
if (toRemove.length == 0) {
|
||||
throw (
|
||||
'Line ' +
|
||||
line +
|
||||
' was not present in the file. ' +
|
||||
'Are you sure the report is up to date?'
|
||||
)
|
||||
} else {
|
||||
var newContent = others.join('\n') + '\n'
|
||||
if (others.length == 0) {
|
||||
newContent = ''
|
||||
}
|
||||
fs.writeFileSync(location, newContent)
|
||||
}
|
||||
fs.writeFileSync(location, newContent);
|
||||
}
|
||||
}
|
||||
|
||||
// Handles the requests to add or remove lines.
|
||||
app.use(express.urlencoded({ extended: true }));
|
||||
app.post("/modify/:report", function (req, res) {
|
||||
const report = req.params["report"];
|
||||
const package = req.body["package"];
|
||||
const action = req.body["action"];
|
||||
const file = req.body["file"];
|
||||
let line = req.body["line"];
|
||||
const encodedLine = req.body["encoded_line"];
|
||||
if (encodedLine !== undefined) {
|
||||
line = Buffer.from(encodedLine, "base64").toString();
|
||||
}
|
||||
|
||||
try {
|
||||
if (action == "add") {
|
||||
addLine(report, package, file, line);
|
||||
} else if (action == "remove") {
|
||||
removeLine(report, package, file, line);
|
||||
} else {
|
||||
throw "Unknown action";
|
||||
app.use(express.urlencoded({ extended: true }))
|
||||
app.post('/modify/:report', function (req, res) {
|
||||
const report = req.params['report']
|
||||
const package = req.body['package']
|
||||
const action = req.body['action']
|
||||
const file = req.body['file']
|
||||
let line = req.body['line']
|
||||
const encodedLine = req.body['encoded_line']
|
||||
if (encodedLine !== undefined) {
|
||||
line = Buffer.from(encodedLine, 'base64').toString()
|
||||
}
|
||||
res.send("OK");
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
res.status(500).send(error);
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
if (action == 'add') {
|
||||
addLine(report, package, file, line)
|
||||
} else if (action == 'remove') {
|
||||
removeLine(report, package, file, line)
|
||||
} else {
|
||||
throw 'Unknown action'
|
||||
}
|
||||
res.send('OK')
|
||||
} catch (error) {
|
||||
console.error(error)
|
||||
res.status(500).send(error)
|
||||
}
|
||||
})
|
||||
|
||||
/*
|
||||
* Listens on a random free port, opens a browser with the home page and waits
|
||||
* for a newline to terminate.
|
||||
*/
|
||||
const server = app.listen(0, () => {
|
||||
const port = server.address().port;
|
||||
console.log("Listening on at ", "http://localhost:" + port + "/");
|
||||
open("http://localhost:" + port + "/");
|
||||
const port = server.address().port
|
||||
console.log('Listening on at ', 'http://localhost:' + port + '/')
|
||||
open('http://localhost:' + port + '/')
|
||||
|
||||
console.log("Press ENTER to stop the server.");
|
||||
process.stdin.on("data", function (chunk) {
|
||||
if (chunk.indexOf("\n") >= 0) {
|
||||
console.log("Good bye");
|
||||
process.exit(0);
|
||||
}
|
||||
});
|
||||
});
|
||||
console.log('Press ENTER to stop the server.')
|
||||
process.stdin.on('data', function (chunk) {
|
||||
if (chunk.indexOf('\n') >= 0) {
|
||||
console.log('Good bye')
|
||||
process.exit(0)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
@ -1,140 +1,128 @@
/** Sets a status text in bottom left part of the screen. */
function setStatus(text, color) {
var status = $("#status");
status.html(text);
if (color === undefined) {
color = "white";
}
status.css("background-color", color);
var status = $('#status')
status.html(text)
if (color === undefined) {
color = 'white'
}
status.css('background-color', color)
}

/** Creates a handler that will request to add or remove a line from a file. */
function makeHandler(elem, data, file, action) {
return function (ev) {
data["file"] = file;
data["action"] = action;
$.post("/modify/" + reportName, data, function (response) {
$(elem).html(
'<span style="color:gray">Modified, if you want to ' +
"change this value, regenerate the report first</span>"
);
var tab = $(elem).closest("div").parent();
var title = tab.children("h4");
tab.accordion("option", "active", false);
var info = "added " + file;
if (action == "remove") {
info = "undone review";
}
var newTitle =
'<span style="text-decoration: line-through;">' +
title.html() +
"</span><br>" +
info;
title.html(newTitle);
title.find("span").css("color", "gray");
setStatus("Review for " + data["package"] + " sent.");
}).fail(function (err) {
setStatus("Failed to send review: " + JSON.stringify(err), "red");
});
setStatus("Sending review...");
};
return function (ev) {
data['file'] = file
data['action'] = action
$.post('/modify/' + reportName, data, function (response) {
$(elem).html(
'<span style="color:gray">Modified, if you want to ' +
'change this value, regenerate the report first</span>'
)
var tab = $(elem).closest('div').parent()
var title = tab.children('h4')
tab.accordion('option', 'active', false)
var info = 'added ' + file
if (action == 'remove') {
info = 'undone review'
}
var newTitle =
'<span style="text-decoration: line-through;">' +
title.html() +
'</span><br>' +
info
title.html(newTitle)
title.find('span').css('color', 'gray')
setStatus('Review for ' + data['package'] + ' sent.')
}).fail(function (err) {
setStatus('Failed to send review: ' + JSON.stringify(err), 'red')
})
setStatus('Sending review...')
}
}

$(function () {
$("body").prepend(
'<div style="color:red">This review helper tool does not regenerate the ' +
"report - to see the changes that are applied using this tool after " +
"refreshing the page, you need to regenerate the report using the " +
"`gatherLicenses` command.</div>"
);
$("body").append(
'<div id="status" ' +
'style="position: fixed;left:4pt;bottom:4pt">' +
"Loading...</div>"
);
var copys = $(".copyright-ui");
var files = $(".file-ui");
$('body').prepend(
'<div style="color:red">This review helper tool does not regenerate the ' +
'report - to see the changes that are applied using this tool after ' +
'refreshing the page, you need to regenerate the report using the ' +
'`gatherLicenses` command.</div>'
)
$('body').append(
'<div id="status" ' + 'style="position: fixed;left:4pt;bottom:4pt">' + 'Loading...</div>'
)
var copys = $('.copyright-ui')
var files = $('.file-ui')

copyrightMap = {
Ignore: "copyright-ignore",
KeepWithContext: "copyright-keep-context",
Keep: "copyright-keep",
};

copys.each(function (index) {
var package = $(this).data("package");
var encodedContent = $(this).data("content");
var status = $(this).data("status");
var contexts = parseInt($(this).data("contexts"));
var data = {
encoded_line: encodedContent,
package: package,
};
if (status == "NotReviewed") {
var buttons =
'<button class="ignore">Ignore</button>' +
'<button class="keep">Keep</button>' +
'<button class="keepctx">Keep as context</button>';
$(this).html(buttons);
$(this)
.children(".ignore")
.on("click", makeHandler(this, data, "copyright-ignore", "add"));
$(this)
.children(".keep")
.on("click", makeHandler(this, data, "copyright-keep", "add"));
if (contexts == 1) {
$(this)
.children(".keepctx")
.on(
"click",
makeHandler(this, data, "copyright-keep-context", "add")
);
} else {
$(this).children(".keepctx").attr("disabled", true);
}
} else if (status != "Added") {
$(this).html("<button>Undo review</button>");
$(this)
.children("button")
.on("click", makeHandler(this, data, copyrightMap[status], "remove"));
} else {
$(this).html("<button disabled>This notice was added manually</button>");
copyrightMap = {
Ignore: 'copyright-ignore',
KeepWithContext: 'copyright-keep-context',
Keep: 'copyright-keep',
}
});

filesMap = {
Ignore: "files-ignore",
Keep: "files-keep",
};
copys.each(function (index) {
var package = $(this).data('package')
var encodedContent = $(this).data('content')
var status = $(this).data('status')
var contexts = parseInt($(this).data('contexts'))
var data = {
encoded_line: encodedContent,
package: package,
}
if (status == 'NotReviewed') {
var buttons =
'<button class="ignore">Ignore</button>' +
'<button class="keep">Keep</button>' +
'<button class="keepctx">Keep as context</button>'
$(this).html(buttons)
$(this)
.children('.ignore')
.on('click', makeHandler(this, data, 'copyright-ignore', 'add'))
$(this).children('.keep').on('click', makeHandler(this, data, 'copyright-keep', 'add'))
if (contexts == 1) {
$(this)
.children('.keepctx')
.on('click', makeHandler(this, data, 'copyright-keep-context', 'add'))
} else {
$(this).children('.keepctx').attr('disabled', true)
}
} else if (status != 'Added') {
$(this).html('<button>Undo review</button>')
$(this)
.children('button')
.on('click', makeHandler(this, data, copyrightMap[status], 'remove'))
} else {
$(this).html('<button disabled>This notice was added manually</button>')
}
})

files.each(function (index) {
var package = $(this).data("package");
var filename = $(this).data("filename");
var status = $(this).data("status");
var data = {
line: filename,
package: package,
};
if (status == "NotReviewed") {
var buttons =
'<button class="ignore">Ignore</button>' +
'<button class="keep">Keep</button>';
$(this).html(buttons);
$(this)
.children(".ignore")
.on("click", makeHandler(this, data, "files-ignore", "add"));
$(this)
.children(".keep")
.on("click", makeHandler(this, data, "files-keep", "add"));
} else if (status != "Added") {
$(this).html("<button>Undo review</button>");
$(this)
.children("button")
.on("click", makeHandler(this, data, filesMap[status], "remove"));
} else {
$(this).html("<button disabled>This file was added manually</button>");
filesMap = {
Ignore: 'files-ignore',
Keep: 'files-keep',
}
});

setStatus("Initialized");
});
files.each(function (index) {
var package = $(this).data('package')
var filename = $(this).data('filename')
var status = $(this).data('status')
var data = {
line: filename,
package: package,
}
if (status == 'NotReviewed') {
var buttons =
'<button class="ignore">Ignore</button>' + '<button class="keep">Keep</button>'
$(this).html(buttons)
$(this).children('.ignore').on('click', makeHandler(this, data, 'files-ignore', 'add'))
$(this).children('.keep').on('click', makeHandler(this, data, 'files-keep', 'add'))
} else if (status != 'Added') {
$(this).html('<button>Undo review</button>')
$(this)
.children('button')
.on('click', makeHandler(this, data, filesMap[status], 'remove'))
} else {
$(this).html('<button disabled>This file was added manually</button>')
}
})

setStatus('Initialized')
})
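For orientation, here is a minimal sketch (not part of the commit) of the form body that a "Keep" click ends up POSTing through `makeHandler` above. The package and content values are hypothetical; the keys mirror the fields the script sets before calling `$.post`.

const postedBody = {
  package: 'SomePackage', // from the entry's data-package attribute
  encoded_line: 'Q29weXJpZ2h0IChjKSAyMDIxIEV4YW1wbGU=', // base64 of 'Copyright (c) 2021 Example', taken from data-content
  file: 'copyright-keep', // settings file chosen by the clicked button
  action: 'add', // 'remove' when undoing a review
}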
@ -1,67 +1,65 @@
const fs = require("fs");
const path = require("path");
const process = require("child_process");
const fs = require('fs')
const path = require('path')
const process = require('child_process')

/// List of configs to clean.
const configPaths = [
"../../engine/launcher/src/main/resources/META-INF/native-image/org/enso/launcher",
"../../lib/scala/project-manager/src/main/resources/META-INF/native-image/org/enso/projectmanager",
];
'../../engine/launcher/src/main/resources/META-INF/native-image/org/enso/launcher',
'../../lib/scala/project-manager/src/main/resources/META-INF/native-image/org/enso/projectmanager',
]

/// Checks if the entry is ephemeral (contains a pointer in its name).
function isEntryEphemeralClass(entry) {
const name = entry["name"];
if (name === undefined) {
return false;
}
return name.indexOf("/0x00") >= 0;
const name = entry['name']
if (name === undefined) {
return false
}
return name.indexOf('/0x00') >= 0
}

/// Sorts the list of entries in a predictable order.
function sortEntries(entries) {
const copy = Array.from(entries);
copy.sort((first, second) => {
const firstName = first["name"];
const secondName = second["name"];
if (firstName !== undefined && secondName !== undefined) {
return firstName < secondName;
} else if (firstName === undefined && secondName === undefined) {
return JSON.stringify(first) < JSON.stringify(second);
} else {
return firstName === undefined;
}
});
return copy;
const copy = Array.from(entries)
copy.sort((first, second) => {
const firstName = first['name']
const secondName = second['name']
if (firstName !== undefined && secondName !== undefined) {
return firstName < secondName
} else if (firstName === undefined && secondName === undefined) {
return JSON.stringify(first) < JSON.stringify(second)
} else {
return firstName === undefined
}
})
return copy
}

/// Removes ephemeral classes from the reflection config and ensures it has a
/// stable order of entries.
function cleanReflectionConfig(reflectConfigPath) {
const data = fs.readFileSync(reflectConfigPath, "utf-8");
const parsed = JSON.parse(data);
const withoutEphemeral = parsed.filter(
(entry) => !isEntryEphemeralClass(entry)
);
const sorted = sortEntries(withoutEphemeral);
const serialized = JSON.stringify(sorted);
const hasChanges = serialized !== JSON.stringify(parsed);
if (hasChanges) {
fs.writeFileSync(reflectConfigPath, serialized);
console.log("Rewritten " + reflectConfigPath);
} else {
console.log("No changes in " + reflectConfigPath);
}
const data = fs.readFileSync(reflectConfigPath, 'utf-8')
const parsed = JSON.parse(data)
const withoutEphemeral = parsed.filter(entry => !isEntryEphemeralClass(entry))
const sorted = sortEntries(withoutEphemeral)
const serialized = JSON.stringify(sorted)
const hasChanges = serialized !== JSON.stringify(parsed)
if (hasChanges) {
fs.writeFileSync(reflectConfigPath, serialized)
console.log('Rewritten ' + reflectConfigPath)
} else {
console.log('No changes in ' + reflectConfigPath)
}
}

/// Runs prettier on the provided path.
function runPrettier(configPath) {
console.log("Running prettier for " + configPath);
process.spawn("npx", ["prettier", "--write", configPath], {
stdio: "inherit",
});
console.log('Running prettier for ' + configPath)
process.spawn('npx', ['prettier', '--write', configPath], {
stdio: 'inherit',
})
}

configPaths.forEach(function (configPath) {
cleanReflectionConfig(path.join(configPath, "reflect-config.json"));
runPrettier(configPath);
});
cleanReflectionConfig(path.join(configPath, 'reflect-config.json'))
runPrettier(configPath)
})
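As a quick illustration (not part of the commit), this is the effect of the filtering step above on a hypothetical two-entry reflect-config.json; the class names are made up, but the `/0x00` marker is the one the script looks for, and `isEntryEphemeralClass` is the function defined above.

const sample = [
  { name: 'org.enso.launcher.Main' },
  { name: 'org.enso.launcher.Main$$Lambda$123/0x0000000840a1b840' },
]
// Only the named class survives; the generated lambda class is dropped.
const kept = sample.filter(entry => !isEntryEphemeralClass(entry))
// kept is [{ name: 'org.enso.launcher.Main' }]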
@ -1,199 +1,195 @@
#!/usr/bin/env node
const express = require("express");
const crypto = require("crypto");
const path = require("path");
const os = require("os");
const fs = require("fs");
const fsPromises = require("fs/promises");
const multer = require("multer");
const compression = require("compression");
const yargs = require("yargs");
const semverValid = require("semver/functions/valid");
const express = require('express')
const crypto = require('crypto')
const path = require('path')
const os = require('os')
const fs = require('fs')
const fsPromises = require('fs/promises')
const multer = require('multer')
const compression = require('compression')
const yargs = require('yargs')
const semverValid = require('semver/functions/valid')

const argv = yargs
.usage(
"$0",
"Allows to host Enso libraries and editions from the local filesystem through HTTP."
)
.option("port", {
description: "The port to listen on.",
type: "number",
default: 8080,
})
.option("root", {
description:
"The root of the repository. It should contain a `libraries` or `editions` directory. See the documentation for more details.",
type: "string",
default: ".",
})
.option("upload", {
description:
"Specifies whether to allow uploading libraries and which authentication model to choose.",
choices: ["disabled", "no-auth", "constant-token"],
default: "disabled",
})
.help()
.alias("help", "h").argv;
.usage(
'$0',
'Allows to host Enso libraries and editions from the local filesystem through HTTP.'
)
.option('port', {
description: 'The port to listen on.',
type: 'number',
default: 8080,
})
.option('root', {
description:
'The root of the repository. It should contain a `libraries` or `editions` directory. See the documentation for more details.',
type: 'string',
default: '.',
})
.option('upload', {
description:
'Specifies whether to allow uploading libraries and which authentication model to choose.',
choices: ['disabled', 'no-auth', 'constant-token'],
default: 'disabled',
})
.help()
.alias('help', 'h').argv

const libraryRoot = path.join(argv.root, "libraries");
const libraryRoot = path.join(argv.root, 'libraries')

const app = express();
const tmpDir = path.join(os.tmpdir(), "enso-library-repo-uploads");
const upload = multer({ dest: tmpDir });
app.use(compression({ filter: shouldCompress }));
const app = express()
const tmpDir = path.join(os.tmpdir(), 'enso-library-repo-uploads')
const upload = multer({ dest: tmpDir })
app.use(compression({ filter: shouldCompress }))

/** The token to compare against for simple authentication.
*
* If it is not set, no authentication checks are made.
*/
let token = null;
if (argv.upload == "disabled") {
console.log("Uploads are disabled.");
let token = null
if (argv.upload == 'disabled') {
console.log('Uploads are disabled.')
} else {
app.post("/upload", upload.any(), handleUpload);
app.post('/upload', upload.any(), handleUpload)

if (argv.upload == "constant-token") {
const envVar = "ENSO_AUTH_TOKEN";
token = process.env[envVar];
if (!token) {
throw `${envVar} is not defined.`;
if (argv.upload == 'constant-token') {
const envVar = 'ENSO_AUTH_TOKEN'
token = process.env[envVar]
if (!token) {
throw `${envVar} is not defined.`
} else {
console.log(`Checking the ${envVar} to authorize requests.`)
}
} else {
console.log(`Checking the ${envVar} to authorize requests.`);
console.log('WARNING: Uploads are enabled without any authentication.')
}
} else {
console.log("WARNING: Uploads are enabled without any authentication.");
}
}

app.get("/health", function (req, res) {
res.status(200).send("OK");
});
app.get('/health', function (req, res) {
res.status(200).send('OK')
})

app.use(express.static(argv.root));
app.use(express.static(argv.root))

let port = argv.port;
let port = argv.port
if (process.env.PORT) {
port = process.env.PORT;
console.log(
`Overriding the port to ${port} set by the PORT environment variable.`
);
port = process.env.PORT
console.log(`Overriding the port to ${port} set by the PORT environment variable.`)
}
console.log(
`Serving the repository located under ${argv.root} on port ${port}.`
);
console.log(`Serving the repository located under ${argv.root} on port ${port}.`)

const server = app.listen(port);
const server = app.listen(port)

function handleShutdown() {
console.log("Received a signal - shutting down.");
server.close(() => {
console.log("Server terminated.");
});
console.log('Received a signal - shutting down.')
server.close(() => {
console.log('Server terminated.')
})
}
process.on("SIGTERM", handleShutdown);
process.on("SIGINT", handleShutdown);
process.on('SIGTERM', handleShutdown)
process.on('SIGINT', handleShutdown)

/// Specifies if a particular file can be compressed in transfer, if supported.
function shouldCompress(req, res) {
if (req.path.endsWith(".yaml")) {
return true;
}
if (req.path.endsWith('.yaml')) {
return true
}

return compression.filter(req, res);
return compression.filter(req, res)
}

/** Handles upload of a library. */
async function handleUpload(req, res) {
function fail(code, message) {
res.status(code).json({ error: message });
cleanFiles(req.files);
}

if (token !== null) {
const userToken = req.get("Auth-Token");
if (userToken != token) {
return fail(403, "Authorization failed.");
function fail(code, message) {
res.status(code).json({ error: message })
cleanFiles(req.files)
}
}

const version = req.query.version;
const namespace = req.query.namespace;
const name = req.query.name;

if (version === undefined || namespace == undefined || name === undefined) {
return fail(400, "One or more required fields were missing.");
}

if (!isVersionValid(version)) {
return fail(400, `Invalid semver version string [${version}].`);
}

if (!isNamespaceValid(namespace)) {
return fail(400, `Invalid username [${namespace}].`);
}

if (!isNameValid(name)) {
return fail(400, `Invalid library name [${name}].`);
}

for (var i = 0; i < req.files.length; ++i) {
const filename = req.files[i].originalname;
if (!isFilenameValid(filename)) {
return fail(400, `Invalid filename: ${filename}.`);
if (token !== null) {
const userToken = req.get('Auth-Token')
if (userToken != token) {
return fail(403, 'Authorization failed.')
}
}
}

const libraryBasePath = path.join(libraryRoot, namespace, name);
const libraryPath = path.join(libraryBasePath, version);
const version = req.query.version
const namespace = req.query.namespace
const name = req.query.name

/** Finds a name for a temporary directory to move the files to,
if (version === undefined || namespace == undefined || name === undefined) {
return fail(400, 'One or more required fields were missing.')
}

if (!isVersionValid(version)) {
return fail(400, `Invalid semver version string [${version}].`)
}

if (!isNamespaceValid(namespace)) {
return fail(400, `Invalid username [${namespace}].`)
}

if (!isNameValid(name)) {
return fail(400, `Invalid library name [${name}].`)
}

for (var i = 0; i < req.files.length; ++i) {
const filename = req.files[i].originalname
if (!isFilenameValid(filename)) {
return fail(400, `Invalid filename: ${filename}.`)
}
}

const libraryBasePath = path.join(libraryRoot, namespace, name)
const libraryPath = path.join(libraryBasePath, version)

/** Finds a name for a temporary directory to move the files to,
so that the upload can then be committed atomically by renaming
a single directory. */
function findRandomTemporaryDirectory() {
const randomName = crypto.randomBytes(32).toString("hex");
const temporaryPath = path.join(libraryBasePath, randomName);
if (fs.existsSync(temporaryPath)) {
return findRandomTemporaryDirectory();
function findRandomTemporaryDirectory() {
const randomName = crypto.randomBytes(32).toString('hex')
const temporaryPath = path.join(libraryBasePath, randomName)
if (fs.existsSync(temporaryPath)) {
return findRandomTemporaryDirectory()
}

return temporaryPath
}

return temporaryPath;
}
if (fs.existsSync(libraryPath)) {
return fail(
409,
'A library with the given name and version ' +
'combination already exists. Versions are immutable, so you must ' +
'bump the library version when uploading a newer version.'
)
}

if (fs.existsSync(libraryPath)) {
return fail(
409,
"A library with the given name and version " +
"combination already exists. Versions are immutable, so you must " +
"bump the library version when uploading a newer version."
);
}
const temporaryPath = findRandomTemporaryDirectory()
await fsPromises.mkdir(libraryBasePath, { recursive: true })
await fsPromises.mkdir(temporaryPath, { recursive: true })

const temporaryPath = findRandomTemporaryDirectory();
await fsPromises.mkdir(libraryBasePath, { recursive: true });
await fsPromises.mkdir(temporaryPath, { recursive: true });
console.log(`Uploading library [${namespace}.${name}:${version}].`)
try {
await putFiles(temporaryPath, req.files)
await fsPromises.rename(temporaryPath, libraryPath)
} catch (error) {
console.log(`Upload failed: [${error}].`)
console.error(error.stack)
return fail(500, 'Upload failed due to an internal error.')
}

console.log(`Uploading library [${namespace}.${name}:${version}].`);
try {
await putFiles(temporaryPath, req.files);
await fsPromises.rename(temporaryPath, libraryPath);
} catch (error) {
console.log(`Upload failed: [${error}].`);
console.error(error.stack);
return fail(500, "Upload failed due to an internal error.");
}

console.log("Upload complete.");
res.status(200).json({ message: "Successfully uploaded the library." });
console.log('Upload complete.')
res.status(200).json({ message: 'Successfully uploaded the library.' })
}

/// Checks if a version complies with the semver specification.
function isVersionValid(version) {
return semverValid(version) !== null;
return semverValid(version) !== null
}

/// Checks if the namespace/username is valid.
function isNamespaceValid(namespace) {
return /^[A-Za-z][a-z0-9]*$/.test(namespace) && namespace.length >= 3;
return /^[A-Za-z][a-z0-9]*$/.test(namespace) && namespace.length >= 3
}

/** Checks if the library name is valid.
@ -203,37 +199,37 @@ function isNamespaceValid(namespace) {
* for safety.
*/
function isNameValid(name) {
return /^[A-Za-z0-9_]+$/.test(name);
return /^[A-Za-z0-9_]+$/.test(name)
}

// TODO [RW] for now slashes are not permitted to avoid attacks; later on at least the `meta` directory should be allowed, but not much besides that
/// Checks if the uploaded filename is valid.
function isFilenameValid(name) {
return /^[A-Za-z0-9][A-Za-z0-9\._\-]*$/.test(name);
return /^[A-Za-z0-9][A-Za-z0-9\._\-]*$/.test(name)
}

/// Schedules to remove the files, if they still exist.
function cleanFiles(files) {
files.forEach((file) => {
if (fs.existsSync(file.path)) {
fs.unlink(file.path, (err) => {
if (err) {
console.error(
`Failed to remove ${file.path} ($file.originalname) from a failed upload: ${err}.`
);
files.forEach(file => {
if (fs.existsSync(file.path)) {
fs.unlink(file.path, err => {
if (err) {
console.error(
`Failed to remove ${file.path} ($file.originalname) from a failed upload: ${err}.`
)
}
})
}
});
}
});
})
}

/// Moves the files to the provided destination directory.
async function putFiles(directory, files) {
for (var i = 0; i < files.length; ++i) {
const file = files[i];
const filename = file.originalname;
const destination = path.join(directory, filename);
await fsPromises.copyFile(file.path, destination);
await fsPromises.unlink(file.path);
}
for (var i = 0; i < files.length; ++i) {
const file = files[i]
const filename = file.originalname
const destination = path.join(directory, filename)
await fsPromises.copyFile(file.path, destination)
await fsPromises.unlink(file.path)
}
}
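To round this off, here is a minimal sketch (not part of the commit) of a client for the `/upload` endpoint above. It assumes Node 18+ (global `fetch`, `FormData`, and `Blob`), a server started with `--upload constant-token` on the default port 8080, and a hypothetical namespace, library name, and file.

const fs = require('fs')

async function uploadExample() {
  // Query parameters are read by handleUpload from req.query and validated by the helpers above.
  const query = new URLSearchParams({
    namespace: 'demouser', // must match /^[A-Za-z][a-z0-9]*$/ and be at least 3 characters
    name: 'My_Library', // letters, digits and underscores only
    version: '0.1.0', // must be a valid semver string
  })
  // multer's upload.any() accepts any field name; the original filename must pass isFilenameValid.
  const form = new FormData()
  form.append('files', new Blob([fs.readFileSync('package.yaml')]), 'package.yaml')
  const response = await fetch(`http://localhost:8080/upload?${query}`, {
    method: 'POST',
    headers: { 'Auth-Token': process.env.ENSO_AUTH_TOKEN },
    body: form,
  })
  console.log(await response.json())
}

uploadExample()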