#!/usr/bin/env bash
set -euo pipefail
# Quiet wrapper around the pushd builtin: suppresses the directory-stack
# listing it normally prints so script output stays clean.
pushd() {
  builtin pushd "$@" >/dev/null
}
|
|
|
|
|
|
|
|
# Quiet wrapper around the popd builtin: suppresses the directory-stack
# listing it normally prints so script output stays clean.
popd() {
  builtin popd >/dev/null
}
|
|
|
|
|
|
|
|
# Print the "version" field of the package.json in the current directory.
pkg_json_version() {
  jq --raw-output .version package.json
}
|
|
|
|
|
2020-05-12 00:08:22 +03:00
|
|
|
# Print the "version" field of the bundled VS Code's package.json
# (relative to the current directory).
vscode_version() {
  jq --raw-output .version lib/vscode/package.json
}
|
|
|
|
|
2020-04-30 14:52:54 +03:00
|
|
|
# Detect the host operating system and print a normalized name:
# "linux", "alpine" (musl-based Linux), "macos", or the raw lowercased
# uname output for anything else.
os() {
  local os
  os=$(uname | tr '[:upper:]' '[:lower:]')
  case "$os" in
    linux)
      # Alpine's ldd doesn't have a version flag but if you use an invalid
      # flag (like --version) it outputs the version to stderr and exits
      # with 1, so capture both and ignore the failure.
      local ldd_output
      ldd_output=$(ldd --version 2>&1 || true)
      if grep -iq musl <<< "$ldd_output"; then
        os="alpine"
      fi
      ;;
    darwin)
      os="macos"
      ;;
  esac
  echo "$os"
}
|
|
|
|
|
|
|
|
# Print the normalized CPU architecture: "arm64" for aarch64, "amd64" for
# x86_64/amd64, otherwise the raw `uname -m` value unchanged.
arch() {
  # Declare cpu local so this helper doesn't leak a global variable into
  # callers (the original assignment was missing `local`).
  local cpu
  cpu="$(uname -m)"
  case "$cpu" in
    aarch64)
      echo arm64
      ;;
    x86_64 | amd64)
      echo amd64
      ;;
    *)
      # Pass through anything unrecognized (e.g. armv7l, arm64 on macOS).
      echo "$cpu"
      ;;
  esac
}
|
2020-05-12 00:08:22 +03:00
|
|
|
|
2021-05-17 22:59:41 +03:00
|
|
|
# Grabs the most recent ci.yaml github workflow run that was triggered from the
# pull request of the release branch for this version (regardless of whether
# that run succeeded or failed). The release branch name must be in semver
# format with a v prepended.
#
# This will contain the artifacts we want.
# https://developer.github.com/v3/actions/workflow-runs/#list-workflow-runs
#
# Reads: VERSION. Outputs: the artifacts URL on stdout; exits 1 with a
# diagnostic on stderr when no matching run is found.
get_artifacts_url() {
  local artifacts_url
  local workflow_runs_url="repos/:owner/:repo/actions/workflows/ci.yaml/runs?event=pull_request"
  local version_branch="v$VERSION"

  # head -n 1 takes the most recent matching run; success is NOT required.
  artifacts_url=$(gh api "$workflow_runs_url" | jq -r ".workflow_runs[] | select(.head_branch == \"$version_branch\") | .artifacts_url" | head -n 1)
  if [[ -z "$artifacts_url" ]]; then
    echo >&2 "ERROR: artifacts_url came back empty"
    # Message fixed: the query matches any run (not only successful ones),
    # and the old text had a "with for" typo.
    echo >&2 "We looked for a ci.yaml run triggered by a pull_request for code-server version: $VERSION and a branch named $version_branch"
    echo >&2 "URL used for gh API call: $workflow_runs_url"
    exit 1
  fi

  echo "$artifacts_url"
}
|
|
|
|
|
|
|
|
# Grabs the artifact's download url.
# https://developer.github.com/v3/actions/artifacts/#list-workflow-run-artifacts
get_artifact_url() {
  local name="$1"
  local listing_url
  listing_url="$(get_artifacts_url)"
  # Pick the first artifact whose name matches exactly.
  gh api "$listing_url" | jq -r ".artifacts[] | select(.name == \"$name\") | .archive_download_url" | head -n 1
}
|
|
|
|
|
|
|
|
# Uses the above two functions to download an artifact into a directory.
#
# $1 - artifact name, $2 - destination directory for the extracted files.
download_artifact() {
  local artifact_name="$1"
  local dst="$2"

  # Download the zip archive to a temp file, extract, then clean up.
  local archive
  archive="$(mktemp)"
  gh api "$(get_artifact_url "$artifact_name")" > "$archive"
  unzip -q -o "$archive" -d "$dst"
  rm "$archive"
}
|
2020-05-16 17:55:46 +03:00
|
|
|
|
|
|
|
rsync() {
|
|
|
|
command rsync -a --del "$@"
|
|
|
|
}
|
|
|
|
|
|
|
|
# Release metadata shared by the build scripts. Assignments are kept
# separate from export so a failing command substitution is not masked.
VERSION="$(pkg_json_version)"
ARCH="$(arch)"
OS="$(os)"
export VERSION ARCH OS

# RELEASE_PATH is the destination directory for the release from the root.
# Defaults to release
RELEASE_PATH="${RELEASE_PATH-release}"
|
2020-12-18 00:16:04 +03:00
|
|
|
|
2020-12-18 19:59:15 +03:00
|
|
|
# VS Code bundles some modules into an asar which is an archive format that
# works like tar. It then seems to get unpacked into node_modules.asar.
#
# I don't know why they do this but all the dependencies they bundle already
# exist in node_modules so just symlink it. We have to do this since not only VS
# Code itself but also extensions will look specifically in this directory for
# files (like the ripgrep binary or the oniguruma wasm).
symlink_asar() {
  rm -rf node_modules.asar
  if [ -z "${WINDIR-}" ]; then
    # ln takes the link name second.
    ln -s node_modules node_modules.asar
  else
    # mklink takes the link name first.
    mklink /J node_modules.asar node_modules
  fi
}
|