# Azure Pipelines file, see https://aka.ms/yaml
# Do not run on PRs
pr: none
# Do not run on merge to master
trigger: none
# Do run on a schedule (hourly)
#
# This is currently (2019-08-15) broken on Azure for GitHub-hosted repos. It
# does, however, work as expected for Azure-hosted repos. As a workaround, we
# have created a repo inside Azure that contains an `azure-pipelines.yml` file
# that just triggers this job.
#
# When the situation is resolved, delete that repo in Azure and uncomment the
# following. In the meantime, this should stay commented out so that the job
# does not run twice once Azure fixes the issue.
#schedules:
#- cron: "0 * * * *"
#  displayName: hourly cron
#  branches:
#    include:
#    - master
#  always: true
jobs:
- job: docs
timeoutInMinutes: 50
pool:
name: 'linux-pool'
steps:
- checkout: self
- bash: ci/dev-env-install.sh
displayName: 'Build/Install the Developer Environment'
- bash: ci/configure-bazel.sh
displayName: 'Configure Bazel'
env:
IS_FORK: $(System.PullRequest.IsFork)
GOOGLE_APPLICATION_CREDENTIALS_CONTENT: $(GOOGLE_APPLICATION_CREDENTIALS_CONTENT)
- bash: |
set -euo pipefail
LOG=$(Build.StagingDirectory)/log.txt
touch $LOG
CUR_SHA=$(git rev-parse HEAD)
robustly_download_nix_pkgs() {
# In recent commits this retry logic is part of the dev-env-install script,
# but it is duplicated here so that it also covers the older release tags
# this job checks out.
NIX_FAILED=0
for i in `seq 10`; do
NIX_FAILED=0
nix-build nix -A tools -A cached >$LOG 2>&1 || NIX_FAILED=1
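# A truncated cache download shows up as "unexpected end-of-file" at the end
# of the log; retry in that case, otherwise keep nix-build's exit status.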
if [[ $NIX_FAILED -ne 0 ]] && [[ $(tail -n 3 $LOG) == *"unexpected end-of-file"* ]]; then
echo " Restarting nix-build due to failed cache download"
continue
fi
break
done
if [[ $NIX_FAILED -ne 0 ]]; then
exit 1
fi
}
echo "Loading dev-env..."
eval "$(dev-env/bin/dade-assist)"
echo "Checking for new version..."
GH_RESP=$(mktemp)
curl https://api.github.com/repos/digital-asset/daml/releases -s > $GH_RESP
RELEASES=$(cat $GH_RESP | jq -r '. | map(select(.prerelease == false)) | map(.tag_name)[]')
GH_VERSIONS=$(mktemp)
DOCS_VERSIONS=$(mktemp)
curl -s https://docs.daml.com/versions.json | jq -r 'keys | .[]' | sort > $DOCS_VERSIONS
echo $RELEASES | sed 's/ /\n/g' | sed 's/^v//' | sort > $GH_VERSIONS
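# The docs are up to date when every non-prerelease GitHub tag already
# appears in the published versions.json.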
if diff $DOCS_VERSIONS $GH_VERSIONS; then
echo "No new version found, skipping."
exit 0
fi
echo "Building docs listing"
DOCDIR=$(Build.StagingDirectory)/docs
mkdir -p $DOCDIR
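# The GitHub API lists releases newest first, so the first tag is the latest.
# versions.json maps each released version string to itself.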
LATEST=$(echo $RELEASES | awk '{print $1}')
JSON_BODY=$(echo $RELEASES | sed -e 's/ /\n/g' | sed -e 's/v\(.*\)/"\1": "\1",'/g)
echo "Building latest docs: $LATEST"
git checkout $LATEST >$LOG 2>&1
robustly_download_nix_pkgs
bazel build //docs:docs >$LOG 2>&1
tar xzf bazel-genfiles/docs/html.tar.gz --strip-components=1 -C $DOCDIR >$LOG 2>&1
# We need to overwrite the versions.json compiled by the build
echo "{${JSON_BODY%,}}" | jq '.' > $DOCDIR/versions.json
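# The latest release is served both at the site root (extracted above) and
# under its own version-numbered directory.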
mkdir -p $DOCDIR/${LATEST#v}
tar xzf bazel-genfiles/docs/html.tar.gz --strip-components=1 -C $DOCDIR/${LATEST#v} >$LOG 2>&1
for version in $(echo $RELEASES | sed -e 's/ /\n/g' | sed '1d'); do
echo "Building older docs: $version"
git checkout $version >$LOG 2>&1
robustly_download_nix_pkgs
bazel build //docs:docs >$LOG 2>&1
mkdir -p $DOCDIR/${version#v}
tar xzf bazel-genfiles/docs/html.tar.gz --strip-components=1 -C $DOCDIR/${version#v} >$LOG 2>&1
done
git checkout $CUR_SHA
echo "Done building docs bundle. Checking versions again to avoid a race condition..."
S3_VERSIONS=$(mktemp)
aws s3 cp s3://docs-daml-com/versions.json $S3_VERSIONS
if diff $S3_VERSIONS $DOCDIR/versions.json; then
echo "No new version left to publish; another process must have pushed already."
exit 0
fi
echo "Pushing new versions file first..."
aws s3 cp $DOCDIR/versions.json s3://docs-daml-com/versions.json --acl public-read
echo "Pushing to S3 bucket..."
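# .doctrees/ directories and .buildinfo files are Sphinx build by-products
# that do not need to be served.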
aws s3 sync $DOCDIR \
s3://docs-daml-com/ \
--delete \
--acl public-read \
--exclude '*.doctrees/*' \
--exclude '*.buildinfo'
echo "Refreshing CloudFront cache..."
aws cloudfront create-invalidation \
--distribution-id E1U753I56ERH55 \
--paths '/*'
echo "Publishing $LATEST to Hubspot..."
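# Render the release notes Markdown to HTML via the GitHub markdown API, then
# create a HubSpot blog post and schedule it for publication.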
DESCRIPTION_MD="$(cat $GH_RESP | jq -r '. | map(select(.tag_name=="'$LATEST'"))[0].body')"
DESCRIPTION=$(curl -s -XPOST 'https://api.github.com/markdown/raw' -H 'Content-Type: text/plain' -d"$DESCRIPTION_MD" | jq -sR .)
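# publish_date is the release's published_at timestamp as epoch milliseconds
# (seconds from `date +%s` with "000" appended).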
DATE="$(date -d $(cat $GH_RESP | jq -r '. | map(select(.tag_name=="'$LATEST'"))[0].published_at') +%s)000"
SUMMARY="Release notes for version ${LATEST#v}."
# content_group_id and blog_author_id reference existing items in HubSpot
ID=$(curl -s \
-XPOST 'https://api.hubapi.com/content/api/v2/blog-posts?hapikey='$HUBSPOT_TOKEN \
-d'{"name": "Release of DAML SDK '$LATEST'",
"post_body": '"$DESCRIPTION"',
"content_group_id": 11411412838,
"publish_date": '$DATE',
"post_summary": "'"$SUMMARY"'",
"slug": "'${LATEST#v}'",
"blog_author_id": 11513309969,
"meta_description": "'"$SUMMARY"'"}' \
-H "Content-Type: application/json" \
| jq '.id')
curl -s \
-XPOST 'https://api.hubapi.com/content/api/v2/blog-posts/'$ID'/publish-action?hapikey='$HUBSPOT_TOKEN \
-d'{"action": "schedule-publish"}' \
-H "Content-Type: application/json"
echo "Done."
env:
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
HUBSPOT_TOKEN: $(HUBSPOT_TOKEN)
- task: PublishPipelineArtifact@0
condition: always()
inputs:
targetPath: $(Build.StagingDirectory)/log.txt
artifactName: "Logs"
- bash: |
set -euo pipefail
MESSAGE=$(git log --pretty=format:%s -n1)
curl -XPOST \
-i \
-H 'Content-type: application/json' \
--data "{\"text\":\"<!here> *FAILED* Daily Docs: <https://dev.azure.com/digitalasset/daml/_build/results?buildId=$(Build.BuildId)|$MESSAGE>\n\"}" \
$(Slack.URL)
condition: and(failed(), eq(variables['Build.SourceBranchName'], 'master'))
- job: docker_image
timeoutInMinutes: 60
pool:
name: 'linux-pool'
steps:
- checkout: self
- bash: |
set -euo pipefail
eval "$(dev-env/bin/dade-assist)"
echo $DOCKER_PASSWORD | docker login --username $DOCKER_LOGIN --password-stdin
RELEASES=$(curl https://api.github.com/repos/digital-asset/daml/releases -s | jq -r '. | map(select(.prerelease == false)) | map(.tag_name)[]')
DIR=$(pwd)
VERSIONS=$(curl 'https://hub.docker.com/v2/repositories/digitalasset/daml-sdk/tags/?page_size=10000' -s)
DOCKERFILE_CHANGE_TIME=$(date -uIs -d $(git log -1 --format="%cI" -- ci/docker/daml-sdk/Dockerfile))
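# A tag is (re)built when it is missing from Docker Hub, or when the
# Dockerfile changed after the tag was last pushed.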
for version in $(echo $RELEASES | sed -e 's/ /\n/g'); do
LAST_UPDATE=$(echo $VERSIONS | jq -r '.results[] | select(.name == "'${version#v}'") | .last_updated')
if [[ -n "$LAST_UPDATE" && "$LAST_UPDATE" > "$DOCKERFILE_CHANGE_TIME" ]]; then
echo "${version#v} already exists and is up to date, skipping."
else
if [[ -n "$LAST_UPDATE" ]]; then
echo "${version#v} already exists but Dockerfile has changed; rebuilding..."
fi
echo "Building version ${version#v}..."
cd ci/docker/daml-sdk
docker build -t digitalasset/daml-sdk:${version#v} --build-arg VERSION=${version#v} .
docker push digitalasset/daml-sdk:${version#v}
cd "$DIR"
echo "Done."
fi
done
env:
DOCKER_LOGIN: $(DOCKER_LOGIN)
DOCKER_PASSWORD: $(DOCKER_PASSWORD)
- job: vscode_marketplace
timeoutInMinutes: 10
pool:
name: 'linux-pool'
steps:
- checkout: self
- bash: |
set -euo pipefail
eval "$(dev-env/bin/dade-assist)"
AUTH=$(echo -n "OAuth:${MARKETPLACE_TOKEN}" | base64 -w0)
MARKET=$(curl -H "Authorization: Basic $AUTH" \
-H "Accept: application/json;api-version=5.0-preview.2" \
-s \
"https://marketplace.visualstudio.com/_apis/gallery/publishers/DigitalAssetHoldingsLLC/extensions/daml?flags=1" \
| jq -r '.versions[0].version')
GITHUB=$(curl https://api.github.com/repos/digital-asset/daml/releases -s | jq -r '. | map(select(.prerelease == false)) | map(.tag_name)[0]')
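# Publish only when the marketplace version differs from the latest GitHub
# release and commit 798e96c9 is an ancestor of that release tag.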
if [[ "${GITHUB#v}" != "$MARKET" ]] && git merge-base --is-ancestor 798e96c9b9034eac85ace786b9e1955cf380285c $GITHUB; then
echo "Publishing $GITHUB to VSCode Marketplace"
git checkout $GITHUB
cd compiler/daml-extension
# This produces out/src/extension.js
bazel run @nodejs//:bin/yarn
bazel run @nodejs//:bin/yarn compile
bazel run --run_under="cd $PWD && " @daml_extension_deps//vsce/bin:vsce -- publish ${GITHUB#v} -p $MARKETPLACE_TOKEN
else
if [[ "${GITHUB#v}" == "$MARKET" ]]; then
echo "Version on marketplace is already the latest ($GITHUB)."
else
echo "Latest version is not ready for marketplace publication."
fi
fi
env:
MARKETPLACE_TOKEN: $(VSCODE_MARKETPLACE_TOKEN)
- job: download_stats
timeoutInMinutes: 10
pool:
name: "linux-pool"
steps:
- checkout: self
- bash: |
set -euo pipefail
eval "$(dev-env/bin/dade-assist)"
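# Snapshot the full GitHub releases response (which includes per-asset
# download counts) and archive it, gzipped, under a timestamped name in GCS.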
STATS=$(mktemp)
curl https://api.github.com/repos/digital-asset/daml/releases -s | gzip -9 > $STATS
GCS_KEY=$(mktemp)
echo "$GOOGLE_APPLICATION_CREDENTIALS_CONTENT" > $GCS_KEY
gcloud auth activate-service-account --key-file=$GCS_KEY
BOTO_CONFIG=/dev/null gsutil cp $STATS gs://daml-data/downloads/$(date -u +%Y%m%d_%H%M%SZ).json.gz
env:
GOOGLE_APPLICATION_CREDENTIALS_CONTENT: $(GOOGLE_APPLICATION_CREDENTIALS_CONTENT)