daml/azure-cron.yml
Gary Verhaegen 8ae2ba014d small tweaks to docs generation (#1126)
- Set reasonable timeout. We want this to run once per hour, so it
  should never run for longer than that.
- Fix gcp deploy command to target the right directory. That somehow got
  lost in the manual changes between the PR and what I was manually
  running on CI.
- Keep the not-found page intact, so as to not break 404 responses.
- Retry up to ten times to install nix packages, for each version. Nix
  package download can be a bit flaky, especially when we download large
  collections of packages (e.g. texlive), and Bazel is very unforgiving
  towards flaky nix downloads so instead we now preload all the packages
  before invoking Bazel.
2019-05-14 20:52:54 +00:00

# Azure Pipelines file, see https://aka.ms/yaml
# scheduled triggers are not supported in pipeline syntax yet, so it had to be
# set up from the Azure UI
trigger: none
pr: none
jobs:
- job: docs
  timeoutInMinutes: 50
  pool:
    name: 'linux-pool'
  steps:
  - checkout: self
  - bash: ci/dev-env-install.sh
    displayName: 'Build/Install the Developer Environment'
  - bash: ci/configure-bazel.sh
    displayName: 'Configure Bazel'
    env:
      IS_FORK: $(System.PullRequest.IsFork)
      GOOGLE_APPLICATION_CREDENTIALS_CONTENT: $(GOOGLE_APPLICATION_CREDENTIALS_CONTENT)
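  # This step rebuilds the docs for the latest stable release and for every
  # older release, assembles everything under $(Build.StagingDirectory)/docs,
  # and syncs the result to the gs://daml-docs GCS bucket.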
  - bash: |
      set -euo pipefail
      robustly_download_nix_pkgs() {
        # In recent commits, this is part of the dev-env-install script.
        # However, we have to copy it here to apply it to older versions.
        NIX_FAILED=0
        for i in `seq 10`; do
          NIX_FAILED=0
          nix-build nix -A tools -A cached >$LOG 2>&1 || NIX_FAILED=1
          if [[ $NIX_FAILED -ne 0 ]] && [[ $(tail -n 3 $LOG) == *"unexpected end-of-file"* ]]; then
            echo " Restarting nix-build due to failed cache download"
            continue
          fi
          break
        done
        if [[ $NIX_FAILED -ne 0 ]]; then
          exit 1
        fi
      }
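      # Nix package downloads can be flaky, especially for large package sets
      # (e.g. texlive), and Bazel aborts on any failed nix fetch, so every
      # checkout below pre-fetches its nix packages with the wrapper above
      # before invoking Bazel.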
echo "Loading dev-env..."
eval "$(dev-env/bin/dade-assist)"
echo "Building docs listing"
DOCDIR=$(Build.StagingDirectory)/docs
mkdir -p $DOCDIR
LOG=$(Build.StagingDirectory)/log.txt
RELEASES=$(curl https://api.github.com/repos/digital-asset/daml/releases -s | jq -r '. | map(select(.prerelease == false)) | map(.tag_name)[]')
LATEST=$(echo $RELEASES | awk '{print $1}')
JSON_BODY=$(echo $RELEASES | sed -e 's/ /\n/g' | sed -e 's/v\(.*\)/"\1": "v\1",'/g)
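      # The GitHub API lists releases newest-first, so LATEST is the most
      # recent stable tag; JSON_BODY maps each version to its tag
      # (e.g. "0.12.18": "v0.12.18",) and is used to overwrite versions.json
      # further down.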
echo "Building latest docs: $LATEST"
git checkout $LATEST >$LOG 2>&1
robustly_download_nix_pkgs
bazel build //docs:docs >$LOG 2>&1
tar xzf bazel-genfiles/docs/html.tar.gz --strip-components=1 -C $DOCDIR >$LOG 2>&1
# We need to overwrite the versions.json compiled by the build
echo "{${JSON_BODY%,}}" | jq '.' > $DOCDIR/versions.json
mkdir -p $DOCDIR/${LATEST#v}
tar xzf bazel-genfiles/docs/html.tar.gz --strip-components=1 -C $DOCDIR/${LATEST#v} >$LOG 2>&1
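      # The latest release is thus published twice: at the site root and under
      # its own version directory.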
      for version in $(echo $RELEASES | sed -e 's/ /\n/g' | sed '1d'); do
        echo "Building older docs: $version"
        git checkout $version >$LOG 2>&1
        robustly_download_nix_pkgs
        bazel build //docs:docs >$LOG 2>&1
        mkdir -p $DOCDIR/${version#v}
        tar xzf bazel-genfiles/docs/html.tar.gz --strip-components=1 -C $DOCDIR/${version#v} >$LOG 2>&1
      done
echo "Maintain proper 404 page"
curl -s https://docs.daml.com/not-found.html > $DOCDIR/not-found.html
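      # not-found.html is re-fetched from the live site so that the deleting
      # rsync below does not drop it from the bucket and break 404 responses.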
echo "Pushing to GCS bucket..."
GCS_KEY=$(mktemp)
echo "$GOOGLE_APPLICATION_CREDENTIALS_CONTENT" > $GCS_KEY
gcloud auth activate-service-account --key-file=$GCS_KEY >$LOG 2>&1
BOTO_CONFIG=/dev/null gsutil rsync -d -r $DOCDIR gs://daml-docs >$LOG 2>&1
echo "Done."
    env:
      GOOGLE_APPLICATION_CREDENTIALS_CONTENT: $(GOOGLE_APPLICATION_CREDENTIALS_CONTENT)
  - task: PublishPipelineArtifact@0
    condition: always()
    inputs:
      targetPath: $(Build.StagingDirectory)/log.txt
      artifactName: "Logs"
  - bash: |
      set -euo pipefail
      MESSAGE=$(git log --pretty=format:%s -n1)
      curl -XPOST \
           -i \
           -H 'Content-type: application/json' \
           --data "{\"text\":\"<!here> *FAILED* Daily Docs: <https://dev.azure.com/digitalasset/daml/_build/results?buildId=$(Build.BuildId)|$MESSAGE>\n\"}" \
           $(Slack.URL)
    condition: and(failed(), eq(variables['Build.SourceBranchName'], 'master'))