add GCS bucket for docs (#1062)

This is a first step towards improving our docs release process. The
goal here is to get rid of the manual "publish docs" step. We only want
to publish docs for "published" releases, i.e. the ones that are not
marked as prerelease, and since publishing a release is a manual step
that Azure cannot trigger on, the job runs as a daily scheduled check
rather than a push-triggered build.
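Concretely, "latest published release" here means the newest release the
GitHub API reports with prerelease == false; the pipeline picks it out
with a jq query along these lines and builds the docs from that tag
(excerpted from azure-daily.yml below):

    # Newest release that is not marked as a prerelease, as reported by
    # the GitHub releases API; the docs are built from this tag.
    LATEST_RELEASE=$(curl -s https://api.github.com/repos/digital-asset/daml/releases \
      | jq -r 'map(select(.prerelease == false)) | map(.tag_name)[0]')
    git checkout "$LATEST_RELEASE"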

Not included in this piece of work:
- Any change to the docs themselves; the goal here is to automate the
current process as a first step. Future plans for the docs themselves
include adding links to older versions of the docs.
- A better way to detect that the docs are already up-to-date, and abort
if so (one possible approach is sketched after this list).
- Including older versions of the docs.
- Switching the DNS record from the current AWS S3 bucket to this new
GCS bucket. That will be a manual step once we're happy with how the
new bucket works.
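
For the "already up-to-date" point, one possible approach (not
implemented in this commit, and the marker object name below is purely
illustrative) would be to record the published tag in the bucket and
bail out early when it already matches the latest release:

    # Hypothetical sketch only: skip the rebuild when the bucket already
    # holds the docs for the latest release. gs://daml-docs/VERSION is an
    # assumed marker object, not something this commit creates.
    PUBLISHED=$(gsutil cat gs://daml-docs/VERSION 2>/dev/null || true)
    if [ "$PUBLISHED" = "$LATEST_RELEASE" ]; then
      echo "Docs for $LATEST_RELEASE already in the bucket, nothing to do."
      exit 0
    fi
    # ...build and rsync as in azure-daily.yml, then record the tag:
    echo "$LATEST_RELEASE" | gsutil cp - gs://daml-docs/VERSION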
Gary Verhaegen 2019-05-11 04:27:17 +01:00 committed by mergify[bot]
parent 526df6fc82
commit 5ab5ced2e3
3 changed files with 83 additions and 2 deletions

azure-daily.yml (new file, +45)

@@ -0,0 +1,45 @@
# Azure Pipelines file, see https://aka.ms/yaml
# scheduled triggers are not supported in pipeline syntax yet, so it had to be
# set up from Azure UI
trigger: none
pr: none
jobs:
  - job: docs
    timeoutInMinutes: 360
    pool:
      name: 'linux-pool'
    steps:
      - checkout: self
      - bash: ci/dev-env-install.sh
        displayName: 'Build/Install the Developer Environment'
      - bash: ci/configure-bazel.sh
        displayName: 'Configure Bazel'
        env:
          IS_FORK: $(System.PullRequest.IsFork)
          GOOGLE_APPLICATION_CREDENTIALS_CONTENT: $(GOOGLE_APPLICATION_CREDENTIALS_CONTENT)
      - bash: |
          set -euo pipefail
          eval "$(dev-env/bin/dade-assist)"
          GCS_KEY=$(mktemp)
          echo "$GOOGLE_APPLICATION_CREDENTIALS_CONTENT" > $GCS_KEY
          gcloud auth activate-service-account --key-file=$GCS_KEY
          LATEST_RELEASE=$(curl https://api.github.com/repos/digital-asset/daml/releases -s | jq -r '. | map(select(.prerelease == false)) | map(.tag_name)[0]')
          git checkout $LATEST_RELEASE
          bazel build //docs
          cp bazel-genfiles/docs/html.tar.gz $(Build.StagingDirectory)/
          cd $(Build.StagingDirectory)
          tar xzf html.tar.gz
          BOTO_CONFIG=/dev/null gsutil rsync -d -r html gs://daml-docs
        env:
          GOOGLE_APPLICATION_CREDENTIALS_CONTENT: $(GOOGLE_APPLICATION_CREDENTIALS_CONTENT)
      - bash: |
          set -euo pipefail
          MESSAGE=$(git log --pretty=format:%s -n1)
          curl -XPOST \
               -i \
               -H 'Content-type: application/json' \
               --data "{\"text\":\"<!here> *FAILED* Daily Docs: <https://dev.azure.com/digitalasset/daml/_build/results?buildId=$(Build.BuildId)|$MESSAGE>\n\"}" \
               $(Slack.URL)
        condition: and(failed(), eq(variables['Build.SourceBranchName'], 'master'))
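
To try the publish step by hand before relying on the daily job, the
same commands can be run locally with a dry-run rsync so nothing in the
bucket is touched (the key file path is only an example; it needs a key
for the writer service account, see writer.tf):

    # Rough local equivalent of the publish step; -n makes gsutil rsync a
    # dry run that only reports what it would copy or delete.
    eval "$(dev-env/bin/dade-assist)"
    gcloud auth activate-service-account --key-file="$HOME/daml-docs-writer.json"
    bazel build //docs
    mkdir -p /tmp/daml-docs
    tar xzf bazel-genfiles/docs/html.tar.gz -C /tmp/daml-docs
    BOTO_CONFIG=/dev/null gsutil rsync -n -d -r /tmp/daml-docs/html gs://daml-docs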


@@ -16,10 +16,10 @@ is_windows() {
## Main
# always run in the project root
# always run in the project root
cd "$(dirname "$0")/.."
# detect the OS
# detect the OS
case $(uname) in
Linux)
os=linux

infra/docs_bucket.tf (new file, +36)

@@ -0,0 +1,36 @@
# Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0

// Setup the documentation bucket
locals {
  doc_bucket = "daml-docs"
  // see main.tf for additional locals
}

module "daml_docs" {
  source          = "./modules/gcp_cdn_bucket"

  labels          = "${local.labels}"
  name            = "${local.doc_bucket}"
  project         = "${local.project}"
  region          = "${local.region}"
  ssl_certificate = "${local.ssl_certificate}"

  // We do not want to delete anything here, but Terraform composition is hard
  // so instead keep objects for 100 years.
  cache_retention_days = 36500
}

// allow rw access for CI writer (see writer.tf)
resource "google_storage_bucket_iam_member" "docs_bucket_writer" {
  bucket = "${module.daml_docs.bucket_name}"

  # https://cloud.google.com/storage/docs/access-control/iam-roles
  role   = "roles/storage.objectAdmin"
  member = "serviceAccount:${google_service_account.writer.email}"
}

output "daml_docs_ip" {
  value = "${module.daml_docs.external_ip}"
}